[med-svn] [python-dendropy] 01/02: Imported Upstream version 4.0.2

Andreas Tille tille at debian.org
Mon Jun 22 13:54:37 UTC 2015


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository python-dendropy.

commit 5ac580a86be10a292233484f15555b280a98d600
Author: Andreas Tille <tille at debian.org>
Date:   Mon Jun 22 15:53:14 2015 +0200

    Imported Upstream version 4.0.2
---
 CHANGES.rst                                        |   15 +
 DendroPy.egg-info/PKG-INFO                         |  116 +
 DendroPy.egg-info/SOURCES.txt                      |  457 ++
 DendroPy.egg-info/dependency_links.txt             |    1 +
 DendroPy.egg-info/entry_points.txt                 |    3 +
 DendroPy.egg-info/requires.txt                     |    1 +
 DendroPy.egg-info/top_level.txt                    |    1 +
 DendroPy.egg-info/zip-safe                         |    1 +
 LICENSE.rst                                        |   36 +
 MANIFEST.in                                        |   11 +
 PKG-INFO                                           |  116 +
 README.rst                                         |   86 +
 applications/sumtrees/sumtrees.py                  | 2007 +++++++
 dendropy/__init__.py                               |  196 +
 dendropy/__main__.py                               |   23 +
 dendropy/calculate/__init__.py                     |   18 +
 dendropy/calculate/mathfn.py                       |   36 +
 dendropy/calculate/popgenstat.py                   |  414 ++
 dendropy/calculate/probability.py                  |  324 ++
 dendropy/calculate/statistics.py                   |  502 ++
 dendropy/calculate/treecompare.py                  |  502 ++
 dendropy/calculate/treemeasure.py                  |  361 ++
 dendropy/calculate/treescore.py                    |   27 +
 dendropy/calculate/treesum.py                      |  455 ++
 dendropy/dataio/__init__.py                        |  106 +
 dendropy/dataio/fastareader.py                     |  145 +
 dendropy/dataio/fastawriter.py                     |   74 +
 dendropy/dataio/ioservice.py                       |  569 ++
 dendropy/dataio/newick.py                          |   18 +
 dendropy/dataio/newickreader.py                    |  672 +++
 dendropy/dataio/newickwriter.py                    |  420 ++
 dendropy/dataio/newickyielder.py                   |   75 +
 dendropy/dataio/nexmlreader.py                     |  904 +++
 dendropy/dataio/nexmlwriter.py                     |  665 +++
 dendropy/dataio/nexmlyielder.py                    |   89 +
 dendropy/dataio/nexusprocessing.py                 |  533 ++
 dendropy/dataio/nexusreader.py                     | 1391 +++++
 dendropy/dataio/nexuswriter.py                     |  536 ++
 dendropy/dataio/nexusyielder.py                    |  185 +
 dendropy/dataio/phylipreader.py                    |  308 +
 dendropy/dataio/phylipwriter.py                    |  124 +
 dendropy/dataio/tokenizer.py                       |  293 +
 dendropy/dataio/xmlprocessing.py                   |  177 +
 dendropy/datamodel/__init__.py                     |   18 +
 dendropy/datamodel/basemodel.py                    | 1563 +++++
 dendropy/datamodel/charmatrixmodel.py              | 1978 +++++++
 dendropy/datamodel/charstatemodel.py               | 1425 +++++
 dendropy/datamodel/datasetmodel.py                 |  652 +++
 dendropy/datamodel/taxonmodel.py                   | 2133 +++++++
 dendropy/datamodel/treecollectionmodel.py          | 2989 ++++++++++
 dendropy/datamodel/treemodel.py                    | 6020 ++++++++++++++++++++
 dendropy/interop/__init__.py                       |   21 +
 dendropy/interop/ape.py                            |  189 +
 dendropy/interop/biopython.py                      |   53 +
 dendropy/interop/entrez.py                         |   91 +
 dendropy/interop/ete.py                            |   68 +
 dendropy/interop/gbif.py                           |  394 ++
 dendropy/interop/genbank.py                        | 1014 ++++
 dendropy/interop/muscle.py                         |   43 +
 dendropy/interop/paup.py                           |  954 ++++
 dendropy/interop/raxml.py                          |  678 +++
 dendropy/interop/rstats.py                         |   99 +
 dendropy/interop/seqgen.py                         |  212 +
 dendropy/legacy/__init__.py                        |   19 +
 dendropy/legacy/coalescent.py                      |  106 +
 dendropy/legacy/continuous.py                      |   49 +
 dendropy/legacy/ncbi.py                            |  328 ++
 dendropy/legacy/popgensim.py                       |   34 +
 dendropy/legacy/popgenstat.py                      |   94 +
 dendropy/legacy/reconcile.py                       |   49 +
 dendropy/legacy/seqmodel.py                        |   60 +
 dendropy/legacy/seqsim.py                          |  107 +
 dendropy/legacy/treecalc.py                        |  134 +
 dendropy/legacy/treemanip.py                       |   78 +
 dendropy/legacy/treesim.py                         |   95 +
 dendropy/legacy/treesplit.py                       |  179 +
 dendropy/legacy/treesum.py                         |   43 +
 dendropy/mathlib/__init__.py                       |   19 +
 dendropy/mathlib/mathfn.py                         |   23 +
 dendropy/mathlib/probability.py                    |   23 +
 dendropy/mathlib/statistics.py                     |   23 +
 dendropy/model/__init__.py                         |    0
 dendropy/model/birthdeath.py                       |  932 +++
 dendropy/model/coalescent.py                       |  686 +++
 dendropy/model/continuous.py                       |  500 ++
 dendropy/model/discrete.py                         |  572 ++
 dendropy/model/parsimony.py                        |  291 +
 dendropy/model/reconcile.py                        |  599 ++
 dendropy/model/treeshape.py                        |   31 +
 dendropy/simulate/__init__.py                      |   24 +
 dendropy/simulate/charsim.py                       |   32 +
 dendropy/simulate/popgensim.py                     |  259 +
 dendropy/simulate/treesim.py                       |   51 +
 dendropy/test/__init__.py                          |   26 +
 dendropy/test/__main__.py                          |  144 +
 dendropy/test/base_newick_test_cases.py            |   71 +
 .../test/base_standard_trees_parsing_test_cases.py |  268 +
 dendropy/test/benchmark/__init__.py                |   18 +
 .../benchmark_newick_light_tree_parser.py          |  205 +
 .../test/benchmark/benchmark_newick_tree_parser.py |  123 +
 dendropy/test/benchmark/benchmark_tokenizer.py     |  144 +
 dendropy/test/support/__init__.py                  |    0
 dendropy/test/support/compare_and_validate.py      |  286 +
 dendropy/test/support/coverage_analysis.py         |  123 +
 dendropy/test/support/curated_dataset.py           |   50 +
 dendropy/test/support/curated_test_tree.py         |  355 ++
 dendropy/test/support/curated_test_tree_list.py    |  118 +
 dendropy/test/support/dendropytest.py              |   98 +
 dendropy/test/support/mockrandom.py                |  411 ++
 dendropy/test/support/mockreader.py                |   31 +
 dendropy/test/support/pathmap.py                   |  140 +
 dendropy/test/support/paupsplitsreference.py       |   59 +
 dendropy/test/support/standard_file_test_chars.py  |  372 ++
 .../test/support/standard_file_test_datasets.py    |  142 +
 dendropy/test/support/standard_file_test_trees.py  |  487 ++
 dendropy/test/test_asciitree.py                    |   51 +
 dendropy/test/test_birthdeath.py                   |   92 +
 dendropy/test/test_coalescent.py                   |   76 +
 .../test/test_container_frozen_ordered_dict.py     |   87 +
 .../test/test_container_normalized_bitmask_dict.py |   58 +
 dendropy/test/test_container_ordered_set.py        |  189 +
 dendropy/test/test_continuous.py                   |  294 +
 dendropy/test/test_dataio_basic_tree.py            |   39 +
 dendropy/test/test_dataio_fasta_reader.py          |   93 +
 dendropy/test/test_dataio_fasta_writer.py          |   79 +
 dendropy/test/test_dataio_newick_reader_rooting.py |  176 +
 dendropy/test/test_dataio_newick_reader_tree.py    |  934 +++
 .../test/test_dataio_newick_reader_tree_iter.py    |   39 +
 .../test/test_dataio_newick_reader_tree_list.py    |  205 +
 dendropy/test/test_dataio_newick_writer.py         |  444 ++
 dendropy/test/test_dataio_nexml_reader_chars.py    |  170 +
 .../test/test_dataio_nexml_reader_tree_list.py     |   70 +
 dendropy/test/test_dataio_nexml_writer_chars.py    |   83 +
 dendropy/test/test_dataio_nexml_writer_trees.py    |  100 +
 dendropy/test/test_dataio_nexus_reader_chars.py    |  288 +
 dendropy/test/test_dataio_nexus_reader_dataset.py  |  361 ++
 .../test/test_dataio_nexus_reader_tree_list.py     |  109 +
 dendropy/test/test_dataio_nexus_taxon_handling.py  |  279 +
 dendropy/test/test_dataio_nexus_tree_yielder.py    |   89 +
 dendropy/test/test_dataio_nexus_writer_chars.py    |   79 +
 dendropy/test/test_dataio_nexus_writer_dataset.py  |   91 +
 dendropy/test/test_dataio_nexus_writer_tree.py     |  407 ++
 .../test/test_dataio_nexus_writer_tree_list.py     |   60 +
 dendropy/test/test_dataio_phylip_reader_chars.py   |  310 +
 dendropy/test/test_dataio_phylip_writer_chars.py   |  116 +
 dendropy/test/test_dataio_tokenizer.py             |  163 +
 dendropy/test/test_datamodel_annotations.py        |   58 +
 dendropy/test/test_datamodel_bipartitions.py       |  105 +
 dendropy/test/test_datamodel_charmatrix.py         | 1126 ++++
 dendropy/test/test_datamodel_dataset.py            |  237 +
 dendropy/test/test_datamodel_split_bitmasks.py     |  380 ++
 dendropy/test/test_datamodel_statealphabet.py      |  581 ++
 dendropy/test/test_datamodel_taxon.py              |  897 +++
 ...model_tree_construction_copying_and_identity.py |  214 +
 .../test/test_datamodel_tree_edge_fundamentals.py  |  169 +
 dendropy/test/test_datamodel_tree_list.py          | 1174 ++++
 .../test/test_datamodel_tree_node_fundamentals.py  |  322 ++
 .../test_datamodel_tree_structure_and_iteration.py |  597 ++
 .../test/test_datamodel_tree_taxon_management.py   |  460 ++
 dendropy/test/test_datamodel_treearray.py          |   59 +
 dendropy/test/test_fitch.py                        |   93 +
 dendropy/test/test_paup.py                         |  259 +
 dendropy/test/test_popgenstat.py                   |  109 +
 dendropy/test/test_statistics.py                   |  644 +++
 .../test/test_tree_calculations_and_metrics.py     | 1119 ++++
 dendropy/test/test_tree_from_splits.py             |   55 +
 .../test/test_tree_operations_and_manipulations.py |  842 +++
 .../test_tree_reconciliation_and_discordance.py    |  159 +
 dendropy/test/test_tree_split_compatibility.py     |  167 +
 .../test/test_tree_summarization_and_consensus.py  |  384 ++
 dendropy/utility/__init__.py                       |   23 +
 dendropy/utility/bibtex.py                         |  326 ++
 dendropy/utility/bitprocessing.py                  |   95 +
 dendropy/utility/cli.py                            |  240 +
 dendropy/utility/constants.py                      |   22 +
 dendropy/utility/container.py                      |  912 +++
 dendropy/utility/debug.py                          |   37 +
 dendropy/utility/deprecate.py                      |   74 +
 dendropy/utility/error.py                          |  192 +
 dendropy/utility/filesys.py                        |  338 ++
 dendropy/utility/libexec/__init__.py               |   37 +
 dendropy/utility/messaging.py                      |  177 +
 dendropy/utility/metavar.py                        |   36 +
 dendropy/utility/processio.py                      |  110 +
 dendropy/utility/terminal.py                       |   59 +
 dendropy/utility/textprocessing.py                 |  182 +
 dendropy/utility/timeprocessing.py                 |   70 +
 dendropy/utility/urlio.py                          |   47 +
 dendropy/utility/vcsinfo.py                        |  246 +
 doc/Makefile                                       |  177 +
 doc/source/_static/Octocat.png                     |  Bin 0 -> 2131769 bytes
 doc/source/_static/dendropy3.png                   |  Bin 0 -> 6067 bytes
 doc/source/_static/dendropy_icon.png               |  Bin 0 -> 80904 bytes
 doc/source/_static/dendropy_logo.png               |  Bin 0 -> 11493 bytes
 doc/source/_static/google-groups-logo1.png         |  Bin 0 -> 13911 bytes
 doc/source/_static/logo_cipres.gif                 |  Bin 0 -> 5400 bytes
 doc/source/_static/nsf.gif                         |  Bin 0 -> 6975 bytes
 doc/source/_themes/rtd-fabric/static/rtd.css       |  773 +++
 doc/source/_themes/rtd-fabric/theme.conf           |    3 +
 doc/source/_themes/sphinx_dendropy_theme/logo.html |    3 +
 .../sphinx_dendropy_theme/side_supplemental.html   |   78 +
 .../_themes/sphinx_dendropy_theme/static/rtd.css   |  775 +++
 .../_themes/sphinx_dendropy_theme/theme.conf       |    3 +
 doc/source/acknowledgements.inc                    |    5 +
 doc/source/changes.rst                             |    3 +
 doc/source/citation.inc                            |   15 +
 doc/source/conf.py                                 |  559 ++
 doc/source/downloading.rst                         |   51 +
 doc/source/examples/bdtree_multi1.py               |   22 +
 doc/source/examples/bdtree_multi2.py               |   20 +
 doc/source/examples/bibtex_annotations1.py         |   24 +
 doc/source/examples/bibtex_annotations2.py         |   23 +
 doc/source/examples/bibtex_annotations3.py         |   29 +
 doc/source/examples/build_tree_programmatically.py |   51 +
 doc/source/examples/char_mat_concat.py             |   20 +
 doc/source/examples/char_mat_concat2.py            |   12 +
 doc/source/examples/chars_access1.py               |   18 +
 doc/source/examples/chars_access2.py               |   18 +
 doc/source/examples/chars_access3.py               |   32 +
 doc/source/examples/chars_access4.py               |   32 +
 doc/source/examples/contained_coalescent1.py       |   19 +
 doc/source/examples/contained_coalescent2.py       |   19 +
 doc/source/examples/ds1.py                         |   43 +
 doc/source/examples/ds2.py                         |   16 +
 doc/source/examples/ds4.py                         |   10 +
 doc/source/examples/ds5.py                         |   11 +
 doc/source/examples/dsrw1.py                       |   13 +
 doc/source/examples/dynamic_annotations1.py        |   34 +
 doc/source/examples/euctree.py                     |   19 +
 doc/source/examples/find_taxon_node1.py            |    8 +
 doc/source/examples/find_taxon_node2.py            |    7 +
 doc/source/examples/hiv1.nexus                     |  399 ++
 doc/source/examples/ladderize.py                   |   22 +
 doc/source/examples/ltt.py                         |   36 +
 doc/source/examples/majrule.py                     |   15 +
 doc/source/examples/mcct.py                        |   19 +
 doc/source/examples/mrca.py                        |    9 +
 doc/source/examples/mrca2.py                       |   11 +
 doc/source/examples/node_ages1.py                  |   19 +
 doc/source/examples/orti1994.nex                   |   32 +
 doc/source/examples/paup_estimate_model.py         |   18 +
 doc/source/examples/paup_estimate_tree_ml.py       |   15 +
 doc/source/examples/paup_estimate_tree_nj.py       |   11 +
 doc/source/examples/pbhg.py                        |   16 +
 doc/source/examples/pdm.py                         |   12 +
 doc/source/examples/pgstats1.py                    |   33 +
 doc/source/examples/pic1.py                        |   35 +
 doc/source/examples/pic2.py                        |    7 +
 doc/source/examples/pic3.py                        |   10 +
 doc/source/examples/pic4.py                        |   26 +
 doc/source/examples/pic_annotated.py               |   32 +
 .../examples/preorder_filtered_edge_iteration.py   |   10 +
 .../examples/preorder_filtered_node_iteration.py   |   10 +
 doc/source/examples/primates.cc.combined.nex       |   17 +
 doc/source/examples/primates.cc.nex                |   12 +
 doc/source/examples/primates.cc.tre                |    1 +
 .../examples/primates.chars.interleaved.nexus      |  228 +
 doc/source/examples/primates.chars.nexus           |   45 +
 .../primates.chars.simple.interleaved.nexus        |  202 +
 doc/source/examples/primates.chars.simple.nexus    |   27 +
 .../examples/primates.chars.subsets-1stpos.nexus   |   20 +
 .../examples/primates.chars.subsets-2ndpos.nexus   |   20 +
 .../examples/primates.chars.subsets-3rdpos.nexus   |   20 +
 .../examples/primates.chars.subsets-all.nexus      |   72 +
 .../examples/primates.chars.subsets-coding.nexus   |   20 +
 .../primates.chars.subsets-noncoding.nexus         |   20 +
 doc/source/examples/prune_taxa_with_labels.py      |   17 +
 doc/source/examples/pure_kingman1.py               |   11 +
 doc/source/examples/pythonidae.beast-mcmc.trees    | 1079 ++++
 doc/source/examples/pythonidae.chars.nexus         |   99 +
 doc/source/examples/pythonidae.chars.phylip        |   34 +
 doc/source/examples/pythonidae.fasta               |   99 +
 doc/source/examples/pythonidae.mb.run1.t           |  108 +
 doc/source/examples/pythonidae.mb.run2.t           |  108 +
 doc/source/examples/pythonidae.mb.run3.t           |  108 +
 doc/source/examples/pythonidae.mb.run4.t           |  108 +
 doc/source/examples/pythonidae.mcmc-con.nex        |   13 +
 doc/source/examples/pythonidae.mcmc.nex            | 1035 ++++
 doc/source/examples/pythonidae.mle.nex             |   46 +
 doc/source/examples/pythonidae.nex                 |   97 +
 doc/source/examples/pythonidae.random.bd0301.tre   |  143 +
 doc/source/examples/pythonidae_combined.nex        |  494 ++
 .../examples/pythonidae_continuous.chars.nexml     | 1339 +++++
 doc/source/examples/pythonidae_cytb.fasta          |   99 +
 doc/source/examples/raxml_estimate_tree.py         |   14 +
 doc/source/examples/readroot.py                    |   20 +
 doc/source/examples/readroot2.py                   |   14 +
 doc/source/examples/remove_branch_lengths.py       |   11 +
 doc/source/examples/reroot_at_external_edge.py     |   19 +
 doc/source/examples/reroot_at_internal_edge.py     |   19 +
 doc/source/examples/reroot_at_midpoint.py          |   18 +
 doc/source/examples/reroot_at_node.py              |   18 +
 doc/source/examples/rescale_tree_length.py         |   12 +
 doc/source/examples/retain_taxa_with_labels.py     |   18 +
 doc/source/examples/seqgen.py                      |   28 +
 doc/source/examples/setroot1.py                    |   26 +
 doc/source/examples/sim_and_count_deepcoal1.py     |   71 +
 doc/source/examples/sim_and_count_deepcoal2.py     |   65 +
 doc/source/examples/splitfreq.py                   |   16 +
 doc/source/examples/splits_on_trees.py             |   16 +
 doc/source/examples/symdiff1.py                    |   21 +
 doc/source/examples/symdiff2.py                    |   37 +
 doc/source/examples/taxa_mgmt1.py                  |   12 +
 doc/source/examples/taxa_mgmt1a.py                 |   10 +
 doc/source/examples/taxa_mgmt1b.py                 |   13 +
 doc/source/examples/taxa_mgmt2.py                  |   18 +
 doc/source/examples/taxa_mgmt3.py                  |   42 +
 doc/source/examples/taxa_mgmt4.py                  |   18 +
 doc/source/examples/taxa_mgmt5.py                  |   12 +
 doc/source/examples/taxon_labels1.py               |   35 +
 doc/source/examples/taxon_labels2.py               |   35 +
 doc/source/examples/taxon_labels2b.py              |   35 +
 doc/source/examples/taxon_labels3.py               |   35 +
 doc/source/examples/taxon_labels4.py               |   36 +
 doc/source/examples/to_outgroup_position.py        |   18 +
 doc/source/examples/tree_copy1.py                  |   26 +
 doc/source/examples/tree_copy2.py                  |   23 +
 doc/source/examples/tree_evolve_char1.py           |   20 +
 doc/source/examples/tree_evolve_char2.py           |   19 +
 doc/source/examples/tree_iter1.py                  |   30 +
 doc/source/examples/tree_iter2.py                  |   22 +
 doc/source/examples/tree_length_crit.py            |   14 +
 doc/source/examples/tree_list_add1.py              |    6 +
 doc/source/examples/tree_list_copy1.py             |   24 +
 doc/source/examples/tree_list_copy2.py             |   26 +
 doc/source/examples/tree_list_copy3.py             |   23 +
 doc/source/examples/tree_list_ops1.py              |   38 +
 doc/source/examples/tree_list_ops2.py              |   28 +
 doc/source/examples/tree_rootings1.py              |   36 +
 doc/source/examples/treemeasures1.py               |   30 +
 doc/source/examples/weightedrf1.py                 |   22 +
 doc/source/glossary.rst                            |  310 +
 doc/source/index.rst                               |  118 +
 doc/source/library/basemodel.rst                   |   38 +
 doc/source/library/birthdeath.rst                  |    8 +
 doc/source/library/charmatrixmodel.rst             |   88 +
 doc/source/library/charstatemodel.rst              |   19 +
 doc/source/library/coalescent.rst                  |    9 +
 doc/source/library/continuous.rst                  |    8 +
 doc/source/library/datasetmodel.rst                |    7 +
 doc/source/library/discrete.rst                    |    9 +
 doc/source/library/index.rst                       |   59 +
 doc/source/library/parsimony.rst                   |    9 +
 doc/source/library/popgensim.rst                   |   10 +
 doc/source/library/popgenstat.rst                  |    8 +
 doc/source/library/probability.rst                 |    8 +
 doc/source/library/reconcile.rst                   |    9 +
 doc/source/library/statistics.rst                  |    8 +
 doc/source/library/taxonmodel.rst                  |   23 +
 doc/source/library/treecollectionmodel.rst         |   54 +
 doc/source/library/treecompare.rst                 |    8 +
 doc/source/library/treemeasure.rst                 |    7 +
 doc/source/library/treemodel.rst                   |   33 +
 doc/source/library/treescore.rst                   |    8 +
 doc/source/library/treeshape.rst                   |    8 +
 doc/source/library/treesim.rst                     |   10 +
 doc/source/license.inc                             |   61 +
 doc/source/migration.rst                           |  269 +
 doc/source/primer/bipartitions.rst                 |  404 ++
 doc/source/primer/chars.rst                        |  181 +
 doc/source/primer/converting.rst                   |   86 +
 doc/source/primer/dataobjects.rst                  |   89 +
 doc/source/primer/datasets.rst                     |  178 +
 doc/source/primer/genbank.rst                      |  565 ++
 doc/source/primer/index.rst                        |   78 +
 doc/source/primer/paup.rst                         |   17 +
 .../primer/phylogenetic_character_analyses.rst     |  139 +
 doc/source/primer/popgenstats.rst                  |   46 +
 doc/source/primer/raxml.rst                        |   20 +
 doc/source/primer/reading_and_writing.rst          |  250 +
 doc/source/primer/seqgen.rst                       |   13 +
 doc/source/primer/taxa.rst                         |  277 +
 doc/source/primer/taxa_partitions.rst              |  142 +
 doc/source/primer/treecollections.rst              |  108 +
 doc/source/primer/treemanips.rst                   |  452 ++
 doc/source/primer/trees.rst                        |  220 +
 doc/source/primer/treesims.rst                     |  285 +
 doc/source/primer/treestats.rst                    |  189 +
 .../primer/working_with_metadata_annotations.rst   |  898 +++
 doc/source/programs/index.rst                      |    8 +
 doc/source/programs/sumtrees.rst                   |  567 ++
 doc/source/schemas/fasta.rst                       |  182 +
 doc/source/schemas/index.rst                       |   36 +
 doc/source/schemas/interfaces/fasta_as_string.py   |    6 +
 doc/source/schemas/interfaces/fasta_dataset_get.py |   36 +
 .../schemas/interfaces/fasta_dataset_read.py       |   40 +
 .../interfaces/fasta_dnacharactermatrix_get.py     |    9 +
 .../fasta_infinitesitescharactermatrix_get.py      |    9 +
 .../interfaces/fasta_proteincharactermatrix_get.py |    9 +
 .../fasta_restrictionsitescharactermatrix_get.py   |    9 +
 .../interfaces/fasta_rnacharactermatrix_get.py     |    8 +
 .../fasta_standardcharactermatrix_get.py           |   19 +
 doc/source/schemas/interfaces/fasta_write.py       |    7 +
 doc/source/schemas/interfaces/newick_as_string.py  |   22 +
 .../schemas/interfaces/newick_dataset_get.py       |   24 +
 .../schemas/interfaces/newick_dataset_read.py      |   23 +
 doc/source/schemas/interfaces/newick_tree_get.py   |   23 +
 .../interfaces/newick_tree_get_from_path.py        |   22 +
 .../interfaces/newick_tree_get_from_stream.py      |   22 +
 .../schemas/interfaces/newick_treearray_read.py    |   22 +
 .../schemas/interfaces/newick_treelist_get.py      |   21 +
 .../schemas/interfaces/newick_treelist_read.py     |   20 +
 doc/source/schemas/interfaces/newick_write.py      |   23 +
 .../schemas/interfaces/nexml_chars_as_string.py    |    8 +
 doc/source/schemas/interfaces/nexml_chars_write.py |    9 +
 .../schemas/interfaces/nexml_dataset_as_string.py  |    8 +
 doc/source/schemas/interfaces/nexml_dataset_get.py |   14 +
 .../schemas/interfaces/nexml_dataset_read.py       |   13 +
 .../schemas/interfaces/nexml_dataset_write.py      |    9 +
 .../interfaces/nexml_dnacharactermatrix_get.py     |   10 +
 .../nexml_infinitesitescharactermatrix_get.py      |   10 +
 .../interfaces/nexml_proteincharactermatrix_get.py |   10 +
 .../nexml_restrictionsitescharactermatrix_get.py   |   10 +
 .../interfaces/nexml_rnacharactermatrix_get.py     |   10 +
 .../nexml_standardcharactermatrix_get.py           |   10 +
 doc/source/schemas/interfaces/nexml_tree_get.py    |   11 +
 .../schemas/interfaces/nexml_treearray_get.py      |   11 +
 .../schemas/interfaces/nexml_treearray_read.py     |   10 +
 .../schemas/interfaces/nexml_treelist_get.py       |   11 +
 .../schemas/interfaces/nexml_treelist_read.py      |   10 +
 .../schemas/interfaces/nexml_trees_as_string.py    |    6 +
 doc/source/schemas/interfaces/nexml_trees_write.py |    7 +
 .../schemas/interfaces/nexus_chars_as_string.py    |   20 +
 doc/source/schemas/interfaces/nexus_chars_write.py |   21 +
 .../schemas/interfaces/nexus_dataset_as_string.py  |   32 +
 doc/source/schemas/interfaces/nexus_dataset_get.py |   24 +
 .../schemas/interfaces/nexus_dataset_read.py       |   23 +
 .../schemas/interfaces/nexus_dataset_write.py      |   33 +
 .../interfaces/nexus_dnacharactermatrix_get.py     |    9 +
 .../nexus_infinitesitescharactermatrix_get.py      |    9 +
 .../interfaces/nexus_proteincharactermatrix_get.py |    9 +
 .../nexus_restrictionsitescharactermatrix_get.py   |    9 +
 .../interfaces/nexus_rnacharactermatrix_get.py     |    8 +
 .../nexus_standardcharactermatrix_get.py           |   19 +
 doc/source/schemas/interfaces/nexus_tree_get.py    |   23 +
 .../schemas/interfaces/nexus_tree_get_from_path.py |   22 +
 .../interfaces/nexus_tree_get_from_stream.py       |   22 +
 .../schemas/interfaces/nexus_treearray_read.py     |   22 +
 .../schemas/interfaces/nexus_treelist_get.py       |   21 +
 .../schemas/interfaces/nexus_treelist_read.py      |   20 +
 .../schemas/interfaces/nexus_trees_as_string.py    |   28 +
 doc/source/schemas/interfaces/nexus_trees_write.py |   29 +
 doc/source/schemas/interfaces/phylip_as_string.py  |    8 +
 .../schemas/interfaces/phylip_dataset_get.py       |   56 +
 .../schemas/interfaces/phylip_dataset_read.py      |   60 +
 .../interfaces/phylip_dnacharactermatrix_get.py    |   13 +
 .../phylip_infinitesitescharactermatrix_get.py     |   13 +
 .../phylip_proteincharactermatrix_get.py           |   13 +
 .../phylip_restrictionsitescharactermatrix_get.py  |   13 +
 .../interfaces/phylip_rnacharactermatrix_get.py    |   14 +
 .../phylip_standardcharactermatrix_get.py          |   29 +
 doc/source/schemas/interfaces/phylip_write.py      |    9 +
 doc/source/schemas/newick.rst                      |  114 +
 doc/source/schemas/nexml.rst                       |  224 +
 doc/source/schemas/nexus.rst                       |  225 +
 doc/source/schemas/phylip.rst                      |  180 +
 setup.cfg                                          |   13 +
 setup.py                                           |  170 +
 458 files changed, 81148 insertions(+)

diff --git a/CHANGES.rst b/CHANGES.rst
new file mode 100644
index 0000000..e0c4b20
--- /dev/null
+++ b/CHANGES.rst
@@ -0,0 +1,15 @@
+Release 4.0.1
+-------------
+
+Bug Fixes
+^^^^^^^^^
+
+    -   Fix for installing in virtual environments under ``virtualenv``.
+
+Release 4.0.2
+-------------
+
+Bug Fixes
+^^^^^^^^^
+
+    -   Adjustment of child edge lengths when collapsing basal bifurcations.
diff --git a/DendroPy.egg-info/PKG-INFO b/DendroPy.egg-info/PKG-INFO
new file mode 100644
index 0000000..a017d89
--- /dev/null
+++ b/DendroPy.egg-info/PKG-INFO
@@ -0,0 +1,116 @@
+Metadata-Version: 1.1
+Name: DendroPy
+Version: 4.0.2
+Summary: A Python library for phylogenetics and phylogenetic computing: reading, writing, simulation, processing and manipulation of phylogenetic trees (phylogenies) and characters.
+Home-page: http://packages.python.org/DendroPy/
+Author: Jeet Sukumaran and Mark T. Holder
+Author-email: jeetsukumaran at gmail.com and mtholder at ku.edu
+License: BSD
+Description: .. image:: https://raw.githubusercontent.com/jeetsukumaran/DendroPy/DendroPy4/doc/source/_static/dendropy_logo.png
+           :align: right
+           :alt: DendroPy
+        
+        DendroPy is a Python library for phylogenetic computing.
+        It provides classes and functions for the simulation, processing, and
+        manipulation of phylogenetic trees and character matrices, and supports the
+        reading and writing of phylogenetic data in a range of formats, such as NEXUS,
+        NEWICK, NeXML, Phylip, FASTA, etc.  Application scripts for performing some
+        useful phylogenetic operations, such as data conversion and tree posterior
+        distribution summarization, are also distributed and installed as part of the
+        library.  DendroPy can thus function as a stand-alone library for phylogenetics,
+        a component of more complex multi-library phyloinformatic pipelines, or as a
+        scripting "glue" that assembles and drives such pipelines.
+        
+        The primary home page for DendroPy, with detailed tutorials and documentation, is at:
+        
+            http://dendropy.org/
+        
+        DendroPy is also hosted in the official Python repository:
+        
+            http://packages.python.org/DendroPy/
+        
+        Requirements and Installation
+        =============================
+        
+        DendroPy 4.x runs under Python 3 (all versions > 3.1) and Python 2 (Python 2.7 only).
+        
+        You can install DendroPy by running::
+        
+            $ sudo pip install dendropy
+        
+        More information is available here:
+        
+            http://dendropy.org/downloading.html
+        
+        Documentation
+        =============
+        
+        Full documentation is available here:
+        
+            http://dendropy.org/
+        
+        This includes:
+        
+            -   `A comprehensive "getting started" primer <http://dendropy.org/primer/index.html>`_ .
+            -   `API documentation <http://dendropy.org/library/index.html>`_ .
+            -   `Descriptions of data formats supported for reading/writing <http://dendropy.org/schemas/index.html>`_ .
+        
+        and more.
+        
+        Testing
+        =======
+        
+        Tests can be run by typing::
+        
+            $ python -m dendropy.test
+        
+        By default, all tests are run. You can run specific tests by providing the
+        fully-qualified name of the modules, test cases, or specific test methods to
+        run, e.g.::
+        
+            $ python -m dendropy.test test_tokenizer
+            $ python -m dendropy.test test_tokenizer.TestCase
+            $ python -m dendropy.test test_tokenizer.TestCase.test1
+            $ python -m dendropy.test test_tokenizer test_datamodel_taxon
+        
+        Or special pre-defined sub-groups of tests, e.g.::
+        
+            $ python -m dendropy.test @datamodel
+            $ python -m dendropy.test @dataio
+            $ python -m dendropy.test @datamodel @newick
+        
+        A list of all available sub-groups can be seen by::
+        
+            $ python -m dendropy.test --help-testgroups
+        
+        For any tests run, you can set the level at which the test progress is logged
+        by::
+        
+            $ python -m dendropy.test -l DEBUG all
+        
+        For all options, type::
+        
+            $ python -m dendropy.test --help
+        
+        
+        Current Release
+        ===============
+        
+        The current release of DendroPy is version 4.0.2 (master-e02fbea, 2015-06-09 17:33:05).
+        
+        
+Keywords: phylogenetics phylogeny phylogenies phylogeography evolution evolutionary biology systematics coalescent population genetics phyloinformatics bioinformatics
+Platform: UNKNOWN
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python
+Classifier: Topic :: Scientific/Engineering :: Bio-Informatics
diff --git a/DendroPy.egg-info/SOURCES.txt b/DendroPy.egg-info/SOURCES.txt
new file mode 100644
index 0000000..36a0508
--- /dev/null
+++ b/DendroPy.egg-info/SOURCES.txt
@@ -0,0 +1,457 @@
+CHANGES.rst
+LICENSE.rst
+MANIFEST.in
+README.rst
+setup.cfg
+setup.py
+DendroPy.egg-info/PKG-INFO
+DendroPy.egg-info/SOURCES.txt
+DendroPy.egg-info/dependency_links.txt
+DendroPy.egg-info/entry_points.txt
+DendroPy.egg-info/requires.txt
+DendroPy.egg-info/top_level.txt
+DendroPy.egg-info/zip-safe
+applications/sumtrees/sumtrees.py
+dendropy/__init__.py
+dendropy/__main__.py
+dendropy/calculate/__init__.py
+dendropy/calculate/mathfn.py
+dendropy/calculate/popgenstat.py
+dendropy/calculate/probability.py
+dendropy/calculate/statistics.py
+dendropy/calculate/treecompare.py
+dendropy/calculate/treemeasure.py
+dendropy/calculate/treescore.py
+dendropy/calculate/treesum.py
+dendropy/dataio/__init__.py
+dendropy/dataio/fastareader.py
+dendropy/dataio/fastawriter.py
+dendropy/dataio/ioservice.py
+dendropy/dataio/newick.py
+dendropy/dataio/newickreader.py
+dendropy/dataio/newickwriter.py
+dendropy/dataio/newickyielder.py
+dendropy/dataio/nexmlreader.py
+dendropy/dataio/nexmlwriter.py
+dendropy/dataio/nexmlyielder.py
+dendropy/dataio/nexusprocessing.py
+dendropy/dataio/nexusreader.py
+dendropy/dataio/nexuswriter.py
+dendropy/dataio/nexusyielder.py
+dendropy/dataio/phylipreader.py
+dendropy/dataio/phylipwriter.py
+dendropy/dataio/tokenizer.py
+dendropy/dataio/xmlprocessing.py
+dendropy/datamodel/__init__.py
+dendropy/datamodel/basemodel.py
+dendropy/datamodel/charmatrixmodel.py
+dendropy/datamodel/charstatemodel.py
+dendropy/datamodel/datasetmodel.py
+dendropy/datamodel/taxonmodel.py
+dendropy/datamodel/treecollectionmodel.py
+dendropy/datamodel/treemodel.py
+dendropy/interop/__init__.py
+dendropy/interop/ape.py
+dendropy/interop/biopython.py
+dendropy/interop/entrez.py
+dendropy/interop/ete.py
+dendropy/interop/gbif.py
+dendropy/interop/genbank.py
+dendropy/interop/muscle.py
+dendropy/interop/paup.py
+dendropy/interop/raxml.py
+dendropy/interop/rstats.py
+dendropy/interop/seqgen.py
+dendropy/legacy/__init__.py
+dendropy/legacy/coalescent.py
+dendropy/legacy/continuous.py
+dendropy/legacy/ncbi.py
+dendropy/legacy/popgensim.py
+dendropy/legacy/popgenstat.py
+dendropy/legacy/reconcile.py
+dendropy/legacy/seqmodel.py
+dendropy/legacy/seqsim.py
+dendropy/legacy/treecalc.py
+dendropy/legacy/treemanip.py
+dendropy/legacy/treesim.py
+dendropy/legacy/treesplit.py
+dendropy/legacy/treesum.py
+dendropy/mathlib/__init__.py
+dendropy/mathlib/mathfn.py
+dendropy/mathlib/probability.py
+dendropy/mathlib/statistics.py
+dendropy/model/__init__.py
+dendropy/model/birthdeath.py
+dendropy/model/coalescent.py
+dendropy/model/continuous.py
+dendropy/model/discrete.py
+dendropy/model/parsimony.py
+dendropy/model/reconcile.py
+dendropy/model/treeshape.py
+dendropy/simulate/__init__.py
+dendropy/simulate/charsim.py
+dendropy/simulate/popgensim.py
+dendropy/simulate/treesim.py
+dendropy/test/__init__.py
+dendropy/test/__main__.py
+dendropy/test/base_newick_test_cases.py
+dendropy/test/base_standard_trees_parsing_test_cases.py
+dendropy/test/test_asciitree.py
+dendropy/test/test_birthdeath.py
+dendropy/test/test_coalescent.py
+dendropy/test/test_container_frozen_ordered_dict.py
+dendropy/test/test_container_normalized_bitmask_dict.py
+dendropy/test/test_container_ordered_set.py
+dendropy/test/test_continuous.py
+dendropy/test/test_dataio_basic_tree.py
+dendropy/test/test_dataio_fasta_reader.py
+dendropy/test/test_dataio_fasta_writer.py
+dendropy/test/test_dataio_newick_reader_rooting.py
+dendropy/test/test_dataio_newick_reader_tree.py
+dendropy/test/test_dataio_newick_reader_tree_iter.py
+dendropy/test/test_dataio_newick_reader_tree_list.py
+dendropy/test/test_dataio_newick_writer.py
+dendropy/test/test_dataio_nexml_reader_chars.py
+dendropy/test/test_dataio_nexml_reader_tree_list.py
+dendropy/test/test_dataio_nexml_writer_chars.py
+dendropy/test/test_dataio_nexml_writer_trees.py
+dendropy/test/test_dataio_nexus_reader_chars.py
+dendropy/test/test_dataio_nexus_reader_dataset.py
+dendropy/test/test_dataio_nexus_reader_tree_list.py
+dendropy/test/test_dataio_nexus_taxon_handling.py
+dendropy/test/test_dataio_nexus_tree_yielder.py
+dendropy/test/test_dataio_nexus_writer_chars.py
+dendropy/test/test_dataio_nexus_writer_dataset.py
+dendropy/test/test_dataio_nexus_writer_tree.py
+dendropy/test/test_dataio_nexus_writer_tree_list.py
+dendropy/test/test_dataio_phylip_reader_chars.py
+dendropy/test/test_dataio_phylip_writer_chars.py
+dendropy/test/test_dataio_tokenizer.py
+dendropy/test/test_datamodel_annotations.py
+dendropy/test/test_datamodel_bipartitions.py
+dendropy/test/test_datamodel_charmatrix.py
+dendropy/test/test_datamodel_dataset.py
+dendropy/test/test_datamodel_split_bitmasks.py
+dendropy/test/test_datamodel_statealphabet.py
+dendropy/test/test_datamodel_taxon.py
+dendropy/test/test_datamodel_tree_construction_copying_and_identity.py
+dendropy/test/test_datamodel_tree_edge_fundamentals.py
+dendropy/test/test_datamodel_tree_list.py
+dendropy/test/test_datamodel_tree_node_fundamentals.py
+dendropy/test/test_datamodel_tree_structure_and_iteration.py
+dendropy/test/test_datamodel_tree_taxon_management.py
+dendropy/test/test_datamodel_treearray.py
+dendropy/test/test_fitch.py
+dendropy/test/test_paup.py
+dendropy/test/test_popgenstat.py
+dendropy/test/test_statistics.py
+dendropy/test/test_tree_calculations_and_metrics.py
+dendropy/test/test_tree_from_splits.py
+dendropy/test/test_tree_operations_and_manipulations.py
+dendropy/test/test_tree_reconciliation_and_discordance.py
+dendropy/test/test_tree_split_compatibility.py
+dendropy/test/test_tree_summarization_and_consensus.py
+dendropy/test/benchmark/__init__.py
+dendropy/test/benchmark/benchmark_newick_light_tree_parser.py
+dendropy/test/benchmark/benchmark_newick_tree_parser.py
+dendropy/test/benchmark/benchmark_tokenizer.py
+dendropy/test/support/__init__.py
+dendropy/test/support/compare_and_validate.py
+dendropy/test/support/coverage_analysis.py
+dendropy/test/support/curated_dataset.py
+dendropy/test/support/curated_test_tree.py
+dendropy/test/support/curated_test_tree_list.py
+dendropy/test/support/dendropytest.py
+dendropy/test/support/mockrandom.py
+dendropy/test/support/mockreader.py
+dendropy/test/support/pathmap.py
+dendropy/test/support/paupsplitsreference.py
+dendropy/test/support/standard_file_test_chars.py
+dendropy/test/support/standard_file_test_datasets.py
+dendropy/test/support/standard_file_test_trees.py
+dendropy/utility/__init__.py
+dendropy/utility/bibtex.py
+dendropy/utility/bitprocessing.py
+dendropy/utility/cli.py
+dendropy/utility/constants.py
+dendropy/utility/container.py
+dendropy/utility/debug.py
+dendropy/utility/deprecate.py
+dendropy/utility/error.py
+dendropy/utility/filesys.py
+dendropy/utility/messaging.py
+dendropy/utility/metavar.py
+dendropy/utility/processio.py
+dendropy/utility/terminal.py
+dendropy/utility/textprocessing.py
+dendropy/utility/timeprocessing.py
+dendropy/utility/urlio.py
+dendropy/utility/vcsinfo.py
+dendropy/utility/libexec/__init__.py
+doc/Makefile
+doc/source/acknowledgements.inc
+doc/source/changes.rst
+doc/source/citation.inc
+doc/source/conf.py
+doc/source/downloading.rst
+doc/source/glossary.rst
+doc/source/index.rst
+doc/source/license.inc
+doc/source/migration.rst
+doc/source/_static/Octocat.png
+doc/source/_static/dendropy3.png
+doc/source/_static/dendropy_icon.png
+doc/source/_static/dendropy_logo.png
+doc/source/_static/google-groups-logo1.png
+doc/source/_static/logo_cipres.gif
+doc/source/_static/nsf.gif
+doc/source/_themes/rtd-fabric/theme.conf
+doc/source/_themes/rtd-fabric/static/rtd.css
+doc/source/_themes/sphinx_dendropy_theme/logo.html
+doc/source/_themes/sphinx_dendropy_theme/side_supplemental.html
+doc/source/_themes/sphinx_dendropy_theme/theme.conf
+doc/source/_themes/sphinx_dendropy_theme/static/rtd.css
+doc/source/examples/bdtree_multi1.py
+doc/source/examples/bdtree_multi2.py
+doc/source/examples/bibtex_annotations1.py
+doc/source/examples/bibtex_annotations2.py
+doc/source/examples/bibtex_annotations3.py
+doc/source/examples/build_tree_programmatically.py
+doc/source/examples/char_mat_concat.py
+doc/source/examples/char_mat_concat2.py
+doc/source/examples/chars_access1.py
+doc/source/examples/chars_access2.py
+doc/source/examples/chars_access3.py
+doc/source/examples/chars_access4.py
+doc/source/examples/contained_coalescent1.py
+doc/source/examples/contained_coalescent2.py
+doc/source/examples/ds1.py
+doc/source/examples/ds2.py
+doc/source/examples/ds4.py
+doc/source/examples/ds5.py
+doc/source/examples/dsrw1.py
+doc/source/examples/dynamic_annotations1.py
+doc/source/examples/euctree.py
+doc/source/examples/find_taxon_node1.py
+doc/source/examples/find_taxon_node2.py
+doc/source/examples/hiv1.nexus
+doc/source/examples/ladderize.py
+doc/source/examples/ltt.py
+doc/source/examples/majrule.py
+doc/source/examples/mcct.py
+doc/source/examples/mrca.py
+doc/source/examples/mrca2.py
+doc/source/examples/node_ages1.py
+doc/source/examples/orti1994.nex
+doc/source/examples/paup_estimate_model.py
+doc/source/examples/paup_estimate_tree_ml.py
+doc/source/examples/paup_estimate_tree_nj.py
+doc/source/examples/pbhg.py
+doc/source/examples/pdm.py
+doc/source/examples/pgstats1.py
+doc/source/examples/pic1.py
+doc/source/examples/pic2.py
+doc/source/examples/pic3.py
+doc/source/examples/pic4.py
+doc/source/examples/pic_annotated.py
+doc/source/examples/preorder_filtered_edge_iteration.py
+doc/source/examples/preorder_filtered_node_iteration.py
+doc/source/examples/primates.cc.combined.nex
+doc/source/examples/primates.cc.nex
+doc/source/examples/primates.cc.tre
+doc/source/examples/primates.chars.interleaved.nexus
+doc/source/examples/primates.chars.nexus
+doc/source/examples/primates.chars.simple.interleaved.nexus
+doc/source/examples/primates.chars.simple.nexus
+doc/source/examples/primates.chars.subsets-1stpos.nexus
+doc/source/examples/primates.chars.subsets-2ndpos.nexus
+doc/source/examples/primates.chars.subsets-3rdpos.nexus
+doc/source/examples/primates.chars.subsets-all.nexus
+doc/source/examples/primates.chars.subsets-coding.nexus
+doc/source/examples/primates.chars.subsets-noncoding.nexus
+doc/source/examples/prune_taxa_with_labels.py
+doc/source/examples/pure_kingman1.py
+doc/source/examples/pythonidae.beast-mcmc.trees
+doc/source/examples/pythonidae.chars.nexus
+doc/source/examples/pythonidae.chars.phylip
+doc/source/examples/pythonidae.fasta
+doc/source/examples/pythonidae.mb.run1.t
+doc/source/examples/pythonidae.mb.run2.t
+doc/source/examples/pythonidae.mb.run3.t
+doc/source/examples/pythonidae.mb.run4.t
+doc/source/examples/pythonidae.mcmc-con.nex
+doc/source/examples/pythonidae.mcmc.nex
+doc/source/examples/pythonidae.mle.nex
+doc/source/examples/pythonidae.nex
+doc/source/examples/pythonidae.random.bd0301.tre
+doc/source/examples/pythonidae_combined.nex
+doc/source/examples/pythonidae_continuous.chars.nexml
+doc/source/examples/pythonidae_cytb.fasta
+doc/source/examples/raxml_estimate_tree.py
+doc/source/examples/readroot.py
+doc/source/examples/readroot2.py
+doc/source/examples/remove_branch_lengths.py
+doc/source/examples/reroot_at_external_edge.py
+doc/source/examples/reroot_at_internal_edge.py
+doc/source/examples/reroot_at_midpoint.py
+doc/source/examples/reroot_at_node.py
+doc/source/examples/rescale_tree_length.py
+doc/source/examples/retain_taxa_with_labels.py
+doc/source/examples/seqgen.py
+doc/source/examples/setroot1.py
+doc/source/examples/sim_and_count_deepcoal1.py
+doc/source/examples/sim_and_count_deepcoal2.py
+doc/source/examples/splitfreq.py
+doc/source/examples/splits_on_trees.py
+doc/source/examples/symdiff1.py
+doc/source/examples/symdiff2.py
+doc/source/examples/taxa_mgmt1.py
+doc/source/examples/taxa_mgmt1a.py
+doc/source/examples/taxa_mgmt1b.py
+doc/source/examples/taxa_mgmt2.py
+doc/source/examples/taxa_mgmt3.py
+doc/source/examples/taxa_mgmt4.py
+doc/source/examples/taxa_mgmt5.py
+doc/source/examples/taxon_labels1.py
+doc/source/examples/taxon_labels2.py
+doc/source/examples/taxon_labels2b.py
+doc/source/examples/taxon_labels3.py
+doc/source/examples/taxon_labels4.py
+doc/source/examples/to_outgroup_position.py
+doc/source/examples/tree_copy1.py
+doc/source/examples/tree_copy2.py
+doc/source/examples/tree_evolve_char1.py
+doc/source/examples/tree_evolve_char2.py
+doc/source/examples/tree_iter1.py
+doc/source/examples/tree_iter2.py
+doc/source/examples/tree_length_crit.py
+doc/source/examples/tree_list_add1.py
+doc/source/examples/tree_list_copy1.py
+doc/source/examples/tree_list_copy2.py
+doc/source/examples/tree_list_copy3.py
+doc/source/examples/tree_list_ops1.py
+doc/source/examples/tree_list_ops2.py
+doc/source/examples/tree_rootings1.py
+doc/source/examples/treemeasures1.py
+doc/source/examples/weightedrf1.py
+doc/source/library/basemodel.rst
+doc/source/library/birthdeath.rst
+doc/source/library/charmatrixmodel.rst
+doc/source/library/charstatemodel.rst
+doc/source/library/coalescent.rst
+doc/source/library/continuous.rst
+doc/source/library/datasetmodel.rst
+doc/source/library/discrete.rst
+doc/source/library/index.rst
+doc/source/library/parsimony.rst
+doc/source/library/popgensim.rst
+doc/source/library/popgenstat.rst
+doc/source/library/probability.rst
+doc/source/library/reconcile.rst
+doc/source/library/statistics.rst
+doc/source/library/taxonmodel.rst
+doc/source/library/treecollectionmodel.rst
+doc/source/library/treecompare.rst
+doc/source/library/treemeasure.rst
+doc/source/library/treemodel.rst
+doc/source/library/treescore.rst
+doc/source/library/treeshape.rst
+doc/source/library/treesim.rst
+doc/source/primer/bipartitions.rst
+doc/source/primer/chars.rst
+doc/source/primer/converting.rst
+doc/source/primer/dataobjects.rst
+doc/source/primer/datasets.rst
+doc/source/primer/genbank.rst
+doc/source/primer/index.rst
+doc/source/primer/paup.rst
+doc/source/primer/phylogenetic_character_analyses.rst
+doc/source/primer/popgenstats.rst
+doc/source/primer/raxml.rst
+doc/source/primer/reading_and_writing.rst
+doc/source/primer/seqgen.rst
+doc/source/primer/taxa.rst
+doc/source/primer/taxa_partitions.rst
+doc/source/primer/treecollections.rst
+doc/source/primer/treemanips.rst
+doc/source/primer/trees.rst
+doc/source/primer/treesims.rst
+doc/source/primer/treestats.rst
+doc/source/primer/working_with_metadata_annotations.rst
+doc/source/programs/index.rst
+doc/source/programs/sumtrees.rst
+doc/source/schemas/fasta.rst
+doc/source/schemas/index.rst
+doc/source/schemas/newick.rst
+doc/source/schemas/nexml.rst
+doc/source/schemas/nexus.rst
+doc/source/schemas/phylip.rst
+doc/source/schemas/interfaces/fasta_as_string.py
+doc/source/schemas/interfaces/fasta_dataset_get.py
+doc/source/schemas/interfaces/fasta_dataset_read.py
+doc/source/schemas/interfaces/fasta_dnacharactermatrix_get.py
+doc/source/schemas/interfaces/fasta_infinitesitescharactermatrix_get.py
+doc/source/schemas/interfaces/fasta_proteincharactermatrix_get.py
+doc/source/schemas/interfaces/fasta_restrictionsitescharactermatrix_get.py
+doc/source/schemas/interfaces/fasta_rnacharactermatrix_get.py
+doc/source/schemas/interfaces/fasta_standardcharactermatrix_get.py
+doc/source/schemas/interfaces/fasta_write.py
+doc/source/schemas/interfaces/newick_as_string.py
+doc/source/schemas/interfaces/newick_dataset_get.py
+doc/source/schemas/interfaces/newick_dataset_read.py
+doc/source/schemas/interfaces/newick_tree_get.py
+doc/source/schemas/interfaces/newick_tree_get_from_path.py
+doc/source/schemas/interfaces/newick_tree_get_from_stream.py
+doc/source/schemas/interfaces/newick_treearray_read.py
+doc/source/schemas/interfaces/newick_treelist_get.py
+doc/source/schemas/interfaces/newick_treelist_read.py
+doc/source/schemas/interfaces/newick_write.py
+doc/source/schemas/interfaces/nexml_chars_as_string.py
+doc/source/schemas/interfaces/nexml_chars_write.py
+doc/source/schemas/interfaces/nexml_dataset_as_string.py
+doc/source/schemas/interfaces/nexml_dataset_get.py
+doc/source/schemas/interfaces/nexml_dataset_read.py
+doc/source/schemas/interfaces/nexml_dataset_write.py
+doc/source/schemas/interfaces/nexml_dnacharactermatrix_get.py
+doc/source/schemas/interfaces/nexml_infinitesitescharactermatrix_get.py
+doc/source/schemas/interfaces/nexml_proteincharactermatrix_get.py
+doc/source/schemas/interfaces/nexml_restrictionsitescharactermatrix_get.py
+doc/source/schemas/interfaces/nexml_rnacharactermatrix_get.py
+doc/source/schemas/interfaces/nexml_standardcharactermatrix_get.py
+doc/source/schemas/interfaces/nexml_tree_get.py
+doc/source/schemas/interfaces/nexml_treearray_get.py
+doc/source/schemas/interfaces/nexml_treearray_read.py
+doc/source/schemas/interfaces/nexml_treelist_get.py
+doc/source/schemas/interfaces/nexml_treelist_read.py
+doc/source/schemas/interfaces/nexml_trees_as_string.py
+doc/source/schemas/interfaces/nexml_trees_write.py
+doc/source/schemas/interfaces/nexus_chars_as_string.py
+doc/source/schemas/interfaces/nexus_chars_write.py
+doc/source/schemas/interfaces/nexus_dataset_as_string.py
+doc/source/schemas/interfaces/nexus_dataset_get.py
+doc/source/schemas/interfaces/nexus_dataset_read.py
+doc/source/schemas/interfaces/nexus_dataset_write.py
+doc/source/schemas/interfaces/nexus_dnacharactermatrix_get.py
+doc/source/schemas/interfaces/nexus_infinitesitescharactermatrix_get.py
+doc/source/schemas/interfaces/nexus_proteincharactermatrix_get.py
+doc/source/schemas/interfaces/nexus_restrictionsitescharactermatrix_get.py
+doc/source/schemas/interfaces/nexus_rnacharactermatrix_get.py
+doc/source/schemas/interfaces/nexus_standardcharactermatrix_get.py
+doc/source/schemas/interfaces/nexus_tree_get.py
+doc/source/schemas/interfaces/nexus_tree_get_from_path.py
+doc/source/schemas/interfaces/nexus_tree_get_from_stream.py
+doc/source/schemas/interfaces/nexus_treearray_read.py
+doc/source/schemas/interfaces/nexus_treelist_get.py
+doc/source/schemas/interfaces/nexus_treelist_read.py
+doc/source/schemas/interfaces/nexus_trees_as_string.py
+doc/source/schemas/interfaces/nexus_trees_write.py
+doc/source/schemas/interfaces/phylip_as_string.py
+doc/source/schemas/interfaces/phylip_dataset_get.py
+doc/source/schemas/interfaces/phylip_dataset_read.py
+doc/source/schemas/interfaces/phylip_dnacharactermatrix_get.py
+doc/source/schemas/interfaces/phylip_infinitesitescharactermatrix_get.py
+doc/source/schemas/interfaces/phylip_proteincharactermatrix_get.py
+doc/source/schemas/interfaces/phylip_restrictionsitescharactermatrix_get.py
+doc/source/schemas/interfaces/phylip_rnacharactermatrix_get.py
+doc/source/schemas/interfaces/phylip_standardcharactermatrix_get.py
+doc/source/schemas/interfaces/phylip_write.py
\ No newline at end of file
diff --git a/DendroPy.egg-info/dependency_links.txt b/DendroPy.egg-info/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/DendroPy.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/DendroPy.egg-info/entry_points.txt b/DendroPy.egg-info/entry_points.txt
new file mode 100644
index 0000000..1d11c4d
--- /dev/null
+++ b/DendroPy.egg-info/entry_points.txt
@@ -0,0 +1,3 @@
+[distutils.commands]
+
+
diff --git a/DendroPy.egg-info/requires.txt b/DendroPy.egg-info/requires.txt
new file mode 100644
index 0000000..8b6d003
--- /dev/null
+++ b/DendroPy.egg-info/requires.txt
@@ -0,0 +1 @@
+setuptools
\ No newline at end of file
diff --git a/DendroPy.egg-info/top_level.txt b/DendroPy.egg-info/top_level.txt
new file mode 100644
index 0000000..11eec60
--- /dev/null
+++ b/DendroPy.egg-info/top_level.txt
@@ -0,0 +1 @@
+dendropy
diff --git a/DendroPy.egg-info/zip-safe b/DendroPy.egg-info/zip-safe
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/DendroPy.egg-info/zip-safe
@@ -0,0 +1 @@
+
diff --git a/LICENSE.rst b/LICENSE.rst
new file mode 100644
index 0000000..6ffb85a
--- /dev/null
+++ b/LICENSE.rst
@@ -0,0 +1,36 @@
+DendroPy Phylogenetic Computing Library
+
+Copyright (c) 2014 Jeet Sukumaran and Mark T. Holder.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+      notice, this list of conditions and the following disclaimer in the
+      documentation and/or other materials provided with the distribution.
+    * The names of its contributors may not be used to endorse or promote
+      products derived from this software without specific prior written
+      permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JEET SUKUMARAN OR MARK T. HOLDER
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+
+If you use this work or any portion thereof in published work,
+please cite it as:
+
+   Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+   for phylogenetic computing. Bioinformatics 26: 1569-1571.
+
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..4179643
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,11 @@
+include README.rst
+include CHANGES.rst
+include LICENSE.rst
+include doc/Makefile
+recursive-include doc/source *
+global-exclude .DS_Store
+global-exclude *.pyc
+global-exclude .gitignore
+global-exclude .gitattributes
+global-exclude .idea
+global-exclude __pycache__
diff --git a/PKG-INFO b/PKG-INFO
new file mode 100644
index 0000000..a017d89
--- /dev/null
+++ b/PKG-INFO
@@ -0,0 +1,116 @@
+Metadata-Version: 1.1
+Name: DendroPy
+Version: 4.0.2
+Summary: A Python library for phylogenetics and phylogenetic computing: reading, writing, simulation, processing and manipulation of phylogenetic trees (phylogenies) and characters.
+Home-page: http://packages.python.org/DendroPy/
+Author: Jeet Sukumaran and Mark T. Holder
+Author-email: jeetsukumaran at gmail.com and mtholder at ku.edu
+License: BSD
+Description: .. image:: https://raw.githubusercontent.com/jeetsukumaran/DendroPy/DendroPy4/doc/source/_static/dendropy_logo.png
+           :align: right
+           :alt: DendroPy
+        
+        DendroPy is a Python library for phylogenetic computing.
+        It provides classes and functions for the simulation, processing, and
+        manipulation of phylogenetic trees and character matrices, and supports the
+        reading and writing of phylogenetic data in a range of formats, such as NEXUS,
+        NEWICK, NeXML, Phylip, FASTA, etc.  Application scripts for performing some
+        useful phylogenetic operations, such as data conversion and tree posterior
+        distribution summarization, are also distributed and installed as part of the
+        library.  DendroPy can thus function as a stand-alone library for phylogenetics,
+        a component of more complex multi-library phyloinformatic pipelines, or as a
+        scripting "glue" that assembles and drives such pipelines.
+        
+        The primary home page for DendroPy, with detailed tutorials and documentation, is at:
+        
+            http://dendropy.org/
+        
+        DendroPy is also hosted in the official Python repository:
+        
+            http://packages.python.org/DendroPy/
+        
+        Requirements and Installation
+        =============================
+        
+        DendroPy 4.x runs under Python 3 (all versions > 3.1) and Python 2 (Python 2.7 only).
+        
+        You can install DendroPy by running::
+        
+            $ sudo pip install dendropy
+        
+        More information is available here:
+        
+            http://dendropy.org/downloading.html
+        
+        Documentation
+        =============
+        
+        Full documentation is available here:
+        
+            http://dendropy.org/
+        
+        This includes:
+        
+            -   `A comprehensive "getting started" primer <http://dendropy.org/primer/index.html>`_ .
+            -   `API documentation <http://dendropy.org/library/index.html>`_ .
+            -   `Descriptions of data formats supported for reading/writing <http://dendropy.org/schemas/index.html>`_ .
+        
+        and more.
+        
+        Testing
+        =======
+        
+        Tests can be run by typing::
+        
+            $ python -m dendropy.test
+        
+        By default, all tests are run. You can run specific tests by providing the
+        fully-qualified name of the modules, test cases, or specific test methods to
+        run, e.g.::
+        
+            $ python -m dendropy.test test_tokenizer
+            $ python -m dendropy.test test_tokenizer.TestCase
+            $ python -m dendropy.test test_tokenizer.TestCase.test1
+            $ python -m dendropy.test test_tokenizer test_datamodel_taxon
+        
+        Or special pre-defined sub-groups of tests, e.g.::
+        
+            $ python -m dendropy.test @datamodel
+            $ python -m dendropy.test @dataio
+            $ python -m dendropy.test @datamodel @newick
+        
+        A list of all available sub-groups can be seen by::
+        
+            $ python -m dendropy.test --help-testgroups
+        
+        For any tests run, you can set the level at which the test progress is logged
+        by::
+        
+            $ python -m dendropy.test -l DEBUG all
+        
+        For all options, type::
+        
+            $ python -m dendropy.test --help
+        
+        
+        Current Release
+        ===============
+        
+        The current release of DendroPy is version 4.0.2 (master-e02fbea, 2015-06-09 17:33:05).
+        
+        
+Keywords: phylogenetics phylogeny phylogenies phylogeography evolution evolutionary biology systematics coalescent population genetics phyloinformatics bioinformatics
+Platform: UNKNOWN
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python
+Classifier: Topic :: Scientific/Engineering :: Bio-Informatics
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..ee585aa
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,86 @@
+.. image:: https://raw.githubusercontent.com/jeetsukumaran/DendroPy/DendroPy4/doc/source/_static/dendropy_logo.png
+   :align: right
+   :alt: DendroPy
+
+DendroPy is a Python library for phylogenetic computing.
+It provides classes and functions for the simulation, processing, and
+manipulation of phylogenetic trees and character matrices, and supports the
+reading and writing of phylogenetic data in a range of formats, such as NEXUS,
+NEWICK, NeXML, Phylip, FASTA, etc.  Application scripts for performing some
+useful phylogenetic operations, such as data conversion and tree posterior
+distribution summarization, are also distributed and installed as part of the
+library.  DendroPy can thus function as a stand-alone library for phylogenetics,
+a component of more complex multi-library phyloinformatic pipelines, or as a
+scripting "glue" that assembles and drives such pipelines.
+
+The primary home page for DendroPy, with detailed tutorials and documentation, is at:
+
+    http://dendropy.org/
+
+DendroPy is also hosted in the official Python repository:
+
+    http://packages.python.org/DendroPy/
+
+Requirements and Installation
+=============================
+
+DendroPy 4.x runs under Python 3 (all versions > 3.1) and Python 2 (Python 2.7 only).
+
+You can install DendroPy by running::
+
+    $ sudo pip install dendropy
+
+More information is available here:
+
+    http://dendropy.org/downloading.html
+
+Documentation
+=============
+
+Full documentation is available here:
+
+    http://dendropy.org/
+
+This includes:
+
+    -   `A comprehensive "getting started" primer <http://dendropy.org/primer/index.html>`_ .
+    -   `API documentation <http://dendropy.org/library/index.html>`_ .
+    -   `Descriptions of data formats supported for reading/writing <http://dendropy.org/schemas/index.html>`_ .
+
+and more.
+
+Testing
+=======
+
+Tests can be run by typing::
+
+    $ python -m dendropy.test
+
+By default, all tests are run. You can run specific tests by providing the
+fully-qualified name of the modules, test cases, or specific test methods to
+run, e.g.::
+
+    $ python -m dendropy.test test_tokenizer
+    $ python -m dendropy.test test_tokenizer.TestCase
+    $ python -m dendropy.test test_tokenizer.TestCase.test1
+    $ python -m dendropy.test test_tokenizer test_datamodel_taxon
+
+Or special pre-defined sub-groups of tests, e.g.::
+
+    $ python -m dendropy.test @datamodel
+    $ python -m dendropy.test @dataio
+    $ python -m dendropy.test @datamodel @newick
+
+A list of all available sub-groups can be seen by::
+
+    $ python -m dendropy.test --help-testgroups
+
+For any tests run, you can set the level at which the test progress is logged
+by::
+
+    $ python -m dendropy.test -l DEBUG all
+
+For all options, type::
+
+    $ python -m dendropy.test --help
+
diff --git a/applications/sumtrees/sumtrees.py b/applications/sumtrees/sumtrees.py
new file mode 100755
index 0000000..ecb6c66
--- /dev/null
+++ b/applications/sumtrees/sumtrees.py
@@ -0,0 +1,2007 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Summarizations collections of trees, e.g., MCMC samples from a posterior
+distribution, non-parametric bootstrap replicates, mapping posterior
+probability, support, or frequency that splits/clades are found in the source
+set of trees onto a target tree.
+"""
+
+import os
+import sys
+import re
+import getpass
+import argparse
+import collections
+import datetime
+import platform
+import socket
+import math
+import csv
+
# Python versions before 3.4 need a compatibility shim for the newer
# ``open()`` keyword behavior that DendroPy relies on; substitute it for
# the builtin in that case.
# NOTE: the original guard tested ``major >= 3 and minor >= 4``, which
# misclassifies any future major version with a minor component < 4
# (e.g. 4.0 would wrongly take the shim path); lexicographic tuple
# comparison handles all versions correctly.
if sys.version_info < (3, 4):
    from dendropy.utility.filesys import pre_py34_open as open
try:
    # Python 3
    import queue
except ImportError:
    # Python 2.7
    import Queue as queue
import multiprocessing
+
+import dendropy
+from dendropy.utility import cli
+from dendropy.utility import constants
+from dendropy.utility import error
+from dendropy.utility import messaging
+from dendropy.utility import timeprocessing
+from dendropy.utility import bitprocessing
+from dendropy.utility import textprocessing
+
+##############################################################################
+## Preamble
+
# Program identity/metadata constants used by the splash screen, citation
# text, and command-line version output.
_program_name = "SumTrees"
_program_subtitle = "Phylogenetic Tree Summarization"
_program_date = "Jan 31 2015"
# _program_version = "{} ({})".format(dendropy.__version__, _program_date)
# NOTE(review): the hard-coded "4.0.0" below is already stale relative to
# the 4.0.2 package release this file ships with; consider restoring the
# commented-out line above so the program version tracks
# ``dendropy.__version__`` automatically.
_program_version = "4.0.0 ({})".format(_program_date)
_program_author = "Jeet Sukumaran and Mark T. Holder"
_program_contact = "jeetsukumaran at gmail.com"
_program_copyright = """\
Copyright (C) 2008-2014 Jeet Sukumaran and Mark T. Holder.
License GPLv3+: GNU GPL version 3 or later.
This is free software: you are free to change and redistribute it.
There is NO WARRANTY, to the extent permitted by law."""

# Citation string rendered once at import time from the constants above.
_program_citation = """\
Sukumaran, J and MT Holder. {prog_name}: {prog_subtitle}. {prog_version}. Available at https://github.com/jeetsukumaran/DendroPy.
""".format(prog_name=_program_name, prog_subtitle=_program_subtitle, prog_version=_program_version)
+
+##############################################################################
+## Primary Analyzing
+
def _read_into_tree_array(
        tree_array,
        tree_sources,
        schema,
        taxon_namespace,
        rooting,
        tree_offset,
        use_tree_weights,
        preserve_underscores,
        info_message_func,
        error_message_func,
        log_frequency,
        debug_mode,
        ):
    """
    Read trees from ``tree_sources`` into ``tree_array``.

    The first ``tree_offset`` trees of *each* source are treated as burn-in
    and are not added to the array. If ``log_frequency`` is falsy, the whole
    read is delegated to ``tree_array.read_from_files()`` in a single call;
    otherwise trees are pulled one at a time through a tree yielder so that
    progress can be reported via ``info_message_func``.

    On error in the per-tree path, the caught exception is annotated with
    ``exception_tree_source_name`` and ``exception_tree_offset`` before
    being re-raised, so callers can report where the failure occurred
    (unless ``debug_mode`` is set, in which case non-KeyboardInterrupt
    exceptions propagate with their original traceback untouched).

    Note: ``taxon_namespace`` is only used by the per-tree (logging) path;
    the bulk path uses the namespace already bound to ``tree_array``.
    ``error_message_func`` is currently unused.
    """
    if not log_frequency:
        # Fast path: one bulk read, no per-tree progress reporting.
        tree_array.read_from_files(
            files=tree_sources,
            schema=schema,
            rooting=rooting,
            tree_offset=tree_offset,
            store_tree_weights=use_tree_weights,
            preserve_underscores=preserve_underscores,
            ignore_unrecognized_keyword_arguments=True,
            )
    else:
        def _log_progress(source_name, current_tree_offset, aggregate_tree_idx):
            # Emit a progress line when: logging every tree, just crossing
            # the burn-in boundary, or at every ``log_frequency``-th tree
            # counted across all sources.
            if (
                    info_message_func is not None
                    and (
                        (log_frequency == 1)
                        or (tree_offset > 0 and current_tree_offset == tree_offset)
                        or (aggregate_tree_idx > 0 and log_frequency > 0 and (aggregate_tree_idx % log_frequency) == 0)
                        )
                    ):
                if current_tree_offset >= tree_offset:
                    coda = " (analyzing)"
                else:
                    coda = " (burning-in)"
                info_message_func("'{source_name}': tree at offset {current_tree_offset}{coda}".format(
                    source_name=source_name,
                    current_tree_offset=current_tree_offset,
                    coda=coda,
                    ), wrap=False)
        tree_yielder = dendropy.Tree.yield_from_files(
                tree_sources,
                schema=schema,
                taxon_namespace=taxon_namespace,
                store_tree_weights=use_tree_weights,
                preserve_underscores=preserve_underscores,
                rooting=rooting,
                ignore_unrecognized_keyword_arguments=True,
                )
        current_source_index = None
        current_tree_offset = None
        try:
            for aggregate_tree_idx, tree in enumerate(tree_yielder):
                current_yielder_index = tree_yielder.current_file_index
                if current_yielder_index != current_source_index:
                    # Crossed into a new source file: reset the per-source
                    # tree counter and announce the new source.
                    current_source_index = current_yielder_index
                    current_tree_offset = 0
                    source_name = tree_yielder.current_file_name
                    if source_name is None:
                        source_name = "<stdin>"
                    if len(tree_sources) > 1:
                        info_message_func("Analyzing {} of {}: '{}'".format(current_source_index+1, len(tree_sources), source_name), wrap=False)
                    else:
                        info_message_func("Analyzing: '{}'".format(source_name), wrap=False)
                if current_tree_offset >= tree_offset:
                    # Past burn-in for this source: accumulate the tree.
                    tree_array.add_tree(tree=tree, is_bipartitions_updated=False)
                    _log_progress(source_name, current_tree_offset, aggregate_tree_idx)
                else:
                    # Still within burn-in: discard, but still log progress.
                    _log_progress(source_name, current_tree_offset, aggregate_tree_idx)
                current_tree_offset += 1
        except (Exception, KeyboardInterrupt) as e:
            if debug_mode and not isinstance(e, KeyboardInterrupt):
                # Debug mode: surface the original traceback unmodified.
                raise
            # Annotate so the caller can report which source/tree failed.
            e.exception_tree_source_name = tree_yielder.current_file_name
            e.exception_tree_offset = current_tree_offset
            raise e
+
class TreeAnalysisWorker(multiprocessing.Process):
    """
    Worker process for parallel tree analysis.

    Pulls tree-source paths off ``work_queue`` until it is empty (or a kill
    is requested), reads each source into a private ``dendropy.TreeArray``
    (skipping the first ``tree_offset`` trees of each source as burn-in),
    and finally places the populated array on ``results_queue``. If reading
    fails, the raised exception is annotated with ``worker_name`` and
    placed on ``results_queue`` instead.
    """

    def __init__(self,
            name,
            work_queue,
            results_queue,
            source_schema,
            taxon_labels,
            tree_offset,
            is_source_trees_rooted,
            preserve_underscores,
            ignore_edge_lengths,
            ignore_node_ages,
            use_tree_weights,
            ultrametricity_precision,
            log_frequency,
            messenger,
            messenger_lock,
            debug_mode,
            ):
        multiprocessing.Process.__init__(self, name=name)
        self.work_queue = work_queue
        self.results_queue = results_queue
        self.source_schema = source_schema
        self.taxon_labels = taxon_labels
        # Each worker builds its own namespace from the shared label list so
        # taxon indexing is consistent across processes; it is then locked
        # so no new taxa can be created while reading.
        self.taxon_namespace = dendropy.TaxonNamespace(self.taxon_labels)
        self.taxon_namespace.is_mutable = False
        self.tree_offset = tree_offset
        self.is_source_trees_rooted = is_source_trees_rooted
        self.rooting_interpretation = dendropy.get_rooting_argument(is_rooted=self.is_source_trees_rooted)
        self.preserve_underscores = preserve_underscores
        self.ignore_edge_lengths = ignore_edge_lengths
        self.ignore_node_ages = ignore_node_ages
        self.use_tree_weights = use_tree_weights
        self.ultrametricity_precision = ultrametricity_precision
        self.log_frequency = log_frequency
        self.messenger = messenger
        self.messenger_lock = messenger_lock
        # Cooperative shutdown flag checked between tasks.
        self.kill_received = False
        self.tree_array = dendropy.TreeArray(
                taxon_namespace=self.taxon_namespace,
                is_rooted_trees=self.is_source_trees_rooted,
                ignore_edge_lengths=self.ignore_edge_lengths,
                ignore_node_ages=self.ignore_node_ages,
                use_tree_weights=self.use_tree_weights,
                ultrametricity_precision=self.ultrametricity_precision,
                )
        # Tag the array so the parent process can report which worker
        # produced it when collating results.
        self.tree_array.worker_name = self.name
        self.num_tasks_received = 0
        self.num_tasks_completed = 0
        self.debug_mode = debug_mode

    def send_message(self, msg, level, wrap=True):
        """Log ``msg`` (prefixed with this worker's name) through the
        shared messenger, serialized across processes by
        ``messenger_lock``; no-op if no messenger is attached or the
        message falls below the configured level."""
        if self.messenger is None:
            return
        if self.messenger.messaging_level > level or self.messenger.silent:
            return
        msg = "{}: {}".format(self.name, msg)
        self.messenger_lock.acquire()
        try:
            self.messenger.log(msg, level=level, wrap=wrap)
        finally:
            self.messenger_lock.release()

    def send_info(self, msg, wrap=True):
        """Log ``msg`` at INFO level."""
        self.send_message(msg, messaging.ConsoleMessenger.INFO_MESSAGING_LEVEL, wrap=wrap)

    def send_warning(self, msg, wrap=True):
        """Log ``msg`` at WARNING level."""
        self.send_message(msg, messaging.ConsoleMessenger.WARNING_MESSAGING_LEVEL, wrap=wrap)

    def send_error(self, msg, wrap=True):
        """Log ``msg`` at ERROR level."""
        self.send_message(msg, messaging.ConsoleMessenger.ERROR_MESSAGING_LEVEL, wrap=wrap)

    def run(self):
        """Main worker loop: consume sources from the work queue until it
        is empty (or a kill is requested), then enqueue the accumulated
        tree array -- or the first exception encountered -- as the
        result."""
        while not self.kill_received:
            try:
                tree_source = self.work_queue.get_nowait()
            except queue.Empty:
                # No more sources to process: fall through to post the
                # accumulated results.
                break
            self.num_tasks_received += 1
            # self.send_info("Received task {task_count}: '{task_name}'".format(
            self.send_info("Received task: '{task_name}'".format(
                task_count=self.num_tasks_received,
                task_name=tree_source), wrap=False)
            # self.tree_array.read_from_files(
            #     files=[tree_source],
            #     schema=self.source_schema,
            #     rooting=self.rooting_interpretation,
            #     tree_offset=self.tree_offset,
            #     preserve_underscores=self.preserve_underscores,
            #     store_tree_weights=self.use_tree_weights,
            #     ignore_unrecognized_keyword_arguments=True,
            #     )
            try:
                _read_into_tree_array(
                        tree_array=self.tree_array,
                        tree_sources=[tree_source],
                        schema=self.source_schema,
                        taxon_namespace=self.taxon_namespace,
                        rooting=self.rooting_interpretation,
                        tree_offset=self.tree_offset,
                        use_tree_weights=self.use_tree_weights,
                        preserve_underscores=self.preserve_underscores,
                        info_message_func=self.send_info,
                        error_message_func=self.send_error,
                        log_frequency=self.log_frequency,
                        debug_mode=self.debug_mode,
                        )
            except (KeyboardInterrupt, Exception) as e:
                # Ship the failure to the parent process for reporting,
                # then stop this worker.
                e.worker_name = self.name
                self.results_queue.put(e)
                break
            if self.kill_received:
                break
            self.num_tasks_completed += 1
            # self.send_info("Completed task {task_count}: '{task_name}'".format(
            self.send_info("Completed task: '{task_name}'".format(
                task_count=self.num_tasks_received,
                task_name=tree_source), wrap=False)
        if self.kill_received:
            self.send_warning("Terminating in response to kill request")
        else:
            self.results_queue.put(self.tree_array)
+
class TreeProcessor(object):
    """
    Orchestrates reading of tree sources into a ``dendropy.TreeArray``,
    either serially in-process or fanned out across multiple worker
    processes (``TreeAnalysisWorker``), with optional progress messaging
    through an attached messenger.
    """

    def __init__(self,
            is_source_trees_rooted,
            ignore_edge_lengths,
            ignore_node_ages,
            use_tree_weights,
            ultrametricity_precision,
            num_processes,
            log_frequency,
            messenger,
            debug_mode,
            ):
        self.is_source_trees_rooted = is_source_trees_rooted
        self.rooting_interpretation = dendropy.get_rooting_argument(is_rooted=self.is_source_trees_rooted)
        self.ignore_edge_lengths = ignore_edge_lengths
        self.ignore_node_ages = ignore_node_ages
        self.use_tree_weights = use_tree_weights
        self.ultrametricity_precision = ultrametricity_precision
        # ``None`` or <= 1 selects serial mode in ``analyze_trees``.
        self.num_processes = num_processes
        self.log_frequency = log_frequency
        self.messenger = messenger
        self.debug_mode = debug_mode

    def info_message(self, msg, wrap=True, prefix=""):
        """Log ``msg`` at INFO level if a messenger is attached."""
        if self.messenger:
            self.messenger.info(msg, wrap=wrap, prefix=prefix)

    def warning_message(self, msg, wrap=True, prefix=""):
        """Log ``msg`` at WARNING level if a messenger is attached."""
        if self.messenger:
            self.messenger.warning(msg, wrap=wrap, prefix=prefix)

    def error_message(self, msg, wrap=True, prefix=""):
        """Log ``msg`` at ERROR level if a messenger is attached."""
        if self.messenger:
            self.messenger.error(msg, wrap=wrap, prefix=prefix)

    def analyze_trees(self,
            tree_sources,
            schema,
            taxon_namespace=None,
            tree_offset=0,
            preserve_underscores=False,
            ):
        """
        Read every source in ``tree_sources`` into a new
        ``dendropy.TreeArray`` and return it, dispatching to serial or
        multiprocessing mode based on ``self.num_processes``.
        """
        if self.num_processes is None or self.num_processes <= 1:
            tree_array = self.serial_analyze_trees(
                    tree_sources=tree_sources,
                    schema=schema,
                    taxon_namespace=taxon_namespace,
                    tree_offset=tree_offset,
                    preserve_underscores=preserve_underscores,
                    )
        else:
            tree_array = self.parallel_analyze_trees(
                    tree_sources=tree_sources,
                    schema=schema,
                    taxon_namespace=taxon_namespace,
                    tree_offset=tree_offset,
                    preserve_underscores=preserve_underscores,
                    )
        return tree_array

    def serial_analyze_trees(self,
            tree_sources,
            schema,
            taxon_namespace=None,
            tree_offset=0,
            preserve_underscores=False,
            ):
        """
        Read all sources in the current process into a single
        ``dendropy.TreeArray`` and return it. A fresh ``TaxonNamespace``
        is created if none is given.
        """
        if taxon_namespace is None:
            taxon_namespace = dendropy.TaxonNamespace()
        self.info_message("Running in serial mode")
        tree_array = dendropy.TreeArray(
                taxon_namespace=taxon_namespace,
                is_rooted_trees=self.is_source_trees_rooted,
                ignore_edge_lengths=self.ignore_edge_lengths,
                ignore_node_ages=self.ignore_node_ages,
                use_tree_weights=self.use_tree_weights,
                ultrametricity_precision=self.ultrametricity_precision,
                )
        _read_into_tree_array(
                tree_array=tree_array,
                tree_sources=tree_sources,
                schema=schema,
                taxon_namespace=taxon_namespace,
                rooting=self.rooting_interpretation,
                tree_offset=tree_offset,
                use_tree_weights=self.use_tree_weights,
                preserve_underscores=preserve_underscores,
                info_message_func=self.info_message,
                error_message_func=self.error_message,
                log_frequency=self.log_frequency,
                debug_mode=self.debug_mode,
                )
        return tree_array

    def parallel_analyze_trees(self,
            tree_sources,
            schema,
            tree_offset=0,
            preserve_underscores=False,
            taxon_namespace=None,
            ):
        """
        Fan the sources out across up to ``self.num_processes`` worker
        processes and merge each worker's ``TreeArray`` into one master
        array, which is returned. Because all workers must share a fixed
        taxon indexing, taxon names are fixed up front: either from the
        supplied ``taxon_namespace`` or discovered from the first tree of
        the first source.
        """
        # describe
        self.info_message("Running in multiprocessing mode (up to {} processes)".format(self.num_processes))
        # taxon definition
        if taxon_namespace is not None:
            self.info_message("Using taxon names provided by user")
        else:
            tdfpath = tree_sources[0]
            self.info_message("Pre-loading taxon names based on first tree in source '{}'".format(tdfpath))
            taxon_namespace = self.discover_taxa(tdfpath, schema, preserve_underscores=preserve_underscores)
        taxon_labels = [t.label for t in taxon_namespace]
        self.info_message("{} taxa defined: {}".format( len(taxon_labels), taxon_labels))
        # max_idx_width = int(math.floor(math.log(len(taxon_labels), 10))) + 1
        # idx_col_width = (2 * max_idx_width) + 6
        # for tidx, taxon_label in enumerate(taxon_labels):
        #     index_col = "{idx:>{idx_col_width}}".format(
        #         idx=" ({}/{}): ".format(tidx+1, len(taxon_labels)),
        #         idx_col_width=idx_col_width,
        #         )
        #     self.info_message(taxon_label, prefix=index_col)


        # load up queue
        self.info_message("Creating work queue")
        work_queue = multiprocessing.Queue()
        for f in tree_sources:
            work_queue.put(f)

        # launch processes
        self.info_message("Launching {} worker processes".format(self.num_processes))
        results_queue = multiprocessing.Queue()
        messenger_lock = multiprocessing.Lock()
        workers = []
        for idx in range(self.num_processes):
            # self.info_message("Launching {} of {} worker processes".format(idx+1, self.num_processes))
            tree_analysis_worker = TreeAnalysisWorker(
                    name="Process-{}".format(idx+1),
                    work_queue=work_queue,
                    results_queue=results_queue,
                    source_schema=schema,
                    taxon_labels=taxon_labels,
                    tree_offset=tree_offset,
                    is_source_trees_rooted=self.is_source_trees_rooted,
                    preserve_underscores=preserve_underscores,
                    ignore_edge_lengths=self.ignore_edge_lengths,
                    ignore_node_ages=self.ignore_node_ages,
                    use_tree_weights=self.use_tree_weights,
                    ultrametricity_precision=self.ultrametricity_precision,
                    messenger=self.messenger,
                    messenger_lock=messenger_lock,
                    log_frequency=self.log_frequency,
                    debug_mode=self.debug_mode)
            tree_analysis_worker.start()
            workers.append(tree_analysis_worker)

        # collate results
        result_count = 0
        master_tree_array = dendropy.TreeArray(
                taxon_namespace=taxon_namespace,
                is_rooted_trees=self.is_source_trees_rooted,
                ignore_edge_lengths=self.ignore_edge_lengths,
                ignore_node_ages=self.ignore_node_ages,
                use_tree_weights=self.use_tree_weights,
                ultrametricity_precision=self.ultrametricity_precision,
                )
        try:
            # Each worker posts exactly one result: either its TreeArray
            # (tagged with ``worker_name`` by the worker) or an exception.
            while result_count < self.num_processes:
                result = results_queue.get()
                if isinstance(result, Exception) or isinstance(result, KeyboardInterrupt):
                    self.info_message("Exception raised in worker process '{}'".format(result.worker_name))
                    raise result
                master_tree_array.update(result)
                self.info_message("Recovered results from worker process '{}'".format(result.worker_name))
                result_count += 1
                # self.info_message("Recovered results from {} of {} worker processes".format(result_count, self.num_processes))
        except (Exception, KeyboardInterrupt) as e:
            # On any failure (including Ctrl-C), kill all workers before
            # propagating the error.
            for worker in workers:
                worker.terminate()
            raise
        self.info_message("All {} worker processes terminated".format(self.num_processes))
        return master_tree_array

    def discover_taxa(self,
            treefile,
            schema,
            preserve_underscores):
        """
        Reads first tree in treefile, and assumes that is sufficient to populate a
        taxon set object fully, which it then returns.

        Returns ``None`` if the source yields no trees. Note that if the
        first tree does not contain all the taxa found across the full
        source set, the resulting namespace will be incomplete.
        """
        for tree in dendropy.Tree.yield_from_files([treefile],
                schema=schema,
                preserve_underscores=preserve_underscores,
                ignore_unrecognized_keyword_arguments=True,
                ):
            return tree.taxon_namespace
+
+##############################################################################
+## Output
+
+def _write_trees(trees,
+        output_dest,
+        args,
+        file_comments):
+    if args.output_tree_format == "newick" or args.output_tree_format == "phylip":
+        trees.write_to_stream(
+                output_dest,
+                "newick",
+                suppress_rooting=False,
+                suppress_edge_lengths=True if args.edge_length_summarization == "clear" else False,
+                unquoted_underscores=True if args.preserve_underscores else False,
+                preserve_spaces=True if args.preserve_underscores else False,
+                store_tree_weights=True,
+                suppress_annotations=args.suppress_annotations,
+                suppress_item_comments=args.clear_item_comments,
+                )
+    elif args.output_tree_format == "nexus":
+        trees.write_to_stream(
+                output_dest,
+                "nexus",
+                suppress_rooting=False,
+                suppress_edge_lengths=True if args.edge_length_summarization == "clear" else False,
+                unquoted_underscores=True if args.preserve_underscores else False,
+                preserve_spaces=True if args.preserve_underscores else False,
+                store_tree_weights=True,
+                suppress_annotations=args.suppress_annotations,
+                suppress_item_comments=args.clear_item_comments,
+                simple=args.no_taxa_block,
+                file_comments=file_comments,
+                )
+    elif args.output_tree_format == "nexml":
+        if file_comments:
+            trees.comments = file_comments
+        trees.write_to_stream(
+                output_dest,
+                "nexml",
+                )
+    else:
+        raise ValueError(args.output_tree_format)
+
+##############################################################################
+## Front-End
+
def citation(args):
    """argparse callback: print the program splash/citation to standard
    output, then exit with status 0."""
    # ``args`` is required by the callback signature but is not used.
    show_splash(dest=sys.stdout)
    sys.exit(0)
+
def show_splash(dest=None):
    """Write the program splash (name, subtitle, version, author, and
    citation) to ``dest`` -- standard error by default -- followed by a
    trailing blank line."""
    out = sys.stderr if dest is None else dest
    cli.show_splash(
            prog_name=_program_name,
            prog_subtitle=_program_subtitle,
            prog_version=_program_version,
            prog_author=_program_author,
            prog_copyright=_program_copyright,
            include_citation=True,
            include_copyright=False,
            additional_citations=[_program_citation],
            dest=out,
            )
    out.write("\n")
+
def print_usage_examples(dest=None):
    """Write canned command-line usage examples (reStructuredText) to
    ``dest``, defaulting to standard output."""
    if dest is None:
        dest = sys.stdout
    # NOTE: this literal is emitted verbatim at runtime; edit with care.
    examples = ("""\
Summarize a set of tree files using a 95% rule consensus tree, with support for
clades expressed as proportions (posterior probabilities) on internal node
labels and branch lengths the mean across all trees, dropping the first 200
trees in each file as a burn-in, and saving the result to "``result.tre``"::

    $ sumtrees.py \\
            --summary-tree consensus \\
            --min-clade-freq=0.95 \\
            --edges mean-length \\
            --burnin=200 \\
            --support-as-labels \\
            --output=result.tre \\
            treefile1.tre treefile2.tre treefile3.tre

To use a different type of summary tree, e.g., the tree that maximizes the
product of posterior probabilities, you can specify 'mcct' for the
'--summary-tree' option:

    $ sumtrees.py \\
            --summary-tree mcct \\
            --min-clade-freq=0.95 \\
            --edges mean-length \\
            --burnin=200 \\
            --support-as-labels \\
            --output=result.tre \\
            treefile1.tre treefile2.tre treefile3.tre

If the input trees are ultrametric and you want to set the node ages to the
median node age, set the '--edges' argument to 'median-age':

    $ sumtrees.py \\
            --summary-tree mcct \\
            --edges median-age \\
            --burnin=200 \\
            --output=result.tre \\
            treefile1.tre treefile2.tre treefile3.tre

Calculate support for nodes on a specific tree, "``best.tre``" as given by a
set of tree files, with support reported as percentages rounded to integers,
and saving the result to "``result.tre``"::

    $ sumtrees.py \\
            --target=best.tre \\
            --decimals=0 \\
            --percentages \\
            --output=result.tre \\
            treefile1.tre treefile2.tre treefile3.tre

""")
    dest.write(examples + "\n")
+
def print_description(dest=None):
    """Delegate to ``dendropy.description()``, passing ``dest`` through
    and returning its result."""
    return dendropy.description(dest=dest)
+
+def main():
+
+    ######################################################################
+    ## Start Recording Total Job Time
+
+    main_time_start = datetime.datetime.now()
+
+    ######################################################################
+    ## CLI
+
+    parser = argparse.ArgumentParser(
+            description=__doc__,
+            formatter_class=cli.CustomFormatter,
+            add_help=False,
+            )
+    source_options = parser.add_argument_group("Source Options")
+    source_options.add_argument("tree_sources",
+            nargs="*",
+            metavar="TREE-FILEPATH",
+            help= (
+                "Source(s) of trees to summarize. At least one valid"
+                " source of trees must be provided. Use '-' to specify"
+                " reading from standard input (note that this requires"
+                " the input file format to be explicitly set using"
+                " the '--source-format' option)."
+            ))
+    source_options.add_argument("-i", "--input-format", "--source-format",
+            metavar="FORMAT",
+            default=None,
+            choices=["nexus/newick", "nexus", "newick", "phylip", "nexml"],
+            help=(
+                 "Format of all input trees (defaults to handling either NEXUS"
+                 " or NEWICK through inspection; it is more efficient to"
+                 " explicitly specify the format if it is known)."
+                 ))
+    source_options.add_argument("-b", "--burnin",
+            type=int,
+            default=0,
+            help=(
+                 "Number of trees to skip from the beginning of *each* tree "
+                 "file when counting support (default: %(default)s)."
+                 ))
+    source_options.add_argument("--force-rooted", "--rooted",
+            dest="is_source_trees_rooted",
+            action="store_true",
+            default=None,
+            help="Treat source trees as rooted.")
+    source_options.add_argument("--force-unrooted", "--unrooted",
+            dest="is_source_trees_rooted",
+            action="store_false",
+            default=None,
+            help="Treat source trees as unrooted.")
+    source_options.add_argument("-v", "--ultrametricity-precision", "--branch-length-epsilon",
+            action="store_true",
+            default=constants.DEFAULT_ULTRAMETRICITY_PRECISION,
+            help="Precision to use when validating ultrametricity (default: %(default)s; specify '0' to disable validation).")
+    source_options.add_argument("--weighted-trees",
+            action="store_true",
+            default=False,
+            help=(
+                "Use weights of trees (as indicated by '[&W m/n]' comment token) "
+                "to weight contribution of splits found on each tree to overall "
+                "split frequencies."
+                ))
+    source_options.add_argument("--preserve-underscores",
+            action="store_true",
+            default=False,
+            help=(
+                "Do not convert unprotected (unquoted) underscores to spaces"
+                " when reading NEXUS/NEWICK format trees."
+                ))
+    source_options.add_argument("--taxon-name-filepath",
+            metavar="FILEPATH",
+            default=None,
+            help=(
+                "Path to file listing all the taxon names or labels that"
+                " will be found across the entire set of source trees."
+                " This file should be a plain text file with a single"
+                " name list on each line. This file is only read when"
+                " multiprocessing ('-M' or '-m') is requested. When"
+                " multiprocessing using the '-M' or '-m' options,"
+                " all taxon names need to be defined in advance"
+                " of any actual tree analysis. By default this is done"
+                " by reading the first tree in the first tree source"
+                " and extracting the taxon names. At best, this is,"
+                " inefficient, as it involves an extraneous reading of"
+                " the tree. At worst, this can be errorneous, if the"
+                " first tree does not contain all the taxa. Explicitly"
+                " providing the taxon names via this option can avoid"
+                " these issues."
+                ))
+
+    target_tree_options = parser.add_argument_group("Target Tree Topology Options")
+    target_tree_options.add_argument(
+            "-t", "--target-tree-filepath",
+            default=None,
+            metavar="FILE",
+            help=(
+                    "Summarize support and other information from the source"
+                    " trees to topology or topologies given by the tree(s)"
+                    " described in FILE. If no use-specified target topologies"
+                    " are given, then a summary topology will be used as the"
+                    " target. Use the '-s' or '--summary-target' to specify the"
+                    " type of summary tree to use."
+                 ))
+    target_tree_options.add_argument(
+            "-s", "--summary-target",
+            default=None,
+            choices=["consensus", "mcct", "msct"],
+            metavar="SUMMARY-TYPE",
+            help=cli.CustomFormatter.format_definition_list_help(
+                    preamble=
+                        (
+                        "Construct and summarize support and other information "
+                        "from the source trees to one of the following summary "
+                        "topologies: "
+                        ),
+                    definitions=
+                        (
+                            ("'consensus'",
+                                "A consensus tree. The minimum frequency       "
+                                "threshold of clades to be included            "
+                                "can be specified using the '-f' or            "
+                                "'--min-clade-freq' flags. This is the DEFAULT "
+                                "if a user- specified target tree is not given "
+                                "through the '-t' or '--target-tree-filepath'       "
+                                "options.                                      "
+                            ),
+                            ("'mcct'",
+                                "The maximum clade credibility tree.   "
+                                "The tree from the source set that     "
+                                "maximizes the *product* of clade      "
+                                "posterior probabilities.              "
+                            ),
+                            ("'msct'",
+                                "The maximum sum of clade credibilities "
+                                "tree. The tree from the source set     "
+                                "that maximizes the *sum* of clade      "
+                                "posterior probabilities.               "
+                            ),
+                        )
+                ))
+    target_tree_supplemental_options = parser.add_argument_group("Target Tree Supplemental Options")
+    target_tree_supplemental_options.add_argument("-f", "--min-consensus-freq", "--min-freq", "--min-clade-freq",
+            type=float,
+            default=constants.GREATER_THAN_HALF,
+            metavar="#.##",
+            help=(
+                "If using a consensus tree summarization strategy, then "
+                "this is the minimum frequency or probability for a clade "
+                "or a split to be included in the resulting tree "
+                "(default: > 0.5)."))
+    target_tree_supplemental_options.add_argument("--allow-unknown-target-tree-taxa",
+            action="store_true",
+            default=False,
+            help=(
+                "Do not fail with error if target tree(s) have taxa not"
+                " previously encountered in source trees or defined in"
+                " the taxon discovery file."
+                ))
+
+    target_tree_rooting_options = parser.add_argument_group("Target Tree Rooting Options")
+    target_tree_rooting_options.add_argument("--root-target-at-outgroup",
+            dest="root_target_at_outgroup",
+            metavar="TAXON-LABEL",
+            default=None,
+            help="Root target tree(s) using specified taxon as outgroup.")
+    target_tree_rooting_options.add_argument("--root-target-at-midpoint",
+            action="store_true",
+            default=None,
+            help="Root target tree(s) at midpoint.")
+    target_tree_rooting_options.add_argument("--set-outgroup",
+            dest="set_outgroup",
+            metavar="TAXON-LABEL",
+            default=None,
+            help="Rotate the target trees such the specified taxon is in the outgroup position, but do not explicitly change the target tree rooting.")
+
+    edge_length_summarization_options = parser.add_argument_group("Target Tree Edge Options")
+    edge_length_summarization_choices = ["mean-length", "median-length", "mean-age", "median-age", "support", "keep", "clear",]
+    edge_length_summarization_options.add_argument(
+            "-e",
+            "--set-edges",
+            "--edges",
+            dest="edge_length_summarization",
+            metavar="STRATEGY",
+            choices=edge_length_summarization_choices,
+            default=None,
+            help=cli.CustomFormatter.format_definition_list_help(
+                    preamble=
+                        (
+                        "Set the edge lengths of the target or        "
+                        "summary trees based on the specified         "
+                        "summarization STRATEGY:                      "
+                        ),
+                    definitions=
+                        (
+                            ("'mean-length'",
+                                "Edge lengths will be set to the mean  "
+                                "of the lengths of the corresponding   "
+                                "split or clade in the source trees.   "
+                            ),
+                            ("'median-length'",
+                                " Edge lengths will be set to the      "
+                                " median of the lengths of the         "
+                                " corresponding split or clade in the  "
+                                " source trees.                        "
+                            ),
+                            ("'mean-age'",
+                                "Edge lengths will be adjusted so      "
+                                "that the age of subtended nodes will  "
+                                "be equal to the mean age of the       "
+                                "corresponding split or clade in the   "
+                                "source trees. Source trees will need  "
+                                "to be ultrametric for this option.    "
+                            ),
+                            ("'median-age'",
+                                "Edge lengths will be adjusted so     "
+                                "that the age of subtended nodes will "
+                                "be equal to the median age of the    "
+                                "corresponding split or clade in the  "
+                                "source trees. Source trees will need "
+                                "to be ultrametric for this option."
+                                            ),
+                            ("'support'",
+                                "Edge lengths will be set to the       "
+                                "support value for the split           "
+                                "represented by the edge.              "
+                            ),
+                            ("'keep'",
+                                "Do not change the existing edge       "
+                                "lengths. This is the DEFAULT if       "
+                                "target tree(s) are sourced from an    "
+                                "external file using the '-t' or       "
+                                "'--target-tree-filepath' option            "
+                            ),
+                            ("'clear'",
+                                "Edge lengths will be cleared from the "
+                                "target trees if they are present.     "
+                            ),
+                        ),
+                    coda="\n".join((
+                            "<pre>Note the default setting varies according to the ",
+                            "following, in order of preference:                  ",
+                            "(1) If target trees are specified using the '-t' or ",
+                            "    '--target-tree-filepath' option, then the default edge ",
+                            "    summarization strategy is: 'keep'. ",
+                            "(2) If target trees are not specified, but the ",
+                            "    '--summarize-node-ages' option is specified, ",
+                            "    then the default edge summarization strategy is: ",
+                            "    'mean-age'. ",
+                            "(3) If no target trees are specified and the ",
+                            "    node ages are NOT specified to be summarized, ",
+                            "    then the default edge summarization strategy is: ",
+                            "    'mean-length'. ",
+                        ))
+                ))
+    edge_length_summarization_options.add_argument("--force-minimum-edge-length",
+            default=None,
+            type=float,
+            help="(If setting edge lengths) force all edges to be at least this length.")
+    edge_length_summarization_options.add_argument("--collapse-negative-edges",
+            action="store_true",
+            default=False,
+            help="(If setting edge lengths) force parent node ages to be at least as old as its oldest child when summarizing node ages.")
+
+    node_summarization_options = parser.add_argument_group("Target Tree Annotation Options")
+    node_summarization_options.add_argument(
+            "--summarize-node-ages", "--ultrametric", "--node-ages",
+            action="store_true",
+            dest="summarize_node_ages",
+            default=None,
+            help="Assume that source trees are ultrametric and summarize node ages (distances from tips).")
+    node_summarization_options.add_argument("-l","--labels",
+            dest="node_labels",
+            default="support",
+            choices=["support", "keep", "clear",],
+            help=cli.CustomFormatter.format_definition_list_help(
+                preamble="Set the node labels of the summary or target tree(s):",
+                definitions=(
+                    ("'support'",
+                        "Node labels will be set to the support value  "
+                        "for the clade represented by the node. This is "
+                        "the DEFAULT.                                  "
+                    ),
+                    ("'keep'",
+                        "Do not change the existing node labels."
+                    ),
+                    ("'clear'",
+                        "Node labels will be cleared from the target   "
+                        "trees if they are present.                    "
+                    )
+                )
+                ))
+    node_summarization_options.add_argument("--suppress-annotations", "--no-annotations",
+            action="store_true",
+            default=False,
+            help=(
+                "Do NOT annotate nodes and edges with any summarization information metadata such as "
+                "support values, edge length and/or node age summary statistics, etc."
+                ))
+
+    support_expression_options = parser.add_argument_group("Support Expression Options")
+    support_expression_options.add_argument("-p", "--percentages",
+            action="store_true",
+            dest="support_as_percentages",
+            default=False,
+            help="Indicate branch support as percentages (otherwise, will report as proportions by default).")
+    support_expression_options.add_argument("-d", "--decimals",
+            dest="support_label_decimals",
+            type=int,
+            metavar="#",
+            default=8,
+            help="Number of decimal places in indication of support values (default: %(default)s).")
+    # other_summarization_options.add_argument("--no-summarize-edge-lengths",
+    #         action="store_false",
+    #         dest="summarize_edge_lengths",
+    #         default=None,
+    #         help="Do not summarize edge lengths.")
+
+    output_options = parser.add_argument_group("Output Options")
+    output_options.add_argument("-o","--output-tree-filepath", "--output",
+            metavar="FILEPATH",
+            default=None,
+            help="Path to output file (if not specified, will print to standard output).")
+    output_options.add_argument("-F","--output-tree-format",
+            default=None,
+            choices=["nexus", "newick", "phylip", "nexml"],
+            help="Format of the output tree file (if not specified, defaults to input format, if this has been explicitly specified, or 'nexus' otherwise).")
+    output_options.add_argument("-x", "--extended-output",
+            dest="extended_output_prefix",
+            default=None,
+            metavar="PREFIX",
+            help=cli.CustomFormatter.format_definition_list_help(
+                    preamble=
+                        (
+                        "If specified, extended summarization information "
+                        "will be generated, consisting of the following "
+                        "files:"
+                        ),
+                    definitions=
+                        (
+                            # ("'<PREFIX>.summary.trees'",
+                            #     "Summary or target trees onto which the "
+                            #     "summarization information from the source set "
+                            #     "has been mapped."
+                            # ),
+                            ("'<PREFIX>.topologies.trees'",
+                                "A collection of topologies found in the sources "
+                                "reported with their associated posterior "
+                                "probabilities as metadata annotations."
+                            ),
+                            ("'<PREFIX>.bipartitions.trees'",
+                                "A collection of bipartitions, each represented as "
+                                "a tree, with associated information as metadata"
+                                "annotations."
+                            ),
+                            ("'<PREFIX>.bipartitions.tsv'",
+                                "Table listing bipartitions as a group pattern as "
+                                "the key column, and information regarding each "
+                                "the bipartitions as the remaining columns."
+                            ),
+                            ("'<PREFIX>.edge-lengths.tsv'",
+                                "List of bipartitions and "
+                                "corresponding edge lengths. Only "
+                                "generated if edge lengths are "
+                                "summarized. "
+                            ),
+                            ("'<PREFIX>.node-ages.tsv'",
+                                "List of bipartitions and corresponding "
+                                "ages. Only generated if node ages are "
+                                "summarized. "
+                            ),
+                        )
+                        ))
+    output_options.add_argument("--no-taxa-block",
+            action="store_true",
+            default=False,
+            help="When writing NEXUS format output, do not include a taxa block in the output treefile (otherwise will create taxa block by default).")
+    output_options.add_argument("--no-analysis-metainformation", "--no-meta-comments",
+            dest="suppress_analysis_metainformation",
+            action="store_true",
+            default=False,
+            help="Do not include meta-information describing the summarization parameters and execution details.")
+    output_options.add_argument("-c", "--additional-comments",
+            action="store",
+            dest="additional_comments",
+            default=None,
+            help="Additional comments to be added to the summary file.")
+    output_options.add_argument("-r", "--replace",
+            action="store_true",
+            dest="replace",
+            default=False,
+            help="Replace/overwrite output file without asking if it already exists.")
+
+    deprecated_output_options = parser.add_argument_group("Deprecated Output Options")
+    deprecated_output_options.add_argument("--trprobs", "--calc-tree-probabilities",
+            dest="trprobs_filepath",
+            default=None,
+            metavar="FILEPATH",
+            help=argparse.SUPPRESS,
+            )
+    deprecated_output_options.add_argument("--support-as-labels",
+            action="store_true",
+            default=None,
+            help=argparse.SUPPRESS,
+            )
+    deprecated_output_options.add_argument("--extract-edges",
+            dest="split_edge_map_filepath",
+            default=None,
+            metavar="FILEPATH",
+            help=argparse.SUPPRESS,
+            )
+
+    multiprocessing_options = parser.add_argument_group("Parallel Processing Options")
+    multiprocessing_options.add_argument("-M", "--maximum-multiprocessing",
+            action="store_const",
+            const="max",
+            dest="multiprocess",
+            help=(
+                 "Run in parallel mode using as many processors as available, up to the number of sources."
+                 ))
+    multiprocessing_options.add_argument("-m", "--multiprocessing",
+            dest="multiprocess",
+            metavar="NUM-PROCESSES",
+            help=(
+                 "Run in parallel mode with up to a maximum of NUM-PROCESSES processes "
+                 "('max' or '#' means to run in as many processes as there are cores on the "
+                 "local machine; i.e., same as specifying '-M' or '--maximum-multiprocessing')."
+                 ))
+
+    logging_options = parser.add_argument_group("Program Logging Options")
+    logging_options.add_argument("-g", "--log-frequency",
+            type=int,
+            metavar="LOG-FREQUENCY",
+            default=500,
+            help="Tree processing progress logging frequency (default: %(default)s; set to 0 to suppress).")
+    logging_options.add_argument("-q", "--quiet",
+            action="store_true",
+            default=False,
+            help="Suppress ALL logging, progress and feedback messages.")
+
+    error_options = parser.add_argument_group("Program Error Options")
+    error_options.add_argument("--ignore-missing-support",
+            action="store_true",
+            default=False,
+            help="Ignore missing support tree files (note that at least one must exist).")
+
+    information_options = parser.add_argument_group("Program Information Options")
+    information_options.add_argument("-h", "--help",
+            action="store_true",
+            default=False,
+            help="Show help information for program and exit.")
+    information_options.add_argument("--citation",
+            action="store_true",
+            default=False,
+            help="Show citation information for program and exit.")
+    information_options.add_argument("--usage-examples",
+            action="store_true",
+            default=False,
+            help="Show usage examples of program and exit.")
+    information_options.add_argument("--describe",
+            action="store_true",
+            default=False,
+            help="Show information regarding your DendroPy and Python installations and exit.")
+
+    parser.add_argument('--debug-mode', action="store_true", help=argparse.SUPPRESS)
+
+    args = parser.parse_args()
+
+    ######################################################################
+    ## add stuff here: incorporate into CLI later
+    args.clear_item_comments = False
+
+    ######################################################################
+    ## Information (Only) Operations
+
+    if args.citation:
+        citation(args)
+
+    if not args.quiet:
+        show_splash()
+
+    if args.help:
+        parser.print_help(sys.stdout)
+        sys.exit(0)
+
+    if args.usage_examples:
+        print_usage_examples(sys.stdout)
+        sys.exit(0)
+
+    if args.describe:
+        print_description(sys.stdout)
+        sys.exit(0)
+
+    ######################################################################
+    ## Set up messenger
+
+    if args.quiet:
+        messaging_level = messaging.ConsoleMessenger.ERROR_MESSAGING_LEVEL
+    else:
+        messaging_level = messaging.ConsoleMessenger.INFO_MESSAGING_LEVEL
+    messenger = messaging.ConsoleMessenger(name="SumTrees", messaging_level=messaging_level)
+
+    processing_report_lines = []
+    def _message_and_log(msg, wrap=True, prefix=""):
+        # Report ``msg`` on the console messenger and also accumulate it in
+        # ``processing_report_lines`` (closure from the enclosing scope) so
+        # the same text can be replayed later in the processing report.
+        messenger.info(msg, wrap=wrap, prefix=prefix)
+        processing_report_lines.append(msg)
+    def _bulleted_message_and_log(msg, prefix="- "):
+        # Same as ``_message_and_log`` but renders the message as a bulleted
+        # list item; the bullet prefix is retained in the logged report line.
+        messenger.info(msg, wrap=True, prefix=prefix)
+        processing_report_lines.append(prefix + msg)
+
+    ######################################################################
+    ## Set up some common messages
+
+    mixed_rooting_solution = (
+                            "Re-run SumTrees using the"
+                            " '--force-rooted' or the"
+                            " '--force-unrooted' option to force a"
+                            " consistent rooting state for all"
+                            " trees."
+                            )
+
+    ######################################################################
+    ## Support Files
+
+    if len(args.tree_sources) == 0:
+        parser.print_usage()
+        sys.stdout.write("\n")
+        sys.stdout.write("Type 'sumtrees.py --help' for details on usage.\n")
+        sys.stdout.write("Type 'sumtrees.py --usage-examples' for examples of usage.\n")
+        sys.exit(0)
+
+    tree_sources = []
+    ignored_sources = []
+    for fpath in args.tree_sources:
+        if fpath == "-":
+            if args.input_format is None:
+                messenger.error("Format of source trees must be specified using '--source-format' flag when reading trees from standard input")
+                sys.exit(1)
+            elif args.input_format.lower() == "nexus/newick":
+                messenger.error("The 'nexus/newick' format is not supported when reading trees from standard input")
+                sys.exit(1)
+            if len(args.tree_sources) > 1:
+                messenger.error("Cannot specify multiple sources when reading from standard input")
+            tree_sources = None
+            break
+        else:
+            if args.input_format is None:
+                args.input_format = "nexus/newick"
+            else:
+                args.input_format = args.input_format.lower()
+            fpath = os.path.expanduser(os.path.expandvars(fpath))
+            if not os.path.exists(fpath):
+                missing_msg = "Ignoring missing source file: '{}'".format(fpath)
+                if args.ignore_missing_support:
+                    messenger.warning(missing_msg)
+                else:
+                    messenger.error(missing_msg )
+                    messenger.error("Terminating due to missing support files. "
+                            "Use the '--ignore-missing-support' option to continue even "
+                            "if some files are missing.")
+                    sys.exit(1)
+            else:
+                tree_sources.append(fpath)
+    if tree_sources is None:
+        tree_sources = [sys.stdin]
+        messenger.info("Reading trees from standard input")
+        processing_report_lines.append("Trees read from standard input source")
+    elif len(tree_sources) == 0:
+            messenger.error("No valid sources of input trees specified. "
+                    + "Please provide the path to at least one (valid and existing) file "
+                    + "containing tree samples to summarize.")
+            sys.exit(1)
+    else:
+        # messenger.info("{} source(s) to be analyzed and summarized: {}".format(
+        #     len(tree_sources),
+        #     tree_sources))
+        messenger.info("Trees to be read from {} source(s):".format(len(tree_sources)))
+        processing_report_lines.append("Trees read from {} source(s):".format(len(tree_sources)))
+        max_idx_width = int(math.floor(math.log(len(tree_sources), 10))) + 1
+        idx_col_width = (2 * max_idx_width) + 6
+        for tidx, tree_source in enumerate(tree_sources):
+            # index_col = "{idx:>{idx_col_width}}".format(
+            #     idx=" ({}/{}): ".format(tidx+1, len(tree_sources)),
+            #     idx_col_width=idx_col_width,
+            #     )
+            # if tree_source is sys.stdin:
+            #     _bulleted_message_and_log("<standard input>", prefix=index_col)
+            # else:
+            #     _bulleted_message_and_log("'" + tree_source + "'", prefix=index_col)
+            if tree_source is sys.stdin:
+                _bulleted_message_and_log("<standard input>")
+            else:
+                _bulleted_message_and_log("'" + tree_source + "'")
+    if args.burnin:
+        messenger.info("{} initial trees to be discarded/ignored as burn-in from *each* source".format(args.burnin))
+        processing_report_lines.append("{} initial trees discarded/ignored as burn-in from *each* source".format(args.burnin))
+
+    ######################################################################
+    ## Target Validation
+
+    if args.summary_target is not None and args.target_tree_filepath is not None:
+        messenger.error("Cannot specify both '-s'/'--summary-target' and '-t'/'--target-tree-filepath' simultaneously")
+    elif args.target_tree_filepath is not None:
+        target_tree_filepath = os.path.expanduser(os.path.expandvars(args.target_tree_filepath))
+        if not os.path.exists(target_tree_filepath):
+            messenger.error("Target tree file not found: '{}'".format(target_tree_filepath))
+            sys.exit(1)
+    else:
+        target_tree_filepath = None
+        if args.summary_target is None:
+            args.summary_target = "consensus"
+
+    ######################################################################
+    ## Tree Rooting
+
+    if args.root_target_at_outgroup is not None or args.root_target_at_midpoint:
+        if not args.is_source_trees_rooted:
+            messenger.info("Rooting directive specified for target tree(s): source trees will also be treated as rooted")
+            args.is_source_trees_rooted = True
+
+    num_target_rooting_directives = 0
+    if args.root_target_at_outgroup is not None:
+        num_target_rooting_directives += 1
+    if args.set_outgroup is not None:
+        num_target_rooting_directives += 1
+    if args.root_target_at_midpoint:
+        num_target_rooting_directives += 1
+    if num_target_rooting_directives > 1:
+        messenger.error("Only one target tree rooting directive can be specified")
+        sys.exit(1)
+
+    ######################################################################
+    ## Node Age Summarization
+
+    if args.edge_length_summarization in ("mean-age", "median-age"):
+        args.summarize_node_ages = True
+
+    ######################################################################
+    ## Output File Setup
+
+    # legacy
+    if args.trprobs_filepath:
+        messenger.error(
+                "The '--trprobs' or '--calc-tree-probabilities' "
+                "option is no longer supported directly. Use '-x' or "
+                "'--extended-output' to specify an extended suite of "
+                "output, which includes the topology probabilities. "
+                )
+        sys.exit(1)
+    if args.split_edge_map_filepath:
+        messenger.error(
+                "The '--extract-edges' option is no longer supported. "
+                "Use '-x' or '--extended-output' to specify an "
+                "extended suite of output which includes this "
+                "information. "
+                )
+        sys.exit(1)
+
+    # output format
+    if args.output_tree_format is None:
+        if args.input_format is None or args.input_format == "nexus/newick":
+            args.output_tree_format = "nexus"
+        else:
+            args.output_tree_format = args.input_format
+
+    # primary output
+    if args.output_tree_filepath is None:
+        output_dest = sys.stdout
+    else:
+        output_fpath = os.path.expanduser(os.path.expandvars(args.output_tree_filepath))
+        if cli.confirm_overwrite(filepath=output_fpath, replace_without_asking=args.replace):
+            output_dest = open(output_fpath, "w")
+        else:
+            sys.exit(1)
+
+    # extended output
+    extended_output_paths = {}
+    if args.extended_output_prefix is not None:
+        if not args.extended_output_prefix.endswith("."):
+            args.extended_output_prefix += "."
+        for results_key, suffix in (
+                    ("summary-trees", "summary.trees"),
+                    ("topologies", "topologies.trees"),
+                    ("bipartition-trees", "bipartitions.trees"),
+                    ("bipartition-table", "bipartitions.tsv"),
+                    ("edge-lengths", "edge-lengths.tsv"),
+                    ("node-ages", "node-ages.tsv"),
+                ):
+            full_path = args.extended_output_prefix + suffix
+            # if full_path.endswith("trees") and args.output_tree_format == "nexml":
+            #     full_path += ".nexml"
+            if cli.confirm_overwrite(
+                    filepath=full_path,
+                    replace_without_asking=args.replace):
+                extended_output_paths[results_key] = full_path
+            else:
+                sys.exit(1)
+
+    ######################################################################
+    ## Multiprocessing Setup
+
+    num_cpus = multiprocessing.cpu_count()
+    if len(tree_sources) > 1 and args.multiprocess is not None:
+        if (
+                args.multiprocess.lower() == "max"
+                or args.multiprocess == "#"
+                or args.multiprocess == "*"
+            ):
+            num_processes = min(num_cpus, len(tree_sources))
+        # elif args.multiprocess == "@":
+        #     num_processes = len(tree_sources)
+        else:
+            try:
+                num_processes = int(args.multiprocess)
+            except ValueError:
+                messenger.error("'{}' is not a valid number of processes (must be a positive integer)".format(args.multiprocess))
+                sys.exit(1)
+            if num_processes > num_cpus:
+                messenger.warning("Number of requested processes ({}) exceeds number of CPU's ({})".format(num_processes, num_cpus))
+        if num_processes <= 0:
+            messenger.error("Maximum number of processes set to {}: cannot run SumTrees with less than 1 process".format(num_processes))
+            sys.exit(1)
+    else:
+        if args.multiprocess is not None and args.multiprocess > 1:
+            messenger.info("Number of valid sources is less than 2: forcing serial processing")
+        if len(tree_sources) > 1 and num_cpus > 1:
+            messenger.info(
+                    ("Multiple processors ({num_cpus}) available:"
+                    " consider using the '-M' or '-m' options to"
+                    " parallelize processing of trees"
+                    ).format(num_cpus=num_cpus))
+        num_processes = 1
+
+    ######################################################################
+    ## Taxon Discovery
+
+    if args.taxon_name_filepath is not None:
+        with open(os.path.expanduser(os.path.expandvars(args.taxon_name_filepath)), "r") as tnf:
+            taxon_labels = [name.strip() for name in tnf.read().split("\n") if name]
+            taxon_labels = [name for name in taxon_labels if name]
+        taxon_namespace = dendropy.TaxonNamespace(taxon_labels)
+    else:
+        taxon_namespace = None
+
+    ######################################################################
+    ## Main Work
+
+    tree_processor = TreeProcessor(
+            is_source_trees_rooted=args.is_source_trees_rooted,
+            ignore_edge_lengths=False,
+            ignore_node_ages=not args.summarize_node_ages,
+            use_tree_weights=args.weighted_trees,
+            ultrametricity_precision=args.ultrametricity_precision,
+            num_processes=num_processes,
+            log_frequency=args.log_frequency if not args.quiet else 0,
+            messenger=messenger,
+            debug_mode=args.debug_mode,
+            )
+    analysis_time_start = datetime.datetime.now()
+    # messenger.info("Processing of source trees starting at {}".format(
+    #     analysis_time_start,
+    #     ))
+    try:
+        tree_array = tree_processor.analyze_trees(
+                tree_sources=tree_sources,
+                schema=args.input_format,
+                taxon_namespace=taxon_namespace,
+                tree_offset=args.burnin,
+                preserve_underscores=args.preserve_underscores,
+                )
+        if tree_array.split_distribution.is_mixed_rootings_counted():
+            raise TreeArray.IncompatibleRootingTreeArrayUpdate("Mixed rooting states detected in source trees")
+    except KeyboardInterrupt as e:
+        raise e
+    except Exception as exception_object:
+        if isinstance(exception_object, error.MixedRootingError) or isinstance(exception_object, dendropy.TreeArray.IncompatibleRootingTreeArrayUpdate):
+            error_message_epilog = mixed_rooting_solution
+        else:
+            error_message_epilog = ""
+        message = []
+        if hasattr(exception_object, "exception_tree_source_name"):
+            source_name = exception_object.exception_tree_source_name
+            source_offset = exception_object.exception_tree_offset
+            subparts = []
+            if source_name is not None:
+                subparts.append("'{}'".format(source_name))
+                if source_offset is not None:
+                    subparts.append(", tree at offset {}".format(source_offset))
+            else:
+                if source_offset is not None:
+                    subparts.append("Tree at offset {}".format(source_offset))
+            message.append("".join(subparts) + ":")
+        message.append(str(exception_object))
+        if error_message_epilog:
+            if not message[-1].endswith("."):
+                message[-1] = message[-1] + "."
+            message.append(error_message_epilog)
+        message = " ".join(message)
+        messenger.error(message)
+        if args.debug_mode:
+            raise
+        sys.exit(1)
+    analysis_time_end = datetime.datetime.now()
+    analysis_time_delta =  analysis_time_end-analysis_time_start
+    messenger.info("Analysis of source trees completed in: {}".format(timeprocessing.pretty_timedelta(analysis_time_delta),
+        wrap=False,
+        ))
+
+    ######################################################################
+    ## Post-Processing
+
+    ### post-analysis reports
+
+    if len(tree_array) == 0:
+        messenger.error("No trees retained for processing (is the burn-in too high?)")
+        sys.exit(1)
+
+    _message_and_log("Total of {} trees analyzed for summarization:".format(len(tree_array)))
+    if args.weighted_trees:
+        _bulleted_message_and_log("All trees were treated as weighted (default weight = 1.0).")
+    else:
+        _bulleted_message_and_log("All trees were treated as unweighted")
+    if args.is_source_trees_rooted is None:
+        if tree_array.split_distribution.is_all_counted_trees_rooted():
+            _bulleted_message_and_log("All trees were rooted")
+        elif tree_array.split_distribution.is_all_counted_trees_strictly_unrooted():
+            _bulleted_message_and_log("All trees were unrooted")
+        elif tree_array.split_distribution.is_all_counted_trees_treated_as_unrooted():
+            _bulleted_message_and_log("All trees were assumed to be unrooted")
+    elif args.is_source_trees_rooted is True:
+        _bulleted_message_and_log("All trees were treated as rooted")
+    else:
+        _bulleted_message_and_log("All trees were treated as unrooted")
+    # if args.is_source_trees_ultrametric and args.ultrametricity_precision:
+    #     _bulleted_message_and_log("Trees were ultrametric within an error of {}".format(args.ultrametricity_precision))
+    # elif args.is_source_trees_ultrametric:
+    #     _bulleted_message_and_log("Trees were expected to be ultrametric (not verified)")
+    _bulleted_message_and_log("{} unique taxa across all trees".format(len(tree_array.taxon_namespace)))
+    num_splits, num_unique_splits, num_nt_splits, num_nt_unique_splits = tree_array.split_distribution.splits_considered()
+    if args.weighted_trees:
+        _bulleted_message_and_log("{} unique splits with a total weight of {}".format(num_unique_splits, num_splits))
+        _bulleted_message_and_log("{} unique non-trivial splits with a total weight of {}".format(num_nt_unique_splits, num_nt_splits))
+    else:
+        _bulleted_message_and_log("{} unique splits out of a total of {} splits".format(num_unique_splits, int(num_splits)))
+        _bulleted_message_and_log("{} unique non-trivial splits counted out of a total of non-trivial {} splits".format(num_nt_unique_splits, int(num_nt_splits)))
+
+    ### build target tree(s)
+    target_trees = dendropy.TreeList(taxon_namespace=tree_array.taxon_namespace)
+    if target_tree_filepath is None:
+        args.include_external_splits_when_scoring_clade_credibility_tree = False
+        if args.include_external_splits_when_scoring_clade_credibility_tree:
+            coda = ", including tip clades"
+        else:
+            coda = ""
+        if args.summary_target is None:
+            args.summary_target = "consensus"
+        if args.summary_target == "consensus":
+            tree = tree_array.consensus_tree(min_consensus_freq=args.min_consensus_freq, summarize_splits=False)
+            msg = "Summarized onto consensus tree with minimum clade frequency threshold of {}:".format(args.min_consensus_freq)
+        elif args.summary_target == "mcct" or args.summary_target == "mcc":
+            tree = tree_array.maximum_product_of_split_support_tree(
+                    include_external_splits=args.include_external_splits_when_scoring_clade_credibility_tree,
+                    summarize_splits=False)
+            msg = "Summarized onto Maximum Credibility Tree (i.e., tree given in sources that maximizes the product of clade credibilities{}):".format(coda)
+        elif args.summary_target == "msct":
+            tree = tree_array.maximum_sum_of_split_support_tree(
+                    include_external_splits=args.include_external_splits_when_scoring_clade_credibility_tree,
+                    summarize_splits=False)
+            msg = "Summarized onto Maximum Sum of Credibilities Tree (i.e., tree given in sources that maximizes the sum of clade credibilities{}):".format(coda)
+        else:
+            raise ValueError(args.summary_target)
+        target_trees.append(tree)
+        _message_and_log(msg, wrap=True)
+    else:
+        try:
+            if not args.allow_unknown_target_tree_taxa:
+                tree_array.taxon_namespace.is_mutable = False
+            # we go through the yielder because it can handle the 'nexus/newick'
+            # schema; TreeList.get_from_*() etc. does not (yet)
+            is_target_trees_rooted = None
+            for tree_idx, tree in enumerate(dendropy.Tree.yield_from_files(
+                    files=[target_tree_filepath],
+                    schema=args.input_format,
+                    rooting=dendropy.get_rooting_argument(is_rooted=args.is_source_trees_rooted),
+                    preserve_underscores=args.preserve_underscores,
+                    taxon_namespace=target_trees.taxon_namespace,
+                    )):
+                if args.root_target_at_outgroup is not None or args.root_target_at_midpoint:
+                    tree.is_rooted = True
+                if tree.is_rooted is not tree_array.is_rooted_trees:
+                    messenger.error("Target trees rooting state do not match source trees rooting state. " + mixed_rooting_solution)
+                    sys.exit(1)
+                if tree_idx > 0:
+                    if tree.is_rooted is not is_target_trees_rooted:
+                        messenger.error("Mixed rooting states detected in target trees. " + mixed_rooting_solution)
+                        sys.exit(1)
+                is_target_trees_rooted = tree.is_rooted
+                tree.encode_bipartitions()
+                target_trees.append(tree)
+        except (Exception, KeyboardInterrupt) as e:
+            if isinstance(e, dendropy.utility.error.ImmutableTaxonNamespaceError):
+                message = "Target trees have one or more taxon names not seen in sources: {}".format(e)
+            else:
+                message = str(e)
+            messenger.error(message)
+            if args.debug_mode:
+                raise
+            sys.exit(1)
+        if len(target_trees) > 1:
+            msg = "Summarizing onto {} target trees".format(len(target_trees))
+        else:
+            msg = "Summarizing onto target tree".format(len(target_trees))
+        msg += " defined in '{}':".format(target_tree_filepath)
+        _message_and_log(msg, wrap=False)
+
+    ###  rooting
+
+    if args.root_target_at_outgroup is not None or args.set_outgroup is not None:
+        if args.root_target_at_outgroup is not None:
+            outgroup_label = args.root_target_at_outgroup
+        elif args.set_outgroup is not None:
+            outgroup_label = args.set_outgroup
+        if args.input_format in ("nexus/newick", "nexus", "newick"):
+            if not args.preserve_underscores:
+                outgroup_label = outgroup_label.replace("_", " ")
+        for tree in target_trees:
+            outgroup_node = tree.find_node_with_taxon_label(outgroup_label)
+            if outgroup_node is None:
+                messenger.error("Cannot locate node with outgroup taxon '{}' on target tree".format(outgroup_label))
+                sys.exit(1)
+            tree.to_outgroup_position(
+                    outgroup_node=outgroup_node,
+                    update_bipartitions=True,
+                    suppress_unifurcations=True)
+            if args.root_target_at_outgroup is not None:
+                tree.is_rooted = True
+        if args.root_target_at_outgroup is not None:
+            _bulleted_message_and_log("Target tree(s) rerooted using outgroup: '{}'".format(outgroup_label))
+        elif args.set_outgroup is not None:
+            _bulleted_message_and_log("Target tree(s) rotated to set outgroup: '{}'".format(outgroup_label))
+    elif args.root_target_at_midpoint:
+        for tree in target_trees:
+            tree.reroot_at_midpoint(
+                    update_bipartitions=True,
+                    suppress_unifurcations=True)
+        _bulleted_message_and_log("Target tree(s) rerooted at midpoint")
+
+    ###  set up summarization regime
+
+    split_summarization_kwargs = {}
+    if not args.support_as_percentages:
+        _bulleted_message_and_log("Support values expressed as proportions or probabilities")
+        if args.support_label_decimals < 2:
+            messenger.warning("Reporting support by proportions require that support will be reported to at least 2 decimal places")
+            args.support_label_decimals = 2
+    else:
+        _bulleted_message_and_log("Support values expressed as percentages")
+    split_summarization_kwargs["support_as_percentages"] = args.support_as_percentages
+    split_summarization_kwargs["support_label_decimals"] = args.support_label_decimals
+    if args.support_as_labels:
+        split_summarization_kwargs["set_support_as_node_label"] = True
+    if args.node_labels == "support":
+        split_summarization_kwargs["set_support_as_node_label"] = True
+    else:
+        split_summarization_kwargs["set_support_as_node_label"] = False
+
+    if args.edge_length_summarization is None:
+        if target_tree_filepath:
+            args.edge_length_summarization = "keep"
+        elif args.summarize_node_ages:
+            args.edge_length_summarization = "mean-age"
+        else:
+            args.edge_length_summarization = "mean-length"
+    if args.edge_length_summarization == "mean-length":
+        _bulleted_message_and_log("Edge lengths on target trees set to mean of edge lengths in sources")
+    elif args.edge_length_summarization == "median-length":
+        _bulleted_message_and_log("Edge lengths on target trees set to median of edge lengths in sources")
+    elif args.edge_length_summarization == "mean-age":
+        _bulleted_message_and_log("Node ages on target trees set to mean of node ages in sources")
+    elif args.edge_length_summarization == "median-age":
+        _bulleted_message_and_log("Node ages on target trees set to median of node ages in sources")
+    elif args.edge_length_summarization == "support":
+        _bulleted_message_and_log("Edge lengths on target trees set to support values of corresponding split")
+    elif args.edge_length_summarization == "keep":
+        _bulleted_message_and_log("Edge lengths as given on target trees")
+    elif args.edge_length_summarization == "clear":
+        _bulleted_message_and_log("Edge lengths cleared from target trees")
+    else:
+        raise ValueError(args.edge_length_summarization)
+    split_summarization_kwargs["set_edge_lengths"] = args.edge_length_summarization
+
+    split_summarization_kwargs["error_on_negative_edge_lengths"] = False
+    if args.collapse_negative_edges:
+        split_summarization_kwargs["minimum_edge_length"] = 0.0
+        _bulleted_message_and_log("Negative edge lengths collapsed to 0.0 (may result in non-ultrametric trees)")
+    elif args.force_minimum_edge_length is not None:
+        split_summarization_kwargs["minimum_edge_length"] = args.force_minimum_edge_length
+        _bulleted_message_and_log("Edge lengths less than {val} set to {val} (may result in non-ultrametric trees)".format(val=args.force_minimum_edge_length))
+
+    if args.suppress_annotations:
+        split_summarization_kwargs["add_support_as_node_attribute"] = False
+        split_summarization_kwargs["add_support_as_node_annotation"] = False
+        split_summarization_kwargs["add_node_age_summaries_as_node_attributes"] = False
+        split_summarization_kwargs["add_node_age_summaries_as_node_annotations"] = False
+        split_summarization_kwargs["add_edge_length_summaries_as_edge_attributes"] = False
+        split_summarization_kwargs["add_edge_length_summaries_as_edge_annotations"] = False
+        _bulleted_message_and_log("Metadata annotations NOT added to target trees as metadata".format())
+    else:
+        split_summarization_kwargs["add_support_as_node_attribute"] = True
+        split_summarization_kwargs["add_support_as_node_annotation"] = True
+        split_summarization_kwargs["add_node_age_summaries_as_node_attributes"] = True
+        split_summarization_kwargs["add_node_age_summaries_as_node_annotations"] = True
+        split_summarization_kwargs["add_edge_length_summaries_as_edge_attributes"] = True
+        split_summarization_kwargs["add_edge_length_summaries_as_edge_annotations"] = True
+        _bulleted_message_and_log("Support and other summarization annotations added to target trees as metadata".format())
+
+    for tree in target_trees:
+        tree_array.summarize_splits_on_tree(
+                tree=tree,
+                is_bipartitions_updated=True,
+                **split_summarization_kwargs)
+        if args.node_labels == "clear":
+            for nd in tree:
+                nd.label = None
+
+    main_time_end = datetime.datetime.now()
+
+    ###################################################
+    #  Primary Output
+
+    ## set up file-level annotations
+    final_run_report = []
+    final_run_report.append("Started at: {}".format(main_time_start.isoformat(' ')))
+    final_run_report.append("Ended at: {}".format(main_time_end.isoformat(' ')))
+    final_run_report.append("Total elapsed time: {}".format(
+        timeprocessing.pretty_timedelta(main_time_end-main_time_start),
+        ))
+    final_run_report.append("Actual analysis time: {}".format(
+        timeprocessing.pretty_timedelta(analysis_time_delta),
+        ))
+
+    if not args.suppress_analysis_metainformation:
+        summarization_metainfo = []
+
+        summarization_metainfo.append("")
+        summarization_metainfo.append("Summarization Information")
+        summarization_metainfo.append("-------------------------")
+        summarization_metainfo.extend(processing_report_lines)
+
+        summarization_metainfo.append("")
+        summarization_metainfo.append("Program Information")
+        summarization_metainfo.append("-------------------")
+        summarization_metainfo.append("{} {} by {}".format(_program_name, _program_version, _program_author))
+        summarization_metainfo.append("Using {}, located at: '{}'".format(dendropy.description(), dendropy.homedir()))
+        python_version = sys.version.replace("\n", "").replace("[", "(").replace("]",")")
+        summarization_metainfo.append("Running under Python {}, located at: '{}'".format(python_version, sys.executable))
+
+        summarization_metainfo.append("")
+        summarization_metainfo.append("Execution Information")
+        summarization_metainfo.append("---------------------")
+        try:
+            username = getpass.getuser()
+        except:
+            username = "<user>"
+        summarization_metainfo.append("Executed on {} by {}@{}".format(platform.node(), username, socket.gethostname()))
+        summarization_metainfo.append("Working directory: '{}'".format(os.getcwd()))
+        summarization_metainfo.extend(final_run_report)
+
+        summarization_metainfo.append("")
+        summarization_metainfo.append("Citation Information")
+        summarization_metainfo.append("--------------------")
+        summarization_metainfo.append("")
+        citation = cli.compose_citation_for_program(
+                prog_name=_program_name,
+                prog_version=_program_version,
+                additional_citations=[_program_citation],
+                include_preamble=False,
+                include_epilog=False,
+                )
+        summarization_metainfo.extend(citation)
+        summarization_metainfo.append("")
+
+        if args.additional_comments:
+            summarization_metainfo.append("")
+            summarization_metainfo.append("Additional Remarks")
+            summarization_metainfo.append("------------------")
+            summarization_metainfo.append(args.additional_comments)
+    else:
+        summarization_metainfo = []
+
+    ### PRIMARY OUTPUT
+    if not args.suppress_analysis_metainformation:
+        primary_output_metainfo = []
+        primary_output_metainfo.append("=============")
+        primary_output_metainfo.append("Summary Trees")
+        primary_output_metainfo.append("=============")
+        primary_output_metainfo.append("")
+        primary_output_metainfo.append("Summary trees generated by SumTrees.")
+        primary_output_metainfo.extend(summarization_metainfo)
+    else:
+        primary_output_metainfo = []
+    if hasattr(output_dest, "name"):
+        messenger.info("Writing primary results to: '{}'".format(output_dest.name))
+    else:
+        messenger.info("Writing primary results to standard output".format(output_dest.name))
+    _write_trees(trees=target_trees,
+            output_dest=output_dest,
+            args=args,
+            file_comments=primary_output_metainfo)
+
+    ### EXTENDED OUTPUT
+    if extended_output_paths:
+
+        messenger.info("Calculating extended summarization results")
+
+        #### get data: topologies
+        topologies = tree_array.topologies(
+                sort_descending=True,
+                frequency_attr_name="frequency",
+                frequency_annotation_name="frequency",
+                )
+
+        #### get data: bipartitions
+        all_taxa_bitmask = tree_array.taxon_namespace.all_taxa_bitmask()
+        seen_split_bitmasks = set()
+        all_bipartitions = collections.OrderedDict()
+        bipartition_table = []
+        bipartitions_as_trees = dendropy.TreeList(taxon_namespace=tree_array.taxon_namespace)
+        # bipartition_stats_fieldname_map
+        # biparitition_table_fieldnames = [
+        #         "bipartitionId",
+        #         "bipartitionGroup",
+        #         "frequency",
+        # ]
+        # for stat_fieldname in SplitDistribution.SUMMARY_STATS_FIELDNAMES:
+        #     f = textprocessing.camel_case("{}_{}".format(summary_stat_prefix, stat_fieldname))
+        #     bipartition_table_fieldnames.append(f)
+        _inf = float("inf")
+        def _add_split_bitmask_data(split_bitmask):
+
+            # do not add if already accessioned
+            if split_bitmask in seen_split_bitmasks:
+                return
+            seen_split_bitmasks.add(split_bitmask)
+
+            # create bipartition from split
+            bipartition = dendropy.Bipartition(
+                    leafset_bitmask=split_bitmask,
+                    tree_leafset_bitmask=all_taxa_bitmask,
+                    is_rooted=tree_array.is_rooted_trees,
+                    is_mutable=False,
+                    compile_bipartition=True)
+            bipartition_newick_str = bipartition.leafset_as_newick_string(
+                    tree_array.taxon_namespace,
+                    preserve_spaces=True if args.preserve_underscores else False,
+                    quote_underscores=False if args.preserve_underscores else True,
+                    )
+
+            # bipartition table
+            bipartition_data = collections.OrderedDict()
+            bipartition_data["bipartitionGroup"] = bipartition.leafset_as_bitstring(
+                    symbol0=".",
+                    symbol1="*",
+                    reverse=True,
+                    )
+            bipartition_data["bipartitionId"] = bipartition.split_bitmask
+            bipartition_data["bipartitionBitmask"] = bipartition.split_as_bitstring(
+                    symbol0="0",
+                    symbol1="1",
+                    reverse=False,
+                    )
+            bipartition_data["bipartitionLeafset"] = bipartition.leafset_as_bitstring(
+                    symbol0="0",
+                    symbol1="1",
+                    reverse=False,
+                    )
+            bipartition_data["count"] = tree_array.split_distribution.split_counts[split_bitmask]
+            bipartition_data["frequency"] = tree_array.split_distribution[split_bitmask]
+            for summary_stat_prefix, summary_source in (
+                    ("edge_length", tree_array.split_distribution.split_edge_length_summaries),
+                    ("node_age", tree_array.split_distribution.split_node_age_summaries),
+                    ):
+                if not summary_source:
+                    continue
+                for stat_fieldname in dendropy.SplitDistribution.SUMMARY_STATS_FIELDNAMES:
+                    f = textprocessing.camel_case("{}_{}".format(summary_stat_prefix, stat_fieldname))
+                    if split_bitmask in summary_source:
+                        value = summary_source[split_bitmask].get(stat_fieldname, 0.0)
+                    else:
+                        value = None
+                    if value is None:
+                        if stat_fieldname in ("hpd95", "quant_5_95", "range"):
+                            value = (0.0, 0.0)
+                        else:
+                            value = 0.0
+                    elif value == _inf:
+                        value = 0.0
+                    if isinstance(value, list) or isinstance(value, tuple):
+                        for sub_f, sub_value in zip(("Min", "Max"), sorted(value)):
+                            bipartition_data[f+sub_f] = sub_value
+                    else:
+                        bipartition_data[f] = value
+            bipartition_data["newick"] = '"{}"'.format(bipartition_newick_str)
+            bipartition_table.append(bipartition_data)
+
+            # bipartition as tree
+            tree = dendropy.Tree.get_from_string(
+                    bipartition_newick_str,
+                    "newick",
+                    taxon_namespace=tree_array.taxon_namespace,
+                    rooting=dendropy.get_rooting_argument(is_rooted=tree_array.is_rooted_trees),
+                    extract_comment_metadata=False,
+                    )
+            tree.label = "Bipartition{}".format(bipartition_data["bipartitionId"])
+            # tree.label = "Bipartition{}".format(bipartition.split_as_bitstring())
+            tree.weight = bipartition_data["frequency"]
+            # tree.seed_node.annotations.add_new("bipartitionId",
+            #         '"{}"'.format(bipartition_data["bipartitionId"]))
+            # tree_array.summarize_splits_on_tree(
+            #         tree=tree,
+            #         is_bipartitions_updated=False,
+            #         **split_summarization_kwargs)
+            for key in bipartition_data:
+                if key in ("newick", ):
+                    continue
+                value = bipartition_data[key]
+                if key in ("bipartitionId", "bipartitionBitmask", "bitpartitionLeafset"):
+                    # FigTree cannot cast bigger integers values to float
+                    value = '"{}"'.format(value)
+                tree.seed_node.annotations.add_new(
+                        textprocessing.snake_case(key),
+                        value)
+            bipartitions_as_trees.append(tree)
+
+            all_bipartitions[bipartition] = bipartition_data
+            return bipartition
+
+        # this is to preserve order seen in Mr. Bayes
+        _add_split_bitmask_data(all_taxa_bitmask)
+        for taxon in tree_array.taxon_namespace:
+            split_bitmask = tree_array.taxon_namespace.taxon_bitmask(taxon)
+            _add_split_bitmask_data(split_bitmask)
+
+        # add the rest in order
+        sd_split_bitmasks = list(tree_array.split_distribution.split_counts.keys())
+        sd_split_bitmasks.sort(key=lambda x: tree_array.split_distribution.split_counts[x], reverse=True)
+        for split_bitmask in sd_split_bitmasks:
+            _add_split_bitmask_data(split_bitmask)
+
+        #### EXTENDED OUTPUT: topologies / trprobs
+        if not args.suppress_analysis_metainformation:
+            metainfo = []
+            metainfo.append("======================")
+            metainfo.append("Topology Probabilities")
+            metainfo.append("======================")
+            metainfo.append("")
+            metainfo.append("\n".join((
+                    "Topologies in the source set of trees, listing in",
+                    "descending order of frequency with an indication ",
+                    "of their individual frequencies ('frequency') and",
+                    "cumulative frequencies ('cumulative_frequency'). ",
+                    )))
+            metainfo.extend(summarization_metainfo)
+        else:
+            metainfo = []
+        output_path = extended_output_paths["topologies"]
+        messenger.info("Writing topologies to: '{}'".format(output_path))
+        cumulative_frequency = 0.0
+        for tree in topologies:
+            tree.weight = tree.frequency
+            cumulative_frequency += tree.frequency
+            tree.cumulative_frequency = cumulative_frequency
+            tree.annotations.add_bound_attribute("cumulative_frequency")
+            # tree_array.summarize_splits_on_tree(
+            #         tree=tree,
+            #         is_bipartitions_updated=True,
+            #         support_as_percentages=args.support_as_percentages,
+            #         support_label_decimals=args.support_as_percentages,
+            #         add_support_as_node_annotation=not args.suppress_annotations,
+            #         add_node_age_summaries_as_node_attributes=False,
+            #         add_node_age_summaries_as_node_annotations=False,
+            #         add_edge_length_summaries_as_edge_attributes=False,
+            #         add_edge_length_summaries_as_edge_annotations=False,
+            #         )
+        with open(output_path, "w") as out:
+            _write_trees(trees=topologies,
+                    output_dest=out,
+                    args=args,
+                    file_comments=metainfo)
+
+        #### EXTENDED OUTPUT: bipartition trees
+        if not args.suppress_analysis_metainformation:
+            metainfo = []
+            metainfo.append("============")
+            metainfo.append("Bipartitions")
+            metainfo.append("============")
+            metainfo.append("")
+            metainfo.append("\n".join((
+                    "Bipartitions in the source set of trees, represented ",
+                    "as trees, with information summarized from the source ",
+                    "set of trees annotated as metadata.",
+                    )))
+            metainfo.extend(summarization_metainfo)
+        else:
+            metainfo = []
+        output_path = extended_output_paths["bipartition-trees"]
+        messenger.info("Writing bipartition trees to: '{}'".format(output_path))
+        with open(output_path, "w") as out:
+            _write_trees(trees=bipartitions_as_trees,
+                    output_dest=out,
+                    args=args,
+                    file_comments=metainfo)
+
+        #### EXTENDED OUTPUT: bipartition table
+        output_path = extended_output_paths["bipartition-table"]
+        messenger.info("Writing bipartition table to: '{}'".format(output_path))
+        sample_row = list(bipartition_table[0].keys())
+        with open(output_path, "w") as out:
+            writer = csv.DictWriter(
+                    out,
+                    fieldnames=sample_row,
+                    lineterminator=os.linesep,
+                    delimiter="\t",
+                    )
+            writer.writeheader()
+            writer.writerows(bipartition_table)
+
+        #### EXTENDED OUTPUT: edge lengths and node ages
+        bipartition_table_keys_to_import = [
+                "bipartitionGroup",
+                "bipartitionId",
+                "bipartitionBitmask",
+                "bipartitionLeafset",
+                "frequency",
+                ]
+
+        #### EXTENDED OUTPUT: edge lengths
+        if tree_array.split_distribution.split_edge_lengths:
+            rows = []
+            for b in all_bipartitions:
+                entry = collections.OrderedDict()
+                for key in bipartition_table_keys_to_import:
+                    entry[key] = all_bipartitions[b][key]
+                # entry["bipartitionId"] = all_bipartitions[b]["bipartitionId"]
+                try:
+                    value_list = tree_array.split_distribution.split_edge_lengths[b.split_bitmask]
+                except KeyError:
+                    value_list = []
+                entry["edgeCount"] = len(value_list)
+                entry["edgeLengths"] = ",".join(str(v) for v in value_list)
+                rows.append(entry)
+            output_path = extended_output_paths["edge-lengths"]
+            messenger.info("Writing edge set to: '{}'".format(output_path))
+            with open(output_path, "w") as out:
+                writer = csv.DictWriter(
+                        out,
+                        fieldnames=bipartition_table_keys_to_import + ["edgeCount", "edgeLengths"],
+                        lineterminator=os.linesep,
+                        delimiter="\t",
+                        )
+                writer.writeheader()
+                writer.writerows(rows)
+
+        #### EXTENDED OUTPUT: node ages
+        if tree_array.split_distribution.split_node_ages:
+            rows = []
+            for b in all_bipartitions:
+                entry = collections.OrderedDict()
+                for key in bipartition_table_keys_to_import:
+                    entry[key] = all_bipartitions[b][key]
+                try:
+                    value_list = tree_array.split_distribution.split_node_ages[b.split_bitmask]
+                except KeyError:
+                    value_list = []
+                entry["nodeCount"] = len(value_list)
+                entry["nodeAges"] = ",".join(str(v) for v in value_list)
+                rows.append(entry)
+            output_path = extended_output_paths["node-ages"]
+            messenger.info("Writing edge set to: '{}'".format(output_path))
+            with open(output_path, "w") as out:
+                writer = csv.DictWriter(
+                        out,
+                        fieldnames=bipartition_table_keys_to_import + ["nodeCount", "nodeAges"],
+                        lineterminator=os.linesep,
+                        delimiter="\t",
+                        )
+                writer.writeheader()
+                writer.writerows(rows)
+
+    ###################################################
+    #  WRAP UP
+
+    messenger.info("Summarization completed")
+    messenger.info_lines(final_run_report)
+    messenger.silent = True
+
+if __name__ == '__main__':
+    try:
+        main()
+    except KeyboardInterrupt:
+        # On Ctrl-C, exit with a short message (and non-zero status)
+        # instead of dumping a traceback.
+        sys.exit("\n(Terminating due to user interrupt signal)")
diff --git a/dendropy/__init__.py b/dendropy/__init__.py
new file mode 100644
index 0000000..3698910
--- /dev/null
+++ b/dendropy/__init__.py
@@ -0,0 +1,196 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+import sys
+
+###############################################################################
+## Populate the 'dendropy' namespace
+
+from dendropy.dataio.nexusprocessing import get_rooting_argument
+from dendropy.datamodel.taxonmodel import Taxon
+from dendropy.datamodel.taxonmodel import TaxonNamespace
+from dendropy.datamodel.taxonmodel import TaxonNamespacePartition
+from dendropy.datamodel.taxonmodel import TaxonNamespaceMapping
+from dendropy.datamodel.taxonmodel import TaxonSet # Legacy
+from dendropy.datamodel.treemodel import Bipartition
+from dendropy.datamodel.treemodel import Edge
+from dendropy.datamodel.treemodel import Node
+from dendropy.datamodel.treemodel import Tree
+from dendropy.datamodel.treecollectionmodel import TreeList
+from dendropy.datamodel.treecollectionmodel import SplitDistribution
+from dendropy.datamodel.treecollectionmodel import TreeArray
+from dendropy.datamodel.charstatemodel import StateAlphabet
+from dendropy.datamodel.charstatemodel import DNA_STATE_ALPHABET
+from dendropy.datamodel.charstatemodel import RNA_STATE_ALPHABET
+from dendropy.datamodel.charstatemodel import NUCLEOTIDE_STATE_ALPHABET
+from dendropy.datamodel.charstatemodel import PROTEIN_STATE_ALPHABET
+from dendropy.datamodel.charstatemodel import BINARY_STATE_ALPHABET
+from dendropy.datamodel.charstatemodel import RESTRICTION_SITES_STATE_ALPHABET
+from dendropy.datamodel.charstatemodel import INFINITE_SITES_STATE_ALPHABET
+from dendropy.datamodel.charstatemodel import new_standard_state_alphabet
+from dendropy.datamodel.charmatrixmodel import CharacterMatrix
+from dendropy.datamodel.charmatrixmodel import CharacterDataSequence
+from dendropy.datamodel.charmatrixmodel import DnaCharacterMatrix
+from dendropy.datamodel.charmatrixmodel import RnaCharacterMatrix
+from dendropy.datamodel.charmatrixmodel import NucleotideCharacterMatrix
+from dendropy.datamodel.charmatrixmodel import ProteinCharacterMatrix
+from dendropy.datamodel.charmatrixmodel import RestrictionSitesCharacterMatrix
+from dendropy.datamodel.charmatrixmodel import InfiniteSitesCharacterMatrix
+from dendropy.datamodel.charmatrixmodel import StandardCharacterMatrix
+from dendropy.datamodel.charmatrixmodel import ContinuousCharacterMatrix
+from dendropy.datamodel.datasetmodel import DataSet
+from dendropy.utility.error import ImmutableTaxonNamespaceError
+from dendropy.utility.error import DataError
+from dendropy.utility.error import DataParseError
+from dendropy.utility.error import UnsupportedSchemaError
+from dendropy.utility.error import UnspecifiedSchemaError
+from dendropy.utility.error import UnspecifiedSourceError
+from dendropy.utility.error import TooManyArgumentsError
+from dendropy.utility.error import InvalidArgumentValueError
+from dendropy.utility.error import MultipleInitializationSourceError
+from dendropy.utility.error import TaxonNamespaceIdentityError
+from dendropy.utility.error import TaxonNamespaceReconstructionError
+from dendropy.utility.error import UltrametricityError
+from dendropy.utility.error import TreeSimTotalExtinctionException
+from dendropy.utility.error import SeedNodeDeletionException
+from dendropy.utility import deprecate
+
+
+###############################################################################
+## Legacy Support
+
+from dendropy.legacy import coalescent
+from dendropy.legacy import continuous
+from dendropy.legacy import treecalc
+from dendropy.legacy import popgensim
+from dendropy.legacy import popgenstat
+from dendropy.legacy import reconcile
+from dendropy.legacy import seqmodel
+from dendropy.legacy import seqsim
+from dendropy.legacy import treecalc
+from dendropy.legacy import treemanip
+from dendropy.legacy import treesim
+from dendropy.legacy import treesplit
+from dendropy.legacy import treesum
+
###############################################################################
## PACKAGE METADATA
import collections
# Structured version record; field names mirror ``sys.version_info``.
version_info = collections.namedtuple("dendropy_version_info",
        ["major", "minor", "micro", "releaselevel"])(
                major=4,
                minor=0,
                micro=2,
                releaselevel=""
                )
__project__ = "DendroPy"
# Dotted version string; empty components (e.g. the blank releaselevel) are dropped.
__version__ = ".".join(str(s) for s in version_info[:4] if s != "")
__author__ = "Jeet Sukumaran and Mark T. Holder"
__copyright__ = "Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder."
__citation__ = "Sukumaran, J and MT Holder. 2010. DendroPy: a Python library for phylogenetic computing. Bioinformatics 26: 1569-1571."
PACKAGE_VERSION = __version__ # for backwards compatibility (with sate)
+
def _get_revision_object():
    """Return a ``vcsinfo.Revision`` describing the repository at the package home."""
    # Imported lazily so ordinary library use never pays the VCS-probe cost.
    from dendropy.utility import vcsinfo
    return vcsinfo.Revision(repo_path=homedir())
+
def revision_description():
    """Return " (<revision>)" if VCS information is available, else the empty string."""
    revision = _get_revision_object()
    if not revision.is_available:
        return ""
    return " ({})".format(revision)
+
def name():
    """Return the display name: project, version, and any VCS revision suffix."""
    suffix = revision_description()
    return "{} {}{}".format(__project__, __version__, suffix)
+
def homedir():
    """
    Return the filesystem directory in which this package resides.

    Prefers the package search path (``__path__``); falls back to the
    directory containing this module file. Returns ``None`` only if path
    introspection fails entirely (e.g., frozen deployments).

    Fix: when ``__path__`` is not defined (module not imported as a
    package), the lookup raises ``NameError`` — which the old inner
    handlers (``AttributeError``/``IndexError``) did not catch, so the
    function fell through to the bare ``except`` and returned ``None``
    instead of the module directory. ``NameError`` is now handled by the
    fallback, and the bare ``except`` is narrowed to ``Exception``.
    """
    import os
    try:
        try:
            __homedir__ = __path__[0]
        except (AttributeError, IndexError, NameError):
            # Not a regular package (or empty search path): use the
            # location of this module file instead.
            __homedir__ = os.path.dirname(os.path.abspath(__file__))
    except Exception:
        # Path introspection can fail entirely (e.g., frozen executables);
        # signal "unknown" rather than raising.
        __homedir__ = None
    return __homedir__
+
def description(dest=None):
    """
    Write a multi-line diagnostic summary — package version and location,
    Python version, executable, and site-packages directories — to
    ``dest`` (default: standard output).
    """
    import sys
    import site
    if dest is None:
        dest = sys.stdout
    info = collections.OrderedDict()
    info["DendroPy version"] = name()
    info["DendroPy location"] = homedir()
    info["Python version"] = sys.version.replace("\n", "")
    info["Python executable"] = sys.executable
    try:
        info["Python site packages"] = site.getsitepackages()
    except:
        # Not available on all platforms/virtualenvs; omit the field.
        pass
    label_width = max(len(label) for label in info) + 2
    for label, value in info.items():
        dest.write("{0:{1}}: {2}\n".format(label, label_width, value))
+
def citation_info(include_preamble=True, width=76):
    """
    Return the DendroPy citation as a list of text lines wrapped to
    ``width`` columns. If ``include_preamble`` is True, an explanatory
    preamble (followed by a blank line) precedes the citation itself.
    """
    import textwrap
    lines = []
    if include_preamble:
        preamble = (
                "If any stage of your work or analyses relies"
                " on code or programs from this library, either"
                " directly or indirectly (e.g., through usage of"
                " your own or third-party programs, pipelines, or"
                " toolkits which use, rely on, incorporate, or are"
                " otherwise primarily derivative of code/programs"
                " in this library), please cite:"
                )
        lines.extend(textwrap.wrap(preamble, width=width))
        lines.append("")
    lines.extend(textwrap.wrap(
            __citation__,
            width=width,
            initial_indent="  ",
            subsequent_indent="    ",
            ))
    return lines
+
def tree_source_iter(*args, **kwargs):
    """Removed in DendroPy 4; always raises ``NotImplementedError``."""
    raise NotImplementedError(
            "No longer supported in DendroPy 4: Instead of 'tree_source_iter()', use 'Tree.yield_from_files()' instead")
+
def multi_tree_source_iter(*args, **kwargs):
    """Removed in DendroPy 4; always raises ``NotImplementedError``."""
    raise NotImplementedError(
            "No longer supported in DendroPy 4: Instead of 'multi_tree_source_iter()', use 'Tree.yield_from_files()' instead")
+
# Running this module directly prints version/installation diagnostics.
if __name__ == "__main__":
    description(sys.stdout)
+
+
+
diff --git a/dendropy/__main__.py b/dendropy/__main__.py
new file mode 100644
index 0000000..baa8d3e
--- /dev/null
+++ b/dendropy/__main__.py
@@ -0,0 +1,23 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+import sys
+import dendropy
+
# "python -m dendropy" prints version and installation diagnostics.
if __name__ == "__main__":
    dendropy.description(sys.stdout)
diff --git a/dendropy/calculate/__init__.py b/dendropy/calculate/__init__.py
new file mode 100644
index 0000000..442f116
--- /dev/null
+++ b/dendropy/calculate/__init__.py
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
diff --git a/dendropy/calculate/mathfn.py b/dendropy/calculate/mathfn.py
new file mode 100644
index 0000000..a761279
--- /dev/null
+++ b/dendropy/calculate/mathfn.py
@@ -0,0 +1,36 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Some common mathematical functions.
+"""
+
def gcd(a, b):
    """Return the greatest common divisor of ``a`` and ``b`` via Euclid's algorithm."""
    while b != 0:
        a, b = b, a % b
    return a
+
def lcm(a, b):
    """Return the lowest common multiple of ``a`` and ``b``."""
    divisor = gcd(a, b)
    return a * b // divisor
+
def LCM(*args):
    """Return the lowest common multiple of all of ``args``.

    Fix: ``reduce`` is not a builtin under Python 3 and raised
    ``NameError`` here; it must be imported from ``functools`` (where it
    is also available under Python 2.6+).
    """
    from functools import reduce
    return reduce(lcm, args)
+
diff --git a/dendropy/calculate/popgenstat.py b/dendropy/calculate/popgenstat.py
new file mode 100644
index 0000000..7eab7d6
--- /dev/null
+++ b/dendropy/calculate/popgenstat.py
@@ -0,0 +1,414 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Population genetic statistics.
+"""
+
+import math
+import dendropy
+from dendropy.calculate import probability
+
+###############################################################################
+## internal functions: generally taking lower-level data, such as sequences etc.
+###############################################################################
+
+def _count_differences(char_sequences, state_alphabet, ignore_uncertain=True):
+    """
+    Returns pair of values: total number of pairwise differences observed between
+    all sequences, and mean number of pairwise differences pair base.
+    """
+    sum_diff = 0.0
+    mean_diff = 0.0
+    sq_diff = 0.0
+    total_counted = 0
+    comps = 0
+    if ignore_uncertain:
+        attr = "fundamental_indexes_with_gaps_as_missing"
+        states_to_ignore = set([state_alphabet.gap_state, state_alphabet.no_data_state])
+    else:
+        attr = "fundamental_indexes"
+        states_to_ignore = set()
+    for vidx, i in enumerate(char_sequences[:-1]):
+        for j in char_sequences[vidx+1:]:
+            if len(i) != len(j):
+                raise Exception("sequences of unequal length")
+            diff = 0
+            counted = 0
+            comps += 1
+            for cidx, c in enumerate(i):
+                c1 = c
+                c2 = j[cidx]
+                if c1 in states_to_ignore or c2 in states_to_ignore:
+                    continue
+                counted += 1
+                total_counted += 1
+                f1 = getattr(c1, attr)
+                f2 = getattr(c2, attr)
+                if f1 != f2:
+                    diff += 1
+            sum_diff += float(diff)
+            mean_diff += float(diff) / counted
+            sq_diff += (diff ** 2)
+    return sum_diff, mean_diff / comps, sq_diff
+
def _nucleotide_diversity(char_sequences, state_alphabet, ignore_uncertain=True):
    r"""
    Return $\pi$, the mean proportional pairwise difference (nucleotide
    diversity), over all pairs of sequences in ``char_sequences``.
    """
    _, pi, _ = _count_differences(char_sequences, state_alphabet, ignore_uncertain)
    return pi
+
def _average_number_of_pairwise_differences(char_sequences, state_alphabet, ignore_uncertain=True):
    r"""
    Return $k$ (Tajima 1983; Wakeley 1996) for a set of sequences:

        k = \frac{\sum_i \sum_j k_{ij}}{{n \choose 2}}

    where $k_{ij}$ is the number of pairwise differences between the
    $i$th and $j$th sequences, and $n$ is the number of DNA sequences
    sampled.
    """
    total_diffs = _count_differences(char_sequences, state_alphabet, ignore_uncertain)[0]
    return total_diffs / probability.binomial_coefficient(len(char_sequences), 2)
+
+def _num_segregating_sites(char_sequences, state_alphabet, ignore_uncertain=True):
+    """
+    Returns the raw number of segregating sites (polymorphic sites).
+    """
+    s = 0
+    if ignore_uncertain:
+        attr = "fundamental_indexes_with_gaps_as_missing"
+        states_to_ignore = set([state_alphabet.gap_state, state_alphabet.no_data_state])
+    else:
+        attr = "fundamental_indexes"
+        states_to_ignore = set()
+    for i, c1 in enumerate(char_sequences[0]):
+        for v in char_sequences[1:]:
+            c2 = v[i]
+            if c1 in states_to_ignore or c2 in states_to_ignore:
+                continue
+            f1 = getattr(c1, attr)
+            f2 = getattr(c2, attr)
+            if f1 != f2:
+                s += 1
+                break
+    return s
+
+def _tajimas_d(num_sequences, avg_num_pairwise_differences, num_segregating_sites):
+
+    ### VERIFICATION ###
+    ###
+    ### Given: num_sequences = 10, num_pairwise_differences = 3.888889, num_segregating_sites = 16
+    ###  i.e.: tajimas_d(10, 3.888889, 16)  == -1.44617198561
+    ###  Then:    a1 == 2.82896825397
+    ###           a2 == 1.53976773117
+    ###           b1 == 0.407407407407
+    ###           b2 == 0.279012345679
+    ###           c1 == 0.0539216450284
+    ###           c2 == 0.0472267720013
+    ###           e1 == 0.0190605338016
+    ###           e2 == 0.0049489277699
+    ###           D ==  -1.44617198561
+
+    a1 = sum([1.0/i for i in range(1, num_sequences)])
+    a2 = sum([1.0/(i**2) for i in range(1, num_sequences)])
+    b1 = float(num_sequences+1)/(3*(num_sequences-1))
+    b2 = float(2 * ( (num_sequences**2) + num_sequences + 3 )) / (9*num_sequences*(num_sequences-1))
+    c1 = b1 - 1.0/a1
+    c2 = b2 - float(num_sequences+2)/(a1 * num_sequences) + float(a2)/(a1 ** 2)
+    e1 = float(c1) / a1
+    e2 = float(c2) / ( (a1**2) + a2 )
+    D = (
+        float(avg_num_pairwise_differences - (float(num_segregating_sites)/a1))
+        / math.sqrt(
+            (e1 * num_segregating_sites )
+          + ((e2 * num_segregating_sites) * (num_segregating_sites - 1) ))
+        )
+    return D
+
+###############################################################################
+## friendlier-functions, generally taking a CharacterMatrix
+###############################################################################
+
def num_segregating_sites(char_matrix, ignore_uncertain=True):
    """
    Return the raw number of segregating (polymorphic) sites in ``char_matrix``.
    """
    seqs = char_matrix.sequences()
    alphabet = char_matrix.default_state_alphabet
    return _num_segregating_sites(seqs, alphabet, ignore_uncertain)
+
def average_number_of_pairwise_differences(char_matrix, ignore_uncertain=True):
    """
    Return $k$, the average number of pairwise differences, computed over
    the sequences of ``char_matrix``.
    """
    seqs = char_matrix.sequences()
    alphabet = char_matrix.default_state_alphabet
    return _average_number_of_pairwise_differences(seqs, alphabet, ignore_uncertain)
+
def nucleotide_diversity(char_matrix, ignore_uncertain=True):
    r"""
    Return $\pi$, the nucleotide diversity, computed over the sequences of
    ``char_matrix``.
    """
    seqs = char_matrix.sequences()
    alphabet = char_matrix.default_state_alphabet
    return _nucleotide_diversity(seqs, alphabet, ignore_uncertain)
+
def tajimas_d(char_matrix, ignore_uncertain=True):
    """
    Return Tajima's D computed over all sequences in ``char_matrix``.
    """
    seqs = char_matrix.sequences()
    alphabet = char_matrix.default_state_alphabet
    k = _average_number_of_pairwise_differences(
            seqs, alphabet, ignore_uncertain=ignore_uncertain)
    seg_sites = _num_segregating_sites(
            seqs, alphabet, ignore_uncertain=ignore_uncertain)
    return _tajimas_d(len(seqs), k, seg_sites)
+
def wattersons_theta(char_matrix, ignore_uncertain=True):
    """
    Return Watterson's theta (per sequence): S / a1, where S is the number
    of segregating sites and a1 is the harmonic number over 1..n-1 for n
    sequences.
    """
    seqs = char_matrix.sequences()
    seg_sites = _num_segregating_sites(
            seqs,
            char_matrix.default_state_alphabet,
            ignore_uncertain=ignore_uncertain)
    harmonic = sum(1.0 / i for i in range(1, len(seqs)))
    return float(seg_sites) / harmonic
+
+###############################################################################
+## Classes
+###############################################################################
+
class PopulationPairSummaryStatistics(object):
    """
    Two-population summary statistics for aligned DNA sequences.

    On construction, eagerly computes and stores (notation of Hickerson
    et al. 2006 where given):

    - ``average_number_of_pairwise_differences`` (pi)
    - ``average_number_of_pairwise_differences_between`` (pi_b)
    - ``average_number_of_pairwise_differences_within`` (pi_w)
    - ``average_number_of_pairwise_differences_net`` (pi_net)
    - ``num_segregating_sites`` (S)
    - ``wattersons_theta``
    - ``wakeleys_psi`` (Wakeley 1996)
    - ``tajimas_d``
    """

    def __init__(self, pop1_seqs, pop2_seqs, ignore_uncertain=True):
        """
        ``pop1_seqs`` and ``pop2_seqs`` are the aligned character
        sequences of the two populations; if ``ignore_uncertain`` is True,
        gap and missing-data states are excluded from comparisons.
        Statistics are computed immediately via ``calc()``.
        """
        self.pop1_seqs = pop1_seqs
        self.pop2_seqs = pop2_seqs
        # The concatenation of both populations is the "total" sample.
        self.combined_seqs = pop1_seqs + pop2_seqs
        self.ignore_uncertain = ignore_uncertain
        self.state_alphabet = dendropy.DNA_STATE_ALPHABET

        self.average_number_of_pairwise_differences = 0
        self.average_number_of_pairwise_differences_between = 0
        self.average_number_of_pairwise_differences_within = 0
        self.average_number_of_pairwise_differences_net = 0
        self.num_segregating_sites = 0
        self.wattersons_theta = 0.0
        self.wakeleys_psi = 0.0
        self.tajimas_d = 0.0
        if self.ignore_uncertain:
            # Compare states via indexes that treat gaps as missing data,
            # and skip gap/no-data states entirely.
            self.state_attr = "fundamental_indexes_with_gaps_as_missing"
            self.states_to_ignore = set([self.state_alphabet.gap_state, self.state_alphabet.no_data_state])
        else:
            self.state_attr = "fundamental_indexes"
            self.states_to_ignore = set()
        self.calc()

    def calc(self):
        """
        Compute all summary statistics for the two populations and store
        the results as attributes of ``self``.
        """
        # Within-population difference tallies: (sum, mean, sum-of-squares).
        diffs_x, mean_diffs_x, sq_diff_x = _count_differences(self.pop1_seqs, self.state_alphabet, self.ignore_uncertain)
        diffs_y, mean_diffs_y, sq_diff_y = _count_differences(self.pop2_seqs, self.state_alphabet, self.ignore_uncertain)
        # Average pairwise differences within each population.
        d_x = diffs_x / probability.binomial_coefficient(len(self.pop1_seqs), 2)
        d_y = diffs_y / probability.binomial_coefficient(len(self.pop2_seqs), 2)
        # Average pairwise differences between populations (Wakeley 1996, Eq 3).
        d_xy = self._average_number_of_pairwise_differences_between_populations()
        # Variances of pairwise differences.
        s2_x = (float(sq_diff_x) / probability.binomial_coefficient(len(self.pop1_seqs), 2) ) - (d_x ** 2)
        s2_y = (float(sq_diff_y) / probability.binomial_coefficient(len(self.pop2_seqs), 2) ) - (d_y ** 2)
        s2_xy = self._variance_of_pairwise_differences_between_populations(d_xy)
        n = len(self.combined_seqs)
        n_x = float(len(self.pop1_seqs))
        n_y = float(len(self.pop2_seqs))
        a = float(n * (n-1))
        ax = float(n_x * (n_x - 1))
        ay = float(n_y * (n_y - 1))
        k = _average_number_of_pairwise_differences(self.combined_seqs, self.state_alphabet, self.ignore_uncertain)
        n = len(self.combined_seqs)

        # Hickerson 2006: pi #
        self.average_number_of_pairwise_differences = k

        # Hickerson 2006: pi_b #
        self.average_number_of_pairwise_differences_between = d_xy

        # Hickerson 2006: pi_w #
        self.average_number_of_pairwise_differences_within = d_x + d_y

        # Hickerson 2006: pi_net #
        self.average_number_of_pairwise_differences_net = d_xy - (d_x + d_y)

        # Hickerson 2006: S #
        self.num_segregating_sites = _num_segregating_sites(
                self.combined_seqs,
                self.state_alphabet,
                self.ignore_uncertain)

        # Hickerson 2006: theta #
        a1 = sum([1.0/i for i in range(1, n)])
        self.wattersons_theta = float(self.num_segregating_sites) / a1

        # Wakeley 1996 #
        self.wakeleys_psi = (float(1)/(a)) * ( ax * (math.sqrt(s2_x)/d_x) + ay * (math.sqrt(s2_y)/d_y) + (2 * n_x * n_y * math.sqrt(s2_xy)/k))

        # Tajima's D #
        self.tajimas_d = _tajimas_d(n, self.average_number_of_pairwise_differences, self.num_segregating_sites)

    def _average_number_of_pairwise_differences_between_populations(self):
        """
        Implements Eq (3) of:

        Wakeley, J. 1996. Distinguishing migration from isolation using the
        variance of pairwise differences. Theoretical Population Biology 49:
        369-386.
        """
        # Total mismatches over all cross-population sequence pairs.
        diffs = 0
        for sx in self.pop1_seqs:
            for sy in self.pop2_seqs:
                for cidx, c in enumerate(sx):
                    c1 = c
                    c2 = sy[cidx]
                    if c1 in self.states_to_ignore or c2 in self.states_to_ignore:
                        continue
                    f1 = getattr(c1, self.state_attr)
                    f2 = getattr(c2, self.state_attr)
                    if f1 != f2:
                        diffs += 1
        # Normalize by the number of cross-population pairs.
        dxy = float(1)/(len(self.pop1_seqs) * len(self.pop2_seqs)) * float(diffs)
        return dxy

    def _variance_of_pairwise_differences_between_populations(self, mean_diff):
        """
        Implements Eq (10) of:

        Wakeley, J. 1996. Distinguishing migration from isolation using the
        variance of pairwise differences. Theoretical Population Biology 49:
        369-386.
        """
        # Sum of squared deviations of per-pair difference counts from the mean.
        ss_diffs = 0
        for sx in self.pop1_seqs:
            for sy in self.pop2_seqs:
                diffs = 0
                for cidx, c in enumerate(sx):
                    c1 = c
                    c2 = sy[cidx]
                    if c1 in self.states_to_ignore or c2 in self.states_to_ignore:
                        continue
                    f1 = getattr(c1, self.state_attr)
                    f2 = getattr(c2, self.state_attr)
                    if f1 != f2:
                        diffs += 1
                ss_diffs += (float(diffs - mean_diff) ** 2)
        return float(ss_diffs)/(len(self.pop1_seqs)*len(self.pop2_seqs))
+
def derived_state_matrix(
        char_matrix,
        ancestral_sequence=None,
        derived_state_alphabet=None,
        ignore_uncertain=True,
        ):
    """
    Given a character matrix and a reference ancestral sequence, returns a
    StandardCharacterMatrix over a 0/1 alphabet in which '0' marks the
    ancestral state and '1' a derived state. If ``ancestral_sequence`` is
    None, the first sequence in ``char_matrix`` is used as the reference.
    Sites that cannot be scored (ignored state in either the ancestral or
    the observed sequence) are coded as '?'.

    e.g.

        Given:
                GGCTAATCTGA
                GCTTTTTCTGA
                GCTCTCTCTTC

        with ancestral sequence:
                GGTTAATCTGA

        this returns:
                0010000000
                0000110000
                0001110011
    """
    if derived_state_alphabet is None:
        # Default 0/1 alphabet with '?' for unscorable sites.
        derived_state_alphabet = dendropy.StateAlphabet(
                fundamental_states="01",
                polymorphic_states=None,
                ambiguous_states=None,
                no_data_symbol="?",
                gap_symbol="-")
    derived_matrix = dendropy.StandardCharacterMatrix(
            taxon_namespace=char_matrix.taxon_namespace,
            default_state_alphabet=derived_state_alphabet)
    if ignore_uncertain:
        attr = "fundamental_indexes_with_gaps_as_missing"
        states_to_ignore = set([char_matrix.default_state_alphabet.gap_state, char_matrix.default_state_alphabet.no_data_state])
    else:
        attr = "fundamental_indexes"
        states_to_ignore = set()
    if ancestral_sequence is None:
        # Default reference: the first sequence in the matrix.
        ancestral_sequence = char_matrix[0]
    # Precompute the comparable index for each ancestral site; None marks
    # sites that cannot be compared (ignored ancestral state).
    ancestral_fundamental_ids = []
    for idx, c1 in enumerate(ancestral_sequence):
        if c1 in states_to_ignore:
            ancestral_fundamental_ids.append(None)
        else:
            ancestral_fundamental_ids.append(getattr(c1, attr))
    for taxon in char_matrix:
        s1 =  char_matrix[taxon]
        for idx, c2 in enumerate(s1):
            if ancestral_fundamental_ids[idx] is None or c2 in states_to_ignore:
                # Unscorable site: code as missing data.
                derived_matrix[taxon].append(derived_matrix.default_state_alphabet["?"])
                continue
            f2 = getattr(c2, attr)
            if f2 == ancestral_fundamental_ids[idx]:
                derived_matrix[taxon].append(derived_matrix.default_state_alphabet["0"])
            else:
                derived_matrix[taxon].append(derived_matrix.default_state_alphabet["1"])
    return derived_matrix
+
def unfolded_site_frequency_spectrum(
        char_matrix,
        ancestral_sequence=None,
        ignore_uncertain=False,
        pad=True):
    """
    Return the unfolded site frequency spectrum of ``char_matrix`` as a
    dict mapping derived-allele count to number of sites with that count,
    using ``ancestral_sequence`` as the reference (the first sequence in
    the matrix when None). With ``pad`` True, every count from 0 through
    the number of sequences appears as a key, zero-filled.
    """
    dsm = derived_state_matrix(
            char_matrix=char_matrix,
            ancestral_sequence=ancestral_sequence,
            derived_state_alphabet=None,
            ignore_uncertain=ignore_uncertain,
            )
    freqs = {}
    if pad:
        freqs = dict((i, 0) for i in range(len(char_matrix) + 1))
    # Transpose sequences into per-site columns and count derived ('1') states.
    for site in zip(*dsm.sequences()):
        derived_count = sum(1 for state in site if state.symbol == "1")
        freqs[derived_count] = freqs.get(derived_count, 0) + 1
    return freqs
+
+
diff --git a/dendropy/calculate/probability.py b/dendropy/calculate/probability.py
new file mode 100644
index 0000000..c319cbb
--- /dev/null
+++ b/dendropy/calculate/probability.py
@@ -0,0 +1,324 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Functions to calculate or draw values from various probability distributions.
+"""
+
+import math
+from dendropy.utility import GLOBAL_RNG
+
def factorial(num):
    """Return the factorial of the integer ``num``.

    factorial(0) == 1; for negative ``num``, returns -factorial(abs(num)).

    Fix: the docstring always promised ``-factorial(abs(n))`` for n < 0,
    but the implementation silently dropped the sign; the sign is now
    applied as documented.
    """
    result = 1
    for i in range(1, abs(num) + 1):
        result *= i
    if num < 0:
        # Honor the documented contract for negative arguments.
        result = -result
    return result
+
def binomial_coefficient(population, sample):
    """Return "``population`` choose ``sample``" as an exact integer.

    Uses the multiplicative formula, computing the product of the top
    ``population - s`` terms over the matching factorial. The division is
    always exact, so integer floor division is used.

    Fix: the original ``numerator/denominator`` is true division under
    Python 3, returning a float and losing precision once the operands
    exceed 2**53; ``//`` keeps the result an exact int.
    """
    s = max(sample, population - sample)
    assert s <= population
    assert population > -1
    if s == population:
        return 1
    numerator = 1
    denominator = 1
    for i in range(s + 1, population + 1):
        numerator *= i
        denominator *= (i - s)
    return numerator // denominator
+
def binomial_rv(n, p, rng=None):
    """
    Draw the number of successes in ``n`` Bernoulli trials with success
    probability ``p``, using the BINV inversion algorithm of
    Kachitvicyanukul, V. and B. Schmeiser. 1988. Binomial random variate
    generation. Communications of the ACM 31: 216-222.
    Note: *NOT* the best algorithm according to the authors of the paper
    (who present their own as an alternative). Apart from rounding errors
    accumulating in the loop, it may also take a long time to return a
    value as ``n`` * ``p`` become large or even moderate (e.g., n=380 and
    p=0.8 still).
    """
    if rng is None:
        rng = GLOBAL_RNG
    q = 1 - p
    odds = float(p) / q
    a = (n + 1) * odds
    pmf_k = q ** n          # P(X == 0)
    successes = 0
    u = rng.random()
    while True:
        if u <= pmf_k:
            return successes
        u = u - pmf_k
        successes = successes + 1
        # Recurrence: P(X == k) = ((a / k) - odds) * P(X == k - 1)
        pmf_k = (float(a) / successes - odds) * pmf_k
+
def exp_pdf(value, rate):
    """
    Return the probability density of an exponential distribution with
    intensity ``rate``, evaluated at ``value``: rate * e^(-rate * value).
    """
    exponent = -1.0 * rate * value
    return float(rate) * math.exp(exponent)
+
def poisson_rv(rate, rng=None):
    """
    Return a Poisson-distributed random count using Knuth's
    product-of-uniforms method; the code uses ``rate`` directly as the
    distribution mean (L = e^(-rate)).

    For large means the product underflows, so the draw is split into two
    half-mean draws and summed (Poisson additivity).

    Fix: the recursive half-mean calls previously omitted ``rng``, so an
    explicitly supplied generator was silently ignored (falling back to
    the global RNG) whenever ``rate`` exceeded the threshold.
    """
    if rng is None:
        rng = GLOBAL_RNG
    MAX_EXPECTATION = 64.0 # larger than this and we have underflow issues
    if rate > MAX_EXPECTATION:
        r = rate/2.0
        # Propagate the caller's generator into both recursive draws.
        return poisson_rv(r, rng=rng) + poisson_rv(r, rng=rng)
    L = math.exp(-1.0 * rate)
    p = 1.0
    k = 0.0
    while p >= L:
        k = k + 1.0
        u = rng.random()
        p = p * u
    return int(k - 1.0)
+
def num_poisson_events(rate, period, rng=None):
    """
    Return how many events of a Poisson process accumulate over
    ``period``, by summing waiting times drawn via
    ``rng.expovariate(1.0/rate)`` until the period is exhausted.
    """
    if rng is None:
        rng = GLOBAL_RNG
    num_events = 0
    remaining = period
    while remaining > 0:
        wait = rng.expovariate(1.0 / rate)
        if wait <= remaining:
            num_events = num_events + 1
        remaining = remaining - wait
    return num_events
+
def poisson_pmf(k, rate):
    """
    Return the probability of observing ``k`` under a Poisson distribution
    parameterized by ``rate``, where the mean is taken to be 1/rate:
    mean**k * e^(-mean) / k!.
    """
    mean = 1.0 / rate
    density = (mean ** k) * math.exp(-mean)
    return float(density) / factorial(k)
+
def sample_multinomial(probs, rng=None):
    """Returns the index of the probability bin in ``probs``.
    ``probs`` is assumed to sum to 1.0 (all rounding error contributes to the
    last bin).
    """
    if rng is None:
        rng = GLOBAL_RNG
    remainder = rng.random()
    index = 0
    for weight in probs:
        remainder -= weight
        if remainder < 0.0:
            return index
        index += 1
    # Rounding error left us past the end: credit the final bin.
    return len(probs) - 1
+
def weighted_choice(seq, weights, rng=None):
    """
    Selects an element out of seq, with probabilities of each element
    given by the list ``weights`` (which must be at least as long as the
    length of ``seq`` - 1).
    """
    if rng is None:
        rng = GLOBAL_RNG
    if weights is None:
        # No weights supplied: fall back to a uniform distribution.
        weights = [1.0/len(seq)] * len(seq)
    else:
        weights = list(weights)
    n_missing = len(seq) - len(weights)
    if n_missing > 1:
        raise Exception("Insufficient number of weights specified")
    if n_missing == 1:
        # The final weight may be omitted; it is the remainder to 1.0.
        weights.append(1 - sum(weights))
    return seq[weighted_index_choice(weights, rng)]
+
def weighted_index_choice(weights, rng=None):
    """
    Returns a random index, with the probability of each index given by its
    relative weight in ``weights``.

    For example, given [2, 3, 5] it returns 0 (the index of the first element)
    with probability 0.2, 1 with probability 0.3 and 2 with probability 0.5.
    The weights need not sum up to anything in particular, and can actually be
    arbitrary Python floating point numbers. Based on:
    http://eli.thegreenplace.net/2010/01/22/weighted-random-generation-in-python/

    Parameters
    ----------
    weights : sequence of float
        Non-negative relative weights, one per index.
    rng : random.Random instance, optional
        Source of randomness; defaults to the module-level ``GLOBAL_RNG``.

    Returns
    -------
    int
        The selected index.
    """
    if rng is None:
        rng = GLOBAL_RNG
    rnd = rng.random() * sum(weights)
    for i, w in enumerate(weights):
        rnd -= w
        if rnd < 0:
            return i
    # Bug fix: floating-point rounding can leave ``rnd`` fractionally >= 0
    # after subtracting every weight (sum() may round differently than the
    # sequential subtractions); in that case credit the last index instead
    # of implicitly returning None.
    return len(weights) - 1
+
def chisq_pdf(chisq, df):
    """
    Returns the probability value associated with the provided chi-square
    value and df.  Adapted from chisq.c in Gary Perlman's Stat.

    Returns 1.0 when ``chisq`` is non-positive or ``df`` < 1; these limits
    suggest this is the upper-tail probability of the chi-square
    distribution (NOTE(review): inferred from the boundary behavior and
    the Perlman source -- confirm before relying on it).
    """

    BIG = 20.0
    def ex(x):
        # NOTE: this rebinding shadows the outer BIG with the same value.
        # exp(x) for x < -20 is treated as exactly 0 (it would underflow
        # toward zero anyway).
        BIG = 20.0
        if x < -BIG:
            return 0.0
        else:
            return math.exp(x)

    # Degenerate inputs: by convention the returned probability is 1.0.
    if chisq <=0 or df < 1:
        return 1.0
    a = 0.5 * chisq
    if df%2 == 0:
        even = 1
    else:
        even = 0
    if df > 1:
        y = ex(-a)
    if even:
        # Even df implies df >= 2, so ``y`` is always bound on this branch.
        s = y
    else:
        # Odd df: seed the series with the normal-tail term. ``zprob`` is
        # presumably defined elsewhere in this module -- verify.
        s = 2.0 * zprob(-math.sqrt(chisq))
    if (df > 2):
        # ``chisq`` is reused from here on as the series loop bound,
        # (df - 1) / 2 -- it no longer holds the test statistic.
        chisq = 0.5 * (df - 1.0)
        if even:
            z = 1.0
        else:
            z = 0.5
        if a > BIG:
            # Large ``a``: accumulate the series in log space to avoid
            # underflow of the individual terms.
            if even:
                e = 0.0
            else:
                e = math.log(math.sqrt(math.pi))
            c = math.log(a)
            while (z <= chisq):
                e = math.log(z) + e
                s = s + ex(c*z-a-e)
                z = z + 1.0
            return s
        else:
            # Moderate ``a``: accumulate the series terms directly.
            if even:
                e = 1.0
            else:
                e = 1.0 / math.sqrt(math.pi) / math.sqrt(a)
            c = 0.0
            while (z <= chisq):
                e = e * (a/float(z))
                c = c + e
                z = z + 1.0
            return (c*y+s)
    else:
        return s
+
def z_pmf(z):
    """
    Returns the probability value associated with the provided z-score.
    Adapted from z.c in Gary Perlman's Stat.
    """
    Z_MAX = 6.0    # maximum meaningful z-value
    # Polynomial coefficients of the two-range approximation, highest
    # order first; evaluated by Horner's rule below (identical arithmetic
    # to the original nested expression).
    near_coeffs = (
        0.000124818987, -0.001075204047, 0.005198775019,
        -0.019198292004, 0.059054035642, -0.151968751364,
        0.319152932694, -0.531923007300, 0.797884560593,
    )
    far_coeffs = (
        -0.000045255659, 0.000152529290, -0.000019538132,
        -0.000676904986, 0.001390604284, -0.000794620820,
        -0.002034254874, 0.006549791214, -0.010557625006,
        0.011630447319, -0.009279453341, 0.005353579108,
        -0.002141268741, 0.000535310849, 0.999936657524,
    )
    if z == 0.0:
        x = 0.0
    else:
        y = 0.5 * math.fabs(z)
        if y >= (Z_MAX*0.5):
            x = 1.0
        elif (y < 1.0):
            w = y*y
            acc = near_coeffs[0]
            for coeff in near_coeffs[1:]:
                acc = acc * w + coeff
            x = acc * y * 2.0
        else:
            y = y - 2.0
            acc = far_coeffs[0]
            for coeff in far_coeffs[1:]:
                acc = acc * y + coeff
            x = acc
    if z > 0.0:
        return (x+1.0)*0.5
    return (1.0-x)*0.5
+
+
def geometric_rv(p, rng=None):
    """Geometric distribution per Devroye, Luc. Non-Uniform Random Variate
    Generation, 1986, p 500. http://cg.scs.carleton.ca/~luc/rnbookindex.html
    """
    if rng is None:
        rng = GLOBAL_RNG
    # The success probability must lie in (0.0, 1.0].
    if p <= 0.0 or p > 1.0:
        raise ValueError("p = %s: p must be in the interval (0.0, 1.0]" % p)
    if p == 1.0:
        # Success is certain: the first trial always succeeds. Returning
        # early also avoids evaluating log(1.0 - p) == log(0.0) below; the
        # exact floating-point comparison is fine here.
        return 1
    # rng.random() is in [0, 1); flip it into (0, 1] so log() is defined.
    uniform_draw = 1.0 - rng.random()
    # Invert the geometric CDF to transform the uniform variate.
    return int(math.ceil(math.log(uniform_draw) / math.log(1.0 - p)))
+
def hypergeometric_pmf(x, m, n, k):
    """
    Given a population consisting of ``m`` items of class M and ``n`` items of class N,
    this returns the probability of observing ``x`` items of class M when sampling
    ``k`` times without replacement from the entire population (i.e., {M,N})

            p(x) = (choose(m, x) * choose(n, k-x)) / choose(m+n, k)

    Note: this definition is shadowed by the identically-named function
    defined after it. The original body here computed the ratio of binomial
    coefficients directly, which raises OverflowError for large populations;
    it now works in log space instead.
    """
    def _log_choose(total, chosen):
        # log(C(total, chosen)) via log-gamma; cannot overflow the way the
        # direct product of binomial coefficients can.
        return (math.lgamma(total + 1)
                - math.lgamma(chosen + 1)
                - math.lgamma(total - chosen + 1))
    log_p = _log_choose(m, x) + _log_choose(n, k - x) - _log_choose(m + n, k)
    return math.exp(log_p)
+
def hypergeometric_pmf(x, m, n, k):
    """
    Given a population consisting of ``m`` items of class M and ``n`` items of class N,
    this returns the probability of observing ``x`` items of class M when sampling
    ``k`` times without replacement from the entire population (i.e., {M,N})

            p(x) = (choose(m, x) * choose(n, k-x)) / choose(m+n, k)
    """
    # Computing the ratio of binomial coefficients directly fails with
    # 'OverflowError: long int too large to convert to float' for large
    # populations, so the calculation is carried out in log space.
    log_num_m = math.log(binomial_coefficient(m, x))
    log_num_n = math.log(binomial_coefficient(n, k-x))
    log_denom = math.log(binomial_coefficient(m+n, k))
    return math.exp(log_num_m + log_num_n - log_denom)
diff --git a/dendropy/calculate/statistics.py b/dendropy/calculate/statistics.py
new file mode 100644
index 0000000..68dcf7d
--- /dev/null
+++ b/dendropy/calculate/statistics.py
@@ -0,0 +1,502 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Functions to calculate some general statistics.
+"""
+
+import math
+from dendropy.calculate import probability
+from operator import itemgetter
+
def _mean_and_variance_pop_n(values):
    # Single pass over ``values`` accumulating count, sum, and sum of
    # squares; returns (mean, population variance, count).
    count = 0
    total = 0.0
    total_sq = 0.0
    for value in values:
        count += 1
        total += value
        total_sq += value * value
    if count == 0:
        raise IndexError("values in mean_and_variance cannot be empty")
    mean = float(total) / count
    variance = (total_sq - mean * total) / count
    return mean, variance, count
+
def mean_and_population_variance(values):
    """Returns the mean and population variance while only passing over the
    elements in values once."""
    mean, pop_var, _ = _mean_and_variance_pop_n(values)
    return (mean, pop_var)
+
def mean_and_sample_variance(values):
    """Returns the mean and sample variance while only passing over the
    elements in values once."""
    mean, pop_var, n = _mean_and_variance_pop_n(values)
    if n == 1:
        # The sample variance is undefined for a single observation.
        return mean, float('inf')
    # Bessel's correction: rescale the population variance by n/(n-1).
    return mean, n * pop_var / (n - 1)
+
def mode(values, bin_size=0.1):
    """
    Returns the mode(s) of a set of values: every value (or bin midpoint,
    when binning) that occurs with the maximum frequency.

    Parameters
    ----------
    values : iterable
        The sample of values.
    bin_size : float or None
        Width of the bins into which values are pooled before counting;
        if `None`, values are counted directly without binning.

    Returns
    -------
    list
        All most-frequent values (more than one in case of ties).
    """
    bins = {}
    for v in values:
        if bin_size is not None:
            idx = int(round(float(v)/bin_size))
        else:
            idx = v
        if idx in bins:
            bins[idx] += 1
        else:
            bins[idx] = 1
    sorted_bins = sorted(bins.items(), key=itemgetter(1), reverse=True)
    max_count = sorted_bins[0][1]
    # Bug fixes: ``xrange`` does not exist under Python 3 (NameError), and
    # the bin index must only be rescaled by ``bin_size`` when binning was
    # actually applied (multiplying by None raised TypeError).
    if bin_size is None:
        return [b[0] for b in sorted_bins if b[1] >= max_count]
    return [b[0] * bin_size for b in sorted_bins if b[1] >= max_count]
+
def median(pool):
    """
    Returns median of sample. From: http://wiki.python.org/moin/SimplePrograms
    """
    ordered = sorted(pool)
    n = len(ordered)
    mid = n // 2
    if n % 2:
        # Odd count: the single middle element.
        return ordered[mid]
    # Even count: the average of the two middle elements.
    return (ordered[mid - 1] + ordered[mid]) / 2
+
def empirical_hpd(values, conf=0.05):
    """
    Assuming a **unimodal** distribution, returns the 0.95 highest posterior
    density (HPD) interval for a set of samples from a posterior distribution.
    Adapted from ``emp.hpd`` in the "TeachingDemos" R package (Copyright Greg
    Snow; licensed under the Artistic License).
    """
    # Accept either tail convention: conf=0.05 and conf=0.95 both denote
    # the 95% HPD interval.
    conf = min([conf, 1.0 - conf])
    n = len(values)
    nn = int(round(n * conf))
    x = sorted(values)
    if nn == 0:
        raise ValueError("Sample size too small: %s" % len(values))
    # Width of each candidate interval covering (1 - conf) of the sample.
    widths = [x[n - (nn - i)] - x[i] for i in range(nn)]
    narrowest = widths.index(min(widths))
    return (x[narrowest], x[n - nn + narrowest])
+
def empirical_cdf(values, v):
    """
    Returns the proportion of values in ``values`` <= ``v``.

    Parameters
    ----------
    values : sequence of numbers
        The sample (must be non-empty).
    v : number
        The evaluation point.

    Returns
    -------
    float
        The empirical cumulative distribution function at ``v``.
    """
    # Bug fix: the comparison must be inclusive (<=) to match the documented
    # contract and the standard ECDF definition; the original counted only
    # strictly-smaller values.
    count = sum(1 for v0 in values if v0 <= v)
    return float(count) / len(values)
+
def quantile(values, q):
    """
    Returns q-th quantile of ``values`` (nearest-rank method).

    Note: this definition is shadowed by the Hyndman-Fan implementation of
    the same name defined below it in this module.
    """
    values = sorted(values)
    size = len(values)
    idx = int(round(size * q)) - 1
    # Bug fix: for small ``q`` the computed rank can be -1, which silently
    # indexed the *largest* value; the original also raised for the
    # perfectly valid rank 0. Only a genuinely out-of-range rank is an error.
    if idx < 0:
        raise ValueError("Sample size too small: %s" % len(values))
    return values[idx]
+
+
+
# http://adorio-research.org/wordpress/?p=125
# File    quantile.py
# Desc    computes sample quantiles
# Author  Ernesto P. Adorio, PhD.
#         UPDEPP (U.P. at Clarkfield)
# Version 0.0.1 August 7. 2009
def quantile(x, q,  qtype = 7, issorted = False):
    """
    Compute quantiles from input array x given q. For median,
    specify q=0.5.

    Args:
       x - input data
       q - quantile
       qtype - algorithm (R types 1-9; returns None if out of range)
       issorted - True if x already sorted.

    References:
       http://reference.wolfram.com/mathematica/ref/Quantile.html
       http://wiki.r-project.org/rwiki/doku.php?id=rdoc:stats:quantile

    Author:
        Ernesto P.Adorio Ph.D.
        UP Extension Program in Pampanga, Clark Field.
    """
    # Bug fix: the original placed ``from math import modf, floor`` *before*
    # the string above, so it was never actually recognized as the function's
    # docstring; the module-level ``math`` import is used instead.
    if not issorted:
        y = sorted(x)
    else:
        y = x
    if not (1 <= qtype <= 9):
        return None  # error!

    # Parameters (a, b, c, d) for the Hyndman and Fan (1996) algorithms.
    abcd = [(0,   0, 1, 0), # inverse empirical distrib.function., R type 1
            (0.5, 0, 1, 0), # similar to type 1, averaged, R type 2
            (0.5, 0, 0, 0), # nearest order statistic,(SAS) R type 3

            (0,   0, 0, 1), # California linear interpolation, R type 4
            (0.5, 0, 0, 1), # hydrologists method, R type 5
            (0,   1, 0, 1), # mean-based estimate(Weibull method), (SPSS,Minitab), type 6
            (1,  -1, 0, 1), # mode-based method,(S, S-Plus), R type 7
            (1.0/3, 1.0/3, 0, 1), # median-unbiased ,  R type 8
            (3/8.0, 0.25, 0, 1)   # normal-unbiased, R type 9.
           ]

    a, b, c, d = abcd[qtype-1]
    n = len(x)
    # Split the continuous rank into fractional and integral parts.
    g, j = math.modf( a + (n+b) * q -1)
    if j < 0:
        return y[0]
    elif j >= n:
        return y[n-1]   # oct. 8, 2010 y[n]???!! uncaught  off by 1 error!!!

    j = int(math.floor(j))
    if g ==  0:
        return y[j]
    else:
        # Linear interpolation between the two bracketing order statistics.
        return y[j] + (y[j+1]- y[j])* (c + d * g)
+
def quantile_5_95(values):
    """
    Returns 5% and 95% quantiles (nearest-rank method).

    Raises
    ------
    ValueError
        If the sample is too small for a 5% rank to exist.
    """
    values = sorted(values)
    size = len(values)
    idx5 = int(round(size * 0.05)) - 1
    idx95 = int(round(size * 0.95)) - 1
    # Bug fix: for small samples the 5% rank is -1, which silently returned
    # the *largest* value as the lower quantile; the original also raised
    # for the perfectly valid rank 0.
    if idx5 < 0:
        raise ValueError("Sample size too small: %s" % len(values))
    return values[idx5], values[idx95]
+
def variance_covariance(data, population_variance=False):
    """
    Returns the Variance-Covariance matrix for ``data``.
    From: http://www.python-forum.org/pythonforum/viewtopic.php?f=3&t=17441
    """
    n_vectors = len(data)
    n_dims = len(data[0])
    # Sample (n-1) vs. population (n) denominator.
    denom = n_vectors if population_variance else n_vectors - 1.0

    # Per-dimension means.
    means = [sum(row[j] for row in data) / n_vectors for j in range(n_dims)]

    covar = [[0.0] * n_dims for _ in range(n_dims)]
    # Covariance is symmetric: fill the lower triangle, then mirror it.
    for i in range(n_dims):
        for j in range(i + 1):
            cross = 0.0
            for row in data:
                cross += row[i] * row[j]
            covar[i][j] = cross / denom - means[i] * means[j] * n_vectors / denom

    for j in range(n_dims):
        for k in range(j + 1):
            covar[k][j] = covar[j][k]
    return covar
+
class FishersExactTest(object):
    """
    Fisher's exact test of association for a 2x2 contingency table.

    Given a 2x2 table:

        +---+---+
        | a | b |
        +---+---+
        | c | d |
        +---+---+

    represented by a list of lists::

        [[a,b],[c,d]]

    this calculates the sum of the probability of this table and all others
    more extreme under the null hypothesis that there is no association between
    the categories represented by the vertical and horizontal axes.
    """

    # Modernized: use the ``@staticmethod`` decorator instead of the legacy
    # ``name = staticmethod(name)`` rebinding idiom.
    @staticmethod
    def probability_of_table(table):
        """
        Given a 2x2 table:

            +---+---+
            | a | b |
            +---+---+
            | c | d |
            +---+---+

        represented by a list of lists::

            [[a,b],[c,d]]

        this returns the probability of this table under the null hypothesis of
        no association between rows and columns, which was shown by Fisher to be
        a hypergeometric distribution:

            p = ( choose(a+b, a) * choose(c+d, c) ) / choose(a+b+c+d, a+c)

        """
        a = table[0][0]
        b = table[0][1]
        c = table[1][0]
        d = table[1][1]
        return probability.hypergeometric_pmf(a, a+b, c+d, a+c)

    def __init__(self, table):
        """
        Parameters
        ----------
        table : list of two lists of two numbers
            The 2x2 contingency table, ``[[a,b],[c,d]]``.
        """
        self.table = table
        # Flattened cell values, and the extreme cells used to anchor the
        # tail enumerations below.
        self.flat_table = [table[0][0], table[0][1], table[1][0], table[1][1]]
        self.min_value = min(self.flat_table)
        self.max_value = max(self.flat_table)

    def _rotate_cw(self, table):
        """
        Returns a copy of table such that all the values
        are rotated clockwise once.
        """
        return [ [ table[1][0], table[0][0] ],
                [table[1][1], table[0][1] ] ]

    def _min_rotation(self):
        """
        Returns copy of self.table such that the smallest value is in the first
        (upper left) cell.
        """
        table = [list(self.table[0]), list(self.table[1])]
        while table[0][0] != self.min_value:
            table = self._rotate_cw(table)
        return table

    def _max_rotation(self):
        """
        Returns copy of self.table such that the largest value is in the first
        (upper left) cell.
        """
        table = [list(self.table[0]), list(self.table[1])]
        while table[0][0] != self.max_value:
            table = self._rotate_cw(table)
        return table

    def _sum_left_tail(self):
        """
        Returns the sum of probabilities of tables that are *more* extreme than
        the current table.
        """
        p_vals = self._get_left_tail_probs()
        return sum(p_vals)

    def _sum_right_tail(self):
        """
        Returns the sum of probabilities of the tables in the other (right)
        tail, generated by incrementing the smallest cell.
        """
        p_vals = self._get_right_tail_probs()
        return sum(p_vals)

    def _get_left_tail_probs(self):
        """
        Returns list of probabilities of all tables *more* extreme than the
        current table, obtained by decrementing the smallest cell toward zero
        while preserving the row and column marginal totals.
        """
        table = self._min_rotation()
        row_totals = [sum(table[0]), sum(table[1])]
        col_totals = [table[0][0] + table[1][0], table[0][1] + table[1][1]]
        p_vals = []
        while True:
            table[0][0] -= 1
            if table[0][0] < 0:
                break
            table[0][1] = row_totals[0] - table[0][0]
            table[1][0] = col_totals[0] - table[0][0]
            table[1][1] = row_totals[1] - table[1][0]
            p_vals.append(self.probability_of_table(table))
        return p_vals

    def _get_right_tail_probs(self):
        """
        Returns list of probabilities of all tables in the other tail,
        obtained by incrementing the smallest cell while preserving the row
        and column marginal totals (stopping when any cell would go negative).
        """
        table = self._min_rotation()
        row_totals = [sum(table[0]), sum(table[1])]
        col_totals = [table[0][0] + table[1][0], table[0][1] + table[1][1]]
        p_vals = []
        while True:
            table[0][0] += 1
            table[0][1] = row_totals[0] - table[0][0]
            if table[0][1] < 0:
                break
            table[1][0] = col_totals[0] - table[0][0]
            if table[1][0] < 0:
                break
            table[1][1] = row_totals[1] - table[1][0]
            if table[1][1] < 0:
                break
            p_vals.append(self.probability_of_table(table))
        return p_vals

    def _get_left_tail_tables(self):
        """
        Returns all tables that are *more* extreme than the current table
        (same enumeration as ``_get_left_tail_probs``, but the tables
        themselves).
        """
        table = self._min_rotation()
        row_totals = [sum(table[0]), sum(table[1])]
        col_totals = [table[0][0] + table[1][0], table[0][1] + table[1][1]]
        left_tail_tables = []
        while True:
            table[0][0] -= 1
            if table[0][0] < 0:
                break
            table[0][1] = row_totals[0] - table[0][0]
            table[1][0] = col_totals[0] - table[0][0]
            table[1][1] = row_totals[1] - table[1][0]
            left_tail_tables.append([list(table[0]), list(table[1])])
        return left_tail_tables

    def _get_right_tail_tables(self):
        """
        Returns all tables in the other tail (same enumeration as
        ``_get_right_tail_probs``, but the tables themselves).
        """
        table = self._min_rotation()
        row_totals = [sum(table[0]), sum(table[1])]
        col_totals = [table[0][0] + table[1][0], table[0][1] + table[1][1]]
        right_tail_tables = []
        while True:
            table[0][0] += 1
            table[0][1] = row_totals[0] - table[0][0]
            if table[0][1] < 0:
                break
            table[1][0] = col_totals[0] - table[0][0]
            if table[1][0] < 0:
                break
            table[1][1] = row_totals[1] - table[1][0]
            if table[1][1] < 0:
                break
            right_tail_tables.append([list(table[0]), list(table[1])])
        return right_tail_tables

    def left_tail_p(self):
        """
        Returns the sum of probabilities of this table and all others more
        extreme in the left tail.
        """
        return self.probability_of_table(self.table) + self._sum_left_tail()

    def right_tail_p(self):
        """
        Returns the sum of probabilities of this table and all others in the
        right tail.
        """
        return self.probability_of_table(self.table) + self._sum_right_tail()

    def two_tail_p(self):
        """
        Returns the two-tailed p-value: the sum of the probability of this
        table and of every other table (in either tail) whose probability
        does not exceed this table's.
        """
        p0 = self.probability_of_table(self.table)
        all_p_vals = self._get_left_tail_probs() + self._get_right_tail_probs()
        p_vals = []
        for p in all_p_vals:
            if p <= p0:
                p_vals.append(p)
        return sum(p_vals) + p0
+
def summarize(values):
    """
    Summarizes a sample of values:

        - ``range``       : tuple pair representing minimum and maximum values
        - ``mean``        : mean of sample
        - ``median``      : median of sample
        - ``var``         : (sample) variance
        - ``sd``          : (sample) standard deviation
        - ``hpd95``       : tuple pair representing 5% and 95% HPD
        - ``quant_5_95``  : tuple pair representing 5% and 95% quantile

    Statistics that cannot be computed for the given sample are reported
    as `None` (except ``sd``, which is 0.0 for a negative variance).
    """
    if len(values) == 0:
        raise ValueError("No values in data")
    summary = {}
    try:
        summary['range'] = (min(values), max(values))
    except (ValueError, OverflowError):
        summary['range'] = None
    try:
        summary['mean'], summary['var'] = mean_and_sample_variance(values)
        try:
            summary['sd'] = summary['var'] ** 0.5
        except ValueError:
            summary['sd'] = 0.0
        except OverflowError:
            summary['sd'] = None
    except (ValueError, OverflowError, IndexError):
        summary['mean'], summary['var'], summary['sd'] = None, None, None
    # The remaining statistics all degrade to None on the same failures.
    for key, compute in (
            ('median', lambda: median(values)),
            ('hpd95', lambda: empirical_hpd(values, conf=0.95)),
            ('quant_5_95', lambda: quantile_5_95(values))):
        try:
            summary[key] = compute()
        except (ValueError, OverflowError):
            summary[key] = None
    return summary
diff --git a/dendropy/calculate/treecompare.py b/dendropy/calculate/treecompare.py
new file mode 100644
index 0000000..b6b1d5d
--- /dev/null
+++ b/dendropy/calculate/treecompare.py
@@ -0,0 +1,502 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Statistics, metrics, measurements, and values calculated *between* *two* trees.
+"""
+
+from math import sqrt
+from dendropy.utility import error
+
+###############################################################################
+## Public Functions
+
def symmetric_difference(tree1, tree2, is_bipartitions_updated=False):
    """
    Returns *unweighted* Robinson-Foulds distance between two trees.

    Trees need to share the same |TaxonNamespace| reference. The
    bipartition bitmasks of the trees must be correct for the current tree
    structures (by calling :meth:`Tree.encode_bipartitions()` method) or the
    ``is_bipartitions_updated`` argument must be `False` to force recalculation
    of bipartitions.

    Parameters
    ----------
    tree1 : |Tree| object
        The first tree of the two trees being compared. This must share the
        same |TaxonNamespace| reference as ``tree2`` and must have
        bipartitions encoded.
    tree2 : |Tree| object
        The second tree of the two trees being compared. This must share the
        same |TaxonNamespace| reference as ``tree1`` and must have
        bipartitions encoded.
    is_bipartitions_updated : bool
        If `False`, then the bipartitions on *both* trees will be updated
        before comparison. If `True` then the bipartitions will only be
        calculated for a |Tree| object if they have not been calculated
        before, either explicitly or implicitly.

    Returns
    -------
    d : int
        The symmetric difference (a.k.a. the unweighted Robinson-Foulds
        distance) between ``tree1`` and ``tree2``.

    Examples
    --------

    ::

        import dendropy
        from dendropy.calculate import treecompare
        tns = dendropy.TaxonNamespace()
        tree1 = dendropy.Tree.get_from_path(
                "t1.nex",
                "nexus",
                taxon_namespace=tns)
        tree2 = dendropy.Tree.get_from_path(
                "t2.nex",
                "nexus",
                taxon_namespace=tns)
        tree1.encode_bipartitions()
        tree2.encode_bipartitions()
        print(treecompare.symmetric_difference(tree1, tree2))

    """
    # The symmetric difference is the total number of bipartitions found in
    # one tree but not the other, in either direction.
    fp_fn = false_positives_and_negatives(
            tree1,
            tree2,
            is_bipartitions_updated=is_bipartitions_updated)
    return sum(fp_fn)
+
def unweighted_robinson_foulds_distance(tree1, tree2, is_bipartitions_updated=False):
    """
    Alias for `symmetric_difference()`.
    """
    return symmetric_difference(
            tree1,
            tree2,
            is_bipartitions_updated)
+
def weighted_robinson_foulds_distance(
        tree1,
        tree2,
        edge_weight_attr="length",
        is_bipartitions_updated=False):
    """
    Returns *weighted* Robinson-Foulds distance between two trees based on
    ``edge_weight_attr``.

    Trees need to share the same |TaxonNamespace| reference. The
    bipartition bitmasks of the trees must be correct for the current tree
    structures (by calling :meth:`Tree.encode_bipartitions()` method) or the
    ``is_bipartitions_updated`` argument must be `False` to force recalculation of
    bipartitions.

    Parameters
    ----------
    tree1 : |Tree| object
        The first tree of the two trees being compared. This must share the
        same |TaxonNamespace| reference as ``tree2`` and must have
        bipartitions encoded.
    tree2 : |Tree| object
        The second tree of the two trees being compared. This must share the
        same |TaxonNamespace| reference as ``tree1`` and must have
        bipartitions encoded.
    edge_weight_attr : string
        Name of attribute on edges of trees to be used as the weight.
    is_bipartitions_updated : bool
        If `False`, then the bipartitions on *both* trees will be updated
        before comparison. If `True` then the bipartitions will only be
        calculated for a |Tree| object if they have not been calculated
        before, either explicitly or implicitly.

    Returns
    -------
    d : float
        The edge-weighted Robinson-Foulds distance between ``tree1`` and ``tree2``.

    Examples
    --------

    ::

        import dendropy
        from dendropy.calculate import treecompare
        tns = dendropy.TaxonNamespace()
        tree1 = dendropy.Tree.get_from_path(
                "t1.nex",
                "nexus",
                taxon_namespace=tns)
        tree2 = dendropy.Tree.get_from_path(
                "t2.nex",
                "nexus",
                taxon_namespace=tns)
        tree1.encode_bipartitions()
        tree2.encode_bipartitions()
        print(treecompare.weighted_robinson_foulds_distance(tree1, tree2))

    """
    def _sum_of_abs_diffs(length_diffs):
        # Weighted RF distance: the sum of absolute differences between
        # corresponding edge weights.
        return sum(abs(w1 - w2) for w1, w2 in length_diffs)
    return _bipartition_difference(tree1,
                           tree2,
                           dist_fn=_sum_of_abs_diffs,
                           edge_weight_attr=edge_weight_attr,
                           value_type=float,
                           is_bipartitions_updated=is_bipartitions_updated)
+
def false_positives_and_negatives(
        reference_tree,
        comparison_tree,
        is_bipartitions_updated=False):
    """
    Counts and returns number of false positive bipartitions (bipartitions
    found in ``comparison_tree`` but not in ``reference_tree``) and false
    negative bipartitions (bipartitions found in ``reference_tree`` but not
    in ``comparison_tree``).

    Trees need to share the same |TaxonNamespace| reference. The
    bipartition bitmasks of the trees must be correct for the current tree
    structures (by calling :meth:`Tree.encode_bipartitions()` method) or the
    ``is_bipartitions_updated`` argument must be `False` to force recalculation of
    bipartitions.

    Parameters
    ----------
    reference_tree : |Tree| object
        The first tree of the two trees being compared. This must share the
        same |TaxonNamespace| reference as ``comparison_tree`` and must have
        bipartitions encoded.
    comparison_tree : |Tree| object
        The second tree of the two trees being compared. This must share the
        same |TaxonNamespace| reference as ``reference_tree`` and must have
        bipartitions encoded.
    is_bipartitions_updated : bool
        If `False` (default), then the bipartitions on *both* trees will be
        updated before comparison. If `True` then the bipartitions will only
        be calculated for a |Tree| object if they have not been calculated
        before, either explicitly or implicitly.

    Returns
    -------
    t : tuple(int)
        A pair of integers, with first integer being the number of false
        positives and the second being the number of false negatives.

    Examples
    --------

    ::

        import dendropy
        from dendropy.calculate import treecompare
        tns = dendropy.TaxonNamespace()
        tree1 = dendropy.Tree.get_from_path(
                "t1.nex",
                "nexus",
                taxon_namespace=tns)
        tree2 = dendropy.Tree.get_from_path(
                "t2.nex",
                "nexus",
                taxon_namespace=tns)
        tree1.encode_bipartitions()
        tree2.encode_bipartitions()
        print(treecompare.false_positives_and_negatives(tree1, tree2))

    """
    if reference_tree.taxon_namespace is not comparison_tree.taxon_namespace:
        raise error.TaxonNamespaceIdentityError(reference_tree, comparison_tree)
    if not is_bipartitions_updated:
        reference_tree.encode_bipartitions()
        comparison_tree.encode_bipartitions()
    else:
        if reference_tree.bipartition_encoding is None:
            reference_tree.encode_bipartitions()
        if comparison_tree.bipartition_encoding is None:
            comparison_tree.encode_bipartitions()
    ref_bipartitions = set(reference_tree.bipartition_encoding)
    comparison_bipartitions = set(comparison_tree.bipartition_encoding)
    # Bug fix: the original computed these two set differences the wrong way
    # around relative to the documented contract -- a false positive is a
    # bipartition present in the *comparison* tree but absent from the
    # *reference* tree, and vice versa for a false negative. (The symmetric
    # difference, which sums both counts, was unaffected.)
    false_positives = comparison_bipartitions.difference(ref_bipartitions)
    false_negatives = ref_bipartitions.difference(comparison_bipartitions)
    return len(false_positives), len(false_negatives)
+
+def euclidean_distance(
+        tree1,
+        tree2,
+        edge_weight_attr="length",
+        value_type=float,
+        is_bipartitions_updated=False):
+    """
+    Returns the Euclidean distance (a.k.a. Felsenstein's 2004 "branch length
+    distance") between two trees based on ``edge_weight_attr``.
+
+    Trees need to share the same |TaxonNamespace| reference. The
+    bipartition bitmasks of the trees must be correct for the current tree
+    structures (by calling :meth:`Tree.encode_bipartitions()` method) or the
+    ``is_bipartitions_updated`` argument must be `False` to force recalculation of
+    bipartitions.
+
+    Parameters
+    ----------
+    tree1 : |Tree| object
+        The first tree of the two trees being compared. This must share the
+        same |TaxonNamespace| reference as ``tree2`` and must have
+        bipartitions encoded.
+    tree2 : |Tree| object
+        The second tree of the two trees being compared. This must share the
+        same |TaxonNamespace| reference as ``tree1`` and must have
+        bipartitions encoded.
+    edge_weight_attr : string
+        Name of attribute on edges of trees to be used as the weight.
+    value_type : type
+        Type (e.g. ``float`` or ``int``) to which each edge weight is
+        coerced before the distance is computed.
+    is_bipartitions_updated : bool
+        If `True`, then the bipartitions on *both* trees will be updated
+        before comparison. If `False` (default) then the bipartitions
+        will only be calculated for a |Tree| object if they have not been
+        calculated before, either explicitly or implicitly.
+
+    Returns
+    -------
+    d : float
+        The Euclidean distance between ``tree1`` and ``tree2``.
+
+    Examples
+    --------
+
+    ::
+
+        import dendropy
+        from dendropy.calculate import treecompare
+        tns = dendropy.TaxonNamespace()
+        tree1 = dendropy.Tree.get_from_path(
+                "t1.nex",
+                "nexus",
+                taxon_namespace=tns)
+        tree2 = dendropy.Tree.get_from_path(
+                "t2.nex",
+                "nexus",
+                taxon_namespace=tns)
+        tree1.encode_bipartitions()
+        tree2.encode_bipartitions()
+        print(treecompare.euclidean_distance(tree1, tree2))
+
+    """
+    # distance function: square root of the sum of squared differences in
+    # edge weight over all bipartitions (missing bipartitions contribute 0)
+    df = lambda length_diffs: sqrt(sum([pow(i[0] - i[1], 2) for i in length_diffs]))
+    return _bipartition_difference(tree1,
+                           tree2,
+                           dist_fn=df,
+                           edge_weight_attr=edge_weight_attr,
+                           value_type=value_type,
+                           is_bipartitions_updated=is_bipartitions_updated)
+
+def find_missing_bipartitions(reference_tree, comparison_tree, is_bipartitions_updated=False):
+    """
+    Returns a list of bipartitions that are in ``reference_tree``, but
+    not in ``comparison_tree``.
+
+    Trees need to share the same |TaxonNamespace| reference. The
+    bipartition bitmasks of the trees must be correct for the current tree
+    structures (by calling :meth:`Tree.encode_bipartitions()` method) or the
+    ``is_bipartitions_updated`` argument must be `False` to force recalculation of
+    bipartitions.
+
+    Parameters
+    ----------
+    reference_tree : |Tree| object
+        The first tree of the two trees being compared. This must share the
+        same |TaxonNamespace| reference as ``tree2`` and must have
+        bipartitions encoded.
+    comparison_tree : |Tree| object
+        The second tree of the two trees being compared. This must share the
+        same |TaxonNamespace| reference as ``tree1`` and must have
+        bipartitions encoded.
+    is_bipartitions_updated : bool
+        If `True`, then the bipartitions on *both* trees will be updated
+        before comparison. If `False` (default) then the bipartitions
+        will only be calculated for a |Tree| object if they have not been
+        calculated before, either explicitly or implicitly.
+
+    Returns
+    -------
+    s : list[|Bipartition|]
+        A list of bipartitions that are in the first tree but not in the second.
+
+    """
+    missing = []
+    if reference_tree.taxon_namespace is not comparison_tree.taxon_namespace:
+        raise error.TaxonNamespaceIdentityError(reference_tree, comparison_tree)
+    if not is_bipartitions_updated:
+        reference_tree.encode_bipartitions()
+        comparision_tree.encode_bipartitions()
+    else:
+        if reference_tree.bipartition_encoding is None:
+            reference_tree.encode_bipartitions()
+        if comparison_tree.bipartition_encoding is None:
+            comparison_tree.encode_bipartitions()
+    for bipartition in reference_tree.bipartition_encoding:
+        if bipartition in comparison_tree.bipartition_encoding:
+            pass
+        else:
+            missing.append(bipartition)
+    return missing
+
+###############################################################################
+## Legacy
+
+def robinson_foulds_distance(tree1, tree2, edge_weight_attr="length"):
+    """
+    DEPRECATED: Use :func:``symmetric_difference`` for the common unweighted
+    Robinson-Foulds distance metric (i.e., the symmetric difference between
+    two trees), or :func:``weighted_robinson_foulds_distance`` for the RF
+    distance as defined by Felsenstein, 2004 (which is what this function
+    delegates to).
+    """
+    return weighted_robinson_foulds_distance(tree1, tree2, edge_weight_attr)
+
+def mason_gamer_kellogg_score(tree1, tree2, is_bipartitions_updated=False):
+    """
+    Mason-Gamer and Kellogg. Testing for phylogenetic conflict among molecular
+    data sets in the tribe Triticeae (Gramineae). Systematic Biology (1996)
+    vol. 45 (4) pp. 524
+
+    NOTE(review): this function appears to be unfinished: ``bipartitions``
+    is computed but never used, and the function falls off the end
+    (implicitly returning None).
+
+    NOTE(review): ``bipartition_encoding`` is treated as a plain collection
+    elsewhere in this module (e.g. ``set(tree.bipartition_encoding)``), so
+    calling ``.keys()`` on it below presumably raises AttributeError; even
+    for dicts, ``keys() + keys()`` is a TypeError on Python 3. Verify
+    before relying on this function.
+    """
+    if tree1.taxon_namespace is not tree2.taxon_namespace:
+        raise error.TaxonNamespaceIdentityError(tree1, tree2)
+    if not is_bipartitions_updated:
+        tree1.encode_bipartitions()
+        tree2.encode_bipartitions()
+    else:
+        if tree1.bipartition_encoding is None:
+            tree1.encode_bipartitions()
+        if tree2.bipartition_encoding is None:
+            tree2.encode_bipartitions()
+    se1 = tree1.bipartition_encoding
+    se2 = tree2.bipartition_encoding
+    # NOTE(review): suspect line -- see docstring
+    bipartitions = sorted(list(set(se1.keys() + se2.keys())))
+
+###############################################################################
+## Supporting
+
+def _get_length_diffs(
+        tree1,
+        tree2,
+        edge_weight_attr="length",
+        value_type=float,
+        is_bipartitions_updated=False,
+        bipartition_length_diff_map=False):
+    """
+    Returns a list of tuples, with the first element of each tuple representing
+    the length of the branch subtending a particular bipartition on ``tree1``, and
+    the second element the length of the same branch on ``tree2``. If a
+    particular bipartition is found on one tree but not in the other, a value of zero
+    is used for the missing bipartition.
+
+    If ``bipartition_length_diff_map`` is True, a tuple of two values is
+    returned instead: the list described above plus a dictionary mapping
+    each bipartition to its (value1, value2) pair.
+
+    NOTE(review): the local map names appear swapped with respect to the
+    trees (``tree1_bipartition_edge_map`` is built from ``tree2``, and vice
+    versa), so the pairing order of the tuple elements may be the reverse
+    of this description. The symmetric distance functions in this module
+    are unaffected by the order -- confirm before depending on it.
+    """
+    length_diffs = []
+    bipartition_length_diffs = {}
+    if tree1.taxon_namespace is not tree2.taxon_namespace:
+        raise error.TaxonNamespaceIdentityError(tree1, tree2)
+    if not is_bipartitions_updated:
+        tree1.encode_bipartitions()
+        tree2.encode_bipartitions()
+    else:
+        if tree1.bipartition_encoding is None:
+            tree1.encode_bipartitions()
+        if tree2.bipartition_encoding is None:
+            tree2.encode_bipartitions()
+
+    # copied so that shared bipartitions can be pop()'d off as they are
+    # consumed, leaving only the bipartitions unique to the other tree
+    tree1_bipartition_edge_map = dict(tree2.bipartition_edge_map) # O(n*(2*bind + dict_item_cost))
+    tree2_bipartition_edge_map = tree1.bipartition_edge_map
+    for bipartition in tree2_bipartition_edge_map: # O n : 2*bind
+        edge = tree2_bipartition_edge_map[bipartition]
+        elen1 = getattr(edge, edge_weight_attr) # attr + bind
+        if elen1 is None:
+            elen1 = 0 # worst-case: bind
+        value1 = value_type(elen1) #  ctor + bind
+        try:
+            e2 = tree1_bipartition_edge_map.pop(bipartition) # attr + dict_lookup + bind
+            elen2 = getattr(e2, edge_weight_attr) # attr + bind
+            if elen2 is None:
+                # allow root edge to have bipartition with no value: raise error if not root edge
+                if e2.tail_node is None:
+                    elen2 = 0.0
+                else:
+                    raise ValueError("Edge length attribute is 'None': Tree: %s ('%s'), Split: %s" % (id(tree2), tree2.label, bipartition.leafset_as_newick_string(tree2.taxon_namespace)))
+        except KeyError: # excep
+            elen2 = 0.0
+        value2 = value_type(elen2) #  ctor + bind # best case
+        length_diffs.append((value1,value2)) # ctor + listappend
+        bipartition_length_diffs[bipartition] = length_diffs[-1]
+
+    # any bipartitions remaining here were not seen in the loop above and
+    # are unique to one tree; the other tree contributes a length of zero
+    for bipartition in tree1_bipartition_edge_map: # best-case not executed, worst case O(n) : 2*bind
+        edge = tree1_bipartition_edge_map[bipartition]
+        elen2 = getattr(edge, edge_weight_attr) # attr +  bind
+        if elen2 is None:
+            elen2 = 0
+        value2 = value_type(elen2) #  ctor + bind
+        e1 = tree2_bipartition_edge_map.get(bipartition) # attr + dict_lookup + bind
+        if e1 is None:
+            elen1 = 0.0
+        else:
+            elen1 = getattr(e1, edge_weight_attr) # attr  + bind
+            if elen1 is None:
+                # allow root edge to have bipartition with no value: raise error if not root edge
+                if e1.tail_node is None:
+                    elen1 = 0.0
+                else:
+                    raise ValueError("Edge length attribute is 'None': Tree: %s ('%s'), Split: %s" % (id(tree1), tree1.label, bipartition))
+                #elen1 = 0
+        value1 = value_type(elen1)
+        length_diffs.append((value1,value2)) # ctor + listappend
+        bipartition_length_diffs[bipartition] = length_diffs[-1]
+
+    # the numbers below do not reflect additions to the code to protect against
+    #   edges with length None
+    # loops
+    #  best-case:
+    #   O(n * (dict_lookup + 3*attr + 3*ctor + 7*bind + listappend))
+    #  worst-case:
+    #     separated: O(n * (2*dict_lookup + 4*attr + 3*ctor + 8*bind + listappend + excep) + n*(2*dict_lookup + 4*attr + 3*ctor + 8*bind + listappend))
+    #   or:
+    #     O(2n*(2*dict_lookup + 4*attr + 3*ctor + 8*bind + listappend + 0.5*excep))
+
+    # total
+    #  best-case:
+    #       O(n * (dict_lookup + 3*attr + 3*ctor + 8*bind + listappend + dict_item_cost))
+    #  worst-case:
+    #     O(2n*(2*dict_lookup + 4*attr + 3*ctor + 9*bind + listappend + 0.5*(dict_item_cost + excep))
+    if bipartition_length_diff_map:
+        return length_diffs, bipartition_length_diffs
+    else:
+        return length_diffs
+
+def _bipartition_difference(
+        tree1,
+        tree2,
+        dist_fn,
+        edge_weight_attr="length",
+        value_type=float,
+        is_bipartitions_updated=False):
+    """
+    Returns distance between two trees, each represented by a dictionary of
+    bipartitions (as bipartition_mask strings) to edges, using ``dist_fn`` to calculate the
+    distance based on ``edge_weight_attr`` of the edges. ``dist_fn`` is a function
+    that takes a list of pairs of values, where the values correspond to the edge
+    lengths of a given bipartition on tree1 and tree2 respectively.
+    """
+    length_diffs = _get_length_diffs(
+            tree1,
+            tree2,
+            edge_weight_attr=edge_weight_attr,
+            value_type=value_type,
+            is_bipartitions_updated=is_bipartitions_updated)
+    return dist_fn(length_diffs)
+
diff --git a/dendropy/calculate/treemeasure.py b/dendropy/calculate/treemeasure.py
new file mode 100644
index 0000000..59e06f9
--- /dev/null
+++ b/dendropy/calculate/treemeasure.py
@@ -0,0 +1,361 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Statistics, metrics, measurements, and values calculated on (single) trees.
+"""
+
+import math
+
+EULERS_CONSTANT = 0.5772156649015328606065120900824024310421
+
+class PatristicDistanceMatrix(object):
+    """
+    Calculates and maintains patristic distance information of taxa on a tree.
+    ``max_dist_taxa`` and ``max_dist_nodes`` will return a tuple of taxon objects
+    and corresponding nodes, respectively, that span the greatest path distance
+    in the tree. The mid-point between the two is *guaranteed* to be on the
+    path closer to the first item of each pair.
+    """
+
+    def __init__(self, tree=None):
+        """
+        If ``tree`` is given, distances are computed immediately via
+        :meth:`calc`; otherwise the matrix is empty until :meth:`calc` is
+        called explicitly.
+        """
+        self.tree = None
+        self.taxon_namespace = None
+        self._pat_dists = {}   # taxon -> {taxon -> patristic distance}
+        self._path_steps = {}  # taxon -> {taxon -> number of edges on path}
+        self.max_dist = None
+        self.max_dist_taxa = None
+        self.max_dist_nodes = None
+        self._mrca = {}        # taxon -> {taxon -> MRCA node}
+        if tree is not None:
+            self.tree = tree
+            self.calc()
+
+    def __call__(self, taxon1, taxon2):
+        """
+        Returns patristic distance between two taxon objects.
+        """
+        if taxon1 is taxon2:
+            return 0.0
+        # only one ordering of each pair is stored: fall back to the
+        # reversed ordering on a miss
+        try:
+            return self._pat_dists[taxon1][taxon2]
+        except KeyError:
+            return self._pat_dists[taxon2][taxon1]
+
+    def mrca(self, taxon1, taxon2):
+        """
+        Returns MRCA of two taxon objects.
+        """
+        if taxon1 is taxon2:
+            return taxon1
+        # only one ordering of each pair is stored: fall back to the
+        # reversed ordering on a miss
+        try:
+            return self._mrca[taxon1][taxon2]
+        except KeyError:
+            return self._mrca[taxon2][taxon1]
+
+    def path_edge_count(self, taxon1, taxon2):
+        """
+        Returns the number of edges between two taxon objects.
+        """
+        if taxon1 is taxon2:
+            return 0
+        # only one ordering of each pair is stored: fall back to the
+        # reversed ordering on a miss
+        try:
+            return self._path_steps[taxon1][taxon2]
+        except KeyError:
+            return self._path_steps[taxon2][taxon1]
+
+    def calc(self, tree=None, create_midpoints=None, is_bipartitions_updated=False):
+        """
+        Calculates the distances. Note that the path length (in number of
+        steps) between taxa that span the root will be off by one if
+        the tree is unrooted.
+
+        NOTE(review): ``create_midpoints`` is accepted but never used in
+        this method -- confirm whether it is vestigial.
+        """
+        if tree is not None:
+            self.tree = tree
+        assert self.tree is not None
+        if not is_bipartitions_updated:
+            self.tree.encode_bipartitions()
+        self.taxon_namespace = self.tree.taxon_namespace
+        self._pat_dists = {}
+        self._path_steps = {}
+        for i1, t1 in enumerate(self.taxon_namespace):
+            self._pat_dists[t1] = {}
+            self._path_steps[t1] = {}
+            self._mrca[t1] = {}
+            # resetting these inside the loop is redundant but harmless
+            self.max_dist = None
+            self.max_dist_taxa = None
+            self.max_dist_nodes = None
+
+        for node in self.tree.postorder_node_iter():
+            children = node.child_nodes()
+            if len(children) == 0:
+                # leaf: the only descendant path is to itself
+                node.desc_paths = {node : (0,0)}
+            else:
+                node.desc_paths = {}
+                # fold each child's descendant paths into this node's,
+                # and record pairwise distances between descendants of
+                # distinct children (for which this node lies on the
+                # connecting path)
+                for cidx1, c1 in enumerate(children):
+                    for desc1, (desc1_plen, desc1_psteps) in c1.desc_paths.items():
+                        node.desc_paths[desc1] = (desc1_plen + c1.edge.length, desc1_psteps + 1)
+                        for c2 in children[cidx1+1:]:
+                            for desc2, (desc2_plen, desc2_psteps) in c2.desc_paths.items():
+                                self._mrca[desc1.taxon][desc2.taxon] = c1.parent_node
+                                pat_dist = node.desc_paths[desc1][0] + desc2_plen + c2.edge.length
+                                self._pat_dists[desc1.taxon][desc2.taxon] = pat_dist
+                                path_steps = node.desc_paths[desc1][1] + desc2_psteps + 1
+                                self._path_steps[desc1.taxon][desc2.taxon] = path_steps
+                                if self.max_dist is None or pat_dist > self.max_dist:
+                                    self.max_dist = pat_dist
+                                    midpoint = float(pat_dist) / 2
+                                    # order the pair so the midpoint falls on
+                                    # the side of the first item
+                                    if midpoint - node.desc_paths[desc1][0] <= 0:
+                                        self.max_dist_nodes = (desc1, desc2)
+                                        self.max_dist_taxa = (desc1.taxon, desc2.taxon)
+                                    else:
+                                        self.max_dist_nodes = (desc2, desc1)
+                                        self.max_dist_taxa = (desc2.taxon, desc1.taxon)
+                    del(c1.desc_paths)  # free child records once folded in
+
+    def distances(self):
+        """
+        Returns list of patristic distances.
+        """
+        dists = []
+        for dt in self._pat_dists.values():
+            for d in dt.values():
+                dists.append(d)
+        return dists
+
+    def sum_of_distances(self):
+        """
+        Returns sum of patristic distances on tree.
+        """
+        return sum(self.distances())
+
+def patristic_distance(tree, taxon1, taxon2, is_bipartitions_updated=False):
+    """
+    Given a tree with bipartitions encoded, and two taxa on that tree, returns the
+    patristic distance between the two. Much more inefficient than constructing
+    a PatristicDistanceMatrix object.
+    """
+    mrca = tree.mrca(taxa=[taxon1, taxon2], is_bipartitions_updated=is_bipartitions_updated)
+    dist = 0
+    n = tree.find_node(lambda x: x.taxon == taxon1)
+    while n != mrca:
+        if n.edge.length is not None:
+            dist += n.edge.length
+        n = n.parent_node
+    n = tree.find_node(lambda x: x.taxon == taxon2)
+    while n != mrca:
+        if n.edge.length is not None:
+            dist += n.edge.length
+        n = n.parent_node
+    return dist
+
+###########################################################################
+### Metrics -- Unary
+
+def B1(tree):
+    """
+    Returns the B1 statistic: the reciprocal of the sum of the maximum
+    number of nodes between each interior node and tip over all internal
+    nodes excluding root.
+    """
+    b1 = 0.0
+    nd_mi = {}
+    for nd in tree.postorder_node_iter():
+        if nd._parent_node is None:
+            continue
+        child_nodes = nd._child_nodes
+        if len(child_nodes) == 0:
+            nd_mi[nd] = 0.0
+            continue
+        mi = max(nd_mi[ch] for ch in child_nodes)
+        mi += 1
+        nd_mi[nd] = mi
+        b1 += 1.0/mi
+    return b1
+
+def colless_tree_imbalance(tree, normalize="max"):
+    """
+    Returns Colless' tree imbalance or I statistic: the sum of differences
+    of numbers of children in left and right subtrees over all internal
+    nodes. ``normalize`` specifies the normalization:
+
+        * "max" or True [DEFAULT]
+            normalized to maximum value for tree of
+            this size
+        * "yule"
+            normalized to the Yule model
+        * "pda"
+            normalized to the PDA (Proportional to Distinguishable
+            Arrangements) model
+        * None or False
+            no normalization
+
+    """
+    colless = 0.0
+    num_leaves = 0
+    subtree_leaves = {}
+    for nd in tree.postorder_node_iter():
+        if nd.is_leaf():
+            subtree_leaves[nd] = 1
+            num_leaves += 1
+        else:
+            total_leaves = 0
+            if len(nd._child_nodes) > 2:
+                raise TypeError("Colless' tree imbalance statistic requires strictly bifurcating trees")
+            left = subtree_leaves[nd._child_nodes[0]]
+            right = subtree_leaves[nd._child_nodes[1]]
+            colless += abs(right-left)
+            subtree_leaves[nd] = right + left
+    if normalize == "yule":
+        colless = float(colless - (num_leaves * math.log(num_leaves)) - (num_leaves * (EULERS_CONSTANT - 1.0 - math.log(2))))/num_leaves
+    elif normalize == "pda":
+        colless = colless / pow(num_leaves, 3.0/2)
+    elif normalize is True or normalize == "max":
+        ## note that Mooers 1995 (Evolution 49(2):379-384)
+        ## remarks that the correct normalization factor is
+        ## 2/((num_leaves - 1) * (num_leaves -2))
+        colless = colless * (2.0/(num_leaves * (num_leaves-3) + 2))
+    elif normalize is not None and normalize is not False:
+        raise TypeError("``normalization`` accepts only None, True, False, 'yule' or 'pda' as argument values")
+    return colless
+
+def pybus_harvey_gamma(tree, prec=0.00001):
+    """Returns the gamma statistic of Pybus and Harvey (2000). This statistic
+    is used to test for constancy of birth and death rates over the course of
+    a phylogeny.  Under the pure-birth process, the statistic should follow
+    a standard Normal distibution: a Normal(mean=0, variance=1).
+
+    If the lengths of different paths to the node differ by more than ``prec``,
+        then a ValueError exception will be raised indicating deviation from
+        ultrametricty.
+    Raises a Value Error if the tree is not ultrametric, is non-binary, or has
+        only 2 leaves.
+
+    As a side effect a ``age`` attribute is added to the nodes of the tree.
+
+    Pybus and Harvey. 2000. "Testing macro-evolutionary models using incomplete
+    molecular phylogenies." Proc. Royal Society Series B: Biological Sciences.
+    (267). 2267-2272
+    """
+    # the equation is given by:
+    #   T = \sum_{j=2}^n (jg_j)
+    #   C = T \sqrt{\frac{1}{12(n-2)}}
+    #   C gamma = \frac{1}{n-2}\sum_{i=2}^{n-1} (\sum_{k=2}^i kg_k) - \frac{T}{2}
+    # where n is the number of taxa, and g_2 ... g_n is the vector of waiting
+    #   times between consecutive (in time, not along a branch) speciation times.
+    node = None
+    speciation_ages = []
+    n = 0
+    # NOTE(review): assumes ``seed_node`` already has an ``age`` attribute
+    # (possibly None); AttributeError otherwise -- confirm callers always
+    # work with trees where this is set.
+    if tree.seed_node.age is None:
+        tree.calc_node_ages(ultrametricity_precision=prec)
+    for node in tree.postorder_node_iter():
+        if len(node.child_nodes()) == 2:
+            speciation_ages.append(node.age)
+        else:
+            # counts every node without exactly two children (i.e. the tips,
+            # assuming a strictly binary tree per the docstring)
+            n += 1
+    if node is None:
+        raise ValueError("Empty tree encountered")
+    speciation_ages.sort(reverse=True)
+    # g: waiting times between consecutive speciation events (oldest first)
+    g = []
+    older = speciation_ages[0]
+    for age in speciation_ages[1:]:
+        g.append(older - age)
+        older = age
+    g.append(older)
+    # NOTE(review): unreachable -- ``g`` always receives at least one
+    # element from the append above.
+    if not g:
+        raise ValueError("No internal nodes found (other than the root)")
+    assert(len(g) == (n - 1))
+    T = 0.0
+    accum = 0.0
+    for i in range(2, n):
+        list_index = i - 2
+        T += i * float(g[list_index])
+        accum += T
+    list_index = n - 2
+    T += (n) * g[list_index]
+    nmt = n - 2.0
+    numerator = accum/nmt - T/2.0
+    C = T*pow(1/(12*nmt), 0.5)
+    return numerator/C
+
+def N_bar(tree):
+    """
+    Returns the $\bar{N}$ statistic: the average number of nodes above a
+    terminal node.
+    """
+    leaf_count = 0
+    nbar = 0
+    for leaf_node in tree.leaf_node_iter():
+        leaf_count += 1
+        for parent in leaf_node.ancestor_iter(inclusive=False):
+            nbar += 1
+    return float(nbar) / leaf_count
+
+def sackin_index(tree, normalize=True):
+    """
+    Returns the Sackin's index: the sum of the number of ancestors for each
+    tip of the tree. The larger the Sackin's index, the less balanced the
+    tree. ``normalize`` specifies the normalization:
+
+        * True [DEFAULT]
+            normalized to number of leaves; this results in a value
+            equivalent to that given by Tree.N_bar()
+        * "yule"
+            normalized to the Yule model
+        * "pda"
+            normalized to the PDA (Proportional to Distinguishable
+            Arrangements) model
+        * None or False
+            no normalization
+
+    """
+    leaf_count = 0
+    num_anc = 0
+    for leaf_node in tree.leaf_node_iter():
+        leaf_count += 1
+        for parent in leaf_node.ancestor_iter(inclusive=False):
+            num_anc += 1
+    if normalize == "yule":
+        x = sum(1.0/j for j in range(2, leaf_count+1))
+        s = float(num_anc - (2 * leaf_count * x))/leaf_count
+    elif normalize == "pda":
+        s = float(num_anc)/(pow(leaf_count, 3.0/2))
+    elif normalize is True:
+        s = float(num_anc)/leaf_count
+    elif normalize is None or normalize is False:
+        s = float(num_anc)
+    elif normalize is not None and normalize is not False:
+        raise TypeError("``normalization`` accepts only None, True, False, 'yule' or 'pda' as argument values")
+    return s
+
+def treeness(tree):
+    """
+    Returns the proportion of total tree length that is taken up by
+    internal branches.
+    """
+    internal = 0.0
+    external = 0.0
+    for nd in tree.postorder_node_iter():
+        if not nd._parent_node:
+            continue
+        if nd.is_leaf():
+            external += nd.edge.length
+        else:
+            internal += nd.edge.length
+    return internal/(external + internal)
+
diff --git a/dendropy/calculate/treescore.py b/dendropy/calculate/treescore.py
new file mode 100644
index 0000000..d24b89c
--- /dev/null
+++ b/dendropy/calculate/treescore.py
@@ -0,0 +1,27 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Statistics, metrics, measurements, and values calculated on a tree with
+reference to external data of some kind under various criteria.
+"""
+
+from dendropy.model.parsimony import fitch_down_pass
+from dendropy.model.parsimony import fitch_up_pass
+
+
diff --git a/dendropy/calculate/treesum.py b/dendropy/calculate/treesum.py
new file mode 100644
index 0000000..03b0c41
--- /dev/null
+++ b/dendropy/calculate/treesum.py
@@ -0,0 +1,455 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tree summarization and consensus tree building.
+"""
+
+import math
+import collections
+import dendropy
+from dendropy.datamodel import taxonmodel
+from dendropy.calculate.statistics import mean_and_sample_variance
+
+##############################################################################
+## TreeSummarizer
+
+class TreeSummarizer(object):
+    "Summarizes a distribution of trees."
+
+    def __init__(self, **kwargs):
+        """
+        __init__ kwargs:
+
+            - ``support_as_labels`` (boolean)
+            - ``support_as_edge_lengths`` (boolean)
+            - ``support_as_percentages`` (boolean)
+            - ``support_label_decimals`` (integer)
+        """
+        self.support_as_labels = kwargs.get("support_as_labels", True)
+        self.support_as_edge_lengths = kwargs.get("support_as_edge_lengths", False)
+        self.support_as_percentages = kwargs.get("support_as_percentages", False)
+        self.add_node_metadata = kwargs.get("add_node_metadata", True)
+        self.default_support_label_decimals = 4
+        self.support_label_decimals = kwargs.get("support_label_decimals", self.default_support_label_decimals)
+        self.weighted_splits = False
+
+    def tree_from_splits(self,
+            split_distribution,
+            min_freq=0.5,
+            rooted=None,
+            include_edge_lengths=True):
+        """Returns a consensus tree from splits in ``split_distribution``.
+
+        If include_edge_length_var is True, then the sample variance of the
+            edge length will also be calculated and will be stored as
+            a length_var attribute.
+        """
+        taxon_namespace = split_distribution.taxon_namespace
+        taxa_mask = taxon_namespace.all_taxa_bitmask()
+        if self.weighted_splits:
+            split_freqs = split_distribution.weighted_split_frequencies
+        else:
+            split_freqs = split_distribution.split_frequencies
+        if rooted is None:
+            if split_distribution.is_all_counted_trees_rooted():
+                rooted = True
+            elif split_distribution.is_all_counted_trees_strictly_unrooted:
+                rooted = False
+        #include_edge_lengths = self.support_as_labels and include_edge_lengths
+        if self.support_as_edge_lengths and include_edge_lengths:
+            raise Exception("Cannot map support as edge lengths if edge lengths are to be set on consensus tree")
+        to_try_to_add = []
+        _almost_one = lambda x: abs(x - 1.0) <= 0.0000001
+        for s in split_freqs:
+            freq = split_freqs[s]
+            if (min_freq is None) or (freq >= min_freq) or (_almost_one(min_freq) and _almost_one(freq)):
+                to_try_to_add.append((freq, s))
+        to_try_to_add.sort(reverse=True)
+        splits_for_tree = [i[1] for i in to_try_to_add]
+        con_tree = dendropy.Tree.from_split_bitmasks(
+                split_bitmasks=splits_for_tree,
+                taxon_namespace=taxon_namespace,
+                is_rooted=rooted)
+        con_tree.encode_bipartitions()
+
+        if include_edge_lengths:
+            split_edge_lengths = {}
+            for split, edges in split_distribution.split_edge_lengths.items():
+                if len(edges) > 0:
+                    mean, var = mean_and_sample_variance(edges)
+                    elen = mean
+                else:
+                    elen = None
+                split_edge_lengths[split] = elen
+        else:
+            split_edge_lengths = None
+
+        for node in con_tree.postorder_node_iter():
+            split = node.edge.bipartition.split_bitmask
+            if split in split_freqs:
+                self.map_split_support_to_node(node=node, split_support=split_freqs[split])
+            if include_edge_lengths and split in split_distribution.split_edge_lengths:
+                edges = split_distribution.split_edge_lengths[split]
+                if len(edges) > 0:
+                    mean, var = mean_and_sample_variance(edges)
+                    elen = mean
+                else:
+                    elen = None
+                node.edge.length = elen
+
+        return con_tree
+
+    def compose_support_label(self, split_support_freq):
+        "Returns an appropriately composed and formatted support label."
+        if self.support_as_percentages:
+            if self.support_label_decimals <= 0:
+                support_label = "%d" % round(split_support_freq * 100, 0)
+            else:
+                support_label_template = "%%0.%df" % self.support_label_decimals
+                support_label = support_label_template % round(split_support_freq * 100,
+                    self.support_label_decimals)
+        else:
+            if self.support_label_decimals <= 0:
+                support_label_decimals = self.default_support_label_decimals
+            else:
+                support_label_decimals = self.support_label_decimals
+            support_label_template = "%%0.%df" % support_label_decimals
+            support_label = support_label_template % round(split_support_freq,
+                support_label_decimals)
+        return support_label
+
+    def map_split_support_to_node(self,
+            node,
+            split_support,
+            attr_name="support"):
+        "Appropriately sets up a node."
+        if self.support_as_percentages:
+            support_value = split_support * 100
+        else:
+            support_value = split_support
+        if self.support_as_labels:
+            node.label = self.compose_support_label(split_support)
+        if self.support_as_edge_lengths:
+            node.edge.length = support_value
+        if self.add_node_metadata and attr_name:
+            setattr(node, attr_name, support_value)
+            node.annotations.drop(name=attr_name)
+            node.annotations.add_bound_attribute(attr_name,
+                    real_value_format_specifier=".{}f".format(self.support_label_decimals))
+        return node
+
+    def map_split_support_to_tree(self, tree, split_distribution):
+        "Maps splits support to the given tree."
+        if self.weighted_splits:
+            split_freqs = split_distribution.weighted_split_frequencies
+        else:
+            split_freqs = split_distribution.split_frequencies
+        tree.reindex_taxa(taxon_namespace=split_distribution.taxon_namespace)
+        assert tree.taxon_namespace is split_distribution.taxon_namespace
+        tree.encode_bipartitions()
+        for edge in tree.postorder_edge_iter():
+            split = edge.bipartition.split_bitmask
+            if split in split_freqs:
+                split_support = split_freqs[split]
+            else:
+                split_support = 0.0
+            self.map_split_support_to_node(edge.head_node, split_support)
+        return tree
+
    def annotate_nodes_and_edges(self,
            tree,
            split_distribution,
            is_bipartitions_updated=False,):
        """
        Summarizes edge length and age information in ``split_distribution`` for
        each node on target tree ``tree``.
        This will result in each node in ``tree`` being decorated with the following attributes:
            ``age_mean``,
            ``age_median``,
            ``age_sd``,
            ``age_hpd95``,
            ``age_quant_5_95``,
            ``age_range``,
        And each edge in ``tree`` being decorated with the following attributes:
            ``length_mean``,
            ``length_median``,
            ``length_sd``,
            ``length_hpd95``,
            ``length_quant_5_95``,
            ``length_range``,
        These attributes will be added to the annotations dictionary to be persisted.
        Splits on ``tree`` that do not appear in the corresponding summary
        table get all of these attributes set to `None` (and no annotations).
        """
        # The tree and the distribution must index splits against the same
        # taxa for the bitmask lookups below to be meaningful.
        assert tree.taxon_namespace is split_distribution.taxon_namespace
        if not is_bipartitions_updated:
            tree.encode_bipartitions()
        split_edge_length_summaries = split_distribution.split_edge_length_summaries
        split_node_age_summaries = split_distribution.split_node_age_summaries
        # Statistic names expected as keys in each per-split summary dict.
        fields = ['mean', 'median', 'sd', 'hpd95', 'quant_5_95', 'range']

        for edge in tree.preorder_edge_iter():
            # NOTE(review): sibling methods in this class read
            # ``edge.bipartition.split_bitmask``; confirm ``edge.split_bitmask``
            # is an equivalent alias in this DendroPy version.
            split = edge.split_bitmask
            nd = edge.head_node
            # Length statistics decorate the edge; age statistics decorate
            # the node at the head of that edge.
            for summary_name, summary_target, summary_src in [ ('length', edge, split_edge_length_summaries),
                                               ('age', nd, split_node_age_summaries) ]:
                if split in summary_src:
                    summary = summary_src[split]
                    for field in fields:
                        attr_name = summary_name + "_" + field
                        setattr(summary_target, attr_name, summary[field])
                        # clear annotations, which might be associated with either nodes
                        # or edges due to the way NEXUS/NEWICK node comments are parsed
                        nd.annotations.drop(name=attr_name)
                        edge.annotations.drop(name=attr_name)
                        summary_target.annotations.add_bound_attribute(attr_name)
                else:
                    # Split not seen in the summaries: set placeholders so the
                    # attributes exist uniformly across the tree.
                    for field in fields:
                        attr_name = summary_name + "_" + field
                        setattr(summary_target, attr_name, None)
+
+    def summarize_node_ages_on_tree(self,
+            tree,
+            split_distribution,
+            set_edge_lengths=True,
+            collapse_negative_edges=False,
+            allow_negative_edges=False,
+            summarization_fn=None,
+            is_bipartitions_updated=False):
+        """
+        Sets the ``age`` attribute of nodes on ``tree`` (a |Tree| object) to the
+        result of ``summarization_fn`` applied to the vector of ages of the
+        same node on the input trees (in ``split_distribution``, a
+        `SplitDistribution` object) being summarized.
+        ``summarization_fn`` should take an iterable of floats, and return a float. If `None`, it
+        defaults to calculating the mean (``lambda x: float(sum(x))/len(x)``).
+        If ``set_edge_lengths`` is `True`, then edge lengths will be set to so that the actual node ages
+        correspond to the ``age`` attribute value.
+        If ``collapse_negative_edges`` is True, then edge lengths with negative values will be set to 0.
+        If ``allow_negative_edges`` is True, then no error will be raised if edges have negative lengths.
+        """
+        if summarization_fn is None:
+            summarization_fn = lambda x: float(sum(x))/len(x)
+        if is_bipartitions_updated:
+            tree.encode_splits()
+        #'height',
+        #'height_median',
+        #'height_95hpd',
+        #'height_range',
+        #'length',
+        #'length_median',
+        #'length_95hpd',
+        #'length_range',
+        for edge in tree.preorder_edge_iter():
+            split = edge.bipartition.split_bitmask
+            nd = edge.head_node
+            if split in split_distribution.split_node_ages:
+                ages = split_distribution.split_node_ages[split]
+                nd.age = summarization_fn(ages)
+            else:
+                # default to age of parent if split not found
+                nd.age = nd.parent_node.age
+            ## force parent nodes to be at least as old as their oldest child
+            if collapse_negative_edges:
+                for child in nd.child_nodes():
+                    if child.age > nd.age:
+                        nd.age = child.age
+        if set_edge_lengths:
+            tree.set_edge_lengths_from_node_ages(allow_negative_edges=allow_negative_edges)
+        return tree
+
+    def summarize_edge_lengths_on_tree(self,
+            tree,
+            split_distribution,
+            summarization_fn=None,
+            is_bipartitions_updated=False):
+        """
+        Sets the lengths of edges on ``tree`` (a |Tree| object) to the mean
+        lengths of the corresponding edges on the input trees (in
+        ``split_distribution``, a `SplitDistribution` object) being
+        summarized.
+        ``summarization_fn`` should take an iterable of floats, and return a float. If `None`, it
+        defaults to calculating the mean (``lambda x: float(sum(x))/len(x)``).
+        """
+        if summarization_fn is None:
+            summarization_fn = lambda x: float(sum(x))/len(x)
+        if not is_bipartitions_updated:
+            tree.encode_bipartitions()
+        for edge in tree.postorder_edge_iter():
+            split = edge.bipartition.split_bitmask
+            if (split in split_distribution.split_edge_lengths
+                    and split_distribution.split_edge_lengths[split]):
+                lengths = split_distribution.split_edge_lengths[split]
+                edge.length = summarization_fn(lengths)
+            elif (split in split_distribution.split_edge_lengths
+                    and not split_distribution.split_edge_lengths[split]):
+                # no input trees had any edge lengths for this split
+                edge.length = None
+            else:
+                # split on target tree that was not found in any of the input
+                # trees
+                edge.length = 0.0
+        return tree
+
+        ## here we add the support values and/or edge lengths for the terminal taxa ##
+        for node in leaves:
+            if not is_rooted:
+                split = node.edge.split_bitmask
+            else:
+                split = node.edge.leafset_bitmask
+            self.map_split_support_to_node(node, 1.0)
+            if include_edge_lengths:
+                elen = split_distribution.split_edge_lengths.get(split, [0.0])
+                if len(elen) > 0:
+                    mean, var = mean_and_sample_variance(elen)
+                    node.edge.length = mean
+                    if include_edge_length_var:
+                        node.edge.length_var = var
+                else:
+                    node.edge.length = None
+                    if include_edge_length_var:
+                        node.edge.length_var = None
+        #if include_edge_lengths:
+            #self.map_edge_lengths_to_tree(tree=con_tree,
+            #        split_distribution=split_distribution,
+            #        summarization_fn=summarization_fn,
+            #        include_edge_length_var=False)
+        return con_tree
+
+    def count_splits_on_trees(self, tree_iterator, split_distribution=None, is_bipartitions_updated=False):
+        """
+        Given a list of trees file, a SplitsDistribution object (a new one, or,
+        if passed as an argument) is returned collating the split data in the files.
+        """
+        if split_distribution is None:
+            split_distribution = dendropy.SplitDistribution()
+        taxon_namespace = split_distribution.taxon_namespace
+        for tree_idx, tree in enumerate(tree_iterator):
+            if taxon_namespace is None:
+                assert(split_distribution.taxon_namespace is None)
+                split_distribution.taxon_namespace = tree.taxon_namespace
+                taxon_namespace = tree.taxon_namespace
+            else:
+                assert(taxon_namespace is tree.taxon_namespace)
+            split_distribution.count_splits_on_tree(tree,
+                    is_bipartitions_updated=is_bipartitions_updated)
+        return split_distribution
+
+    def consensus_tree(self, trees, min_freq=0.5, is_bipartitions_updated=False):
+        """
+        Returns a consensus tree of all trees in ``trees``, with minumum frequency
+        of split to be added to the consensus tree given by ``min_freq``.
+        """
+        taxon_namespace = trees[0].taxon_namespace
+        split_distribution = dendropy.SplitDistribution(taxon_namespace=taxon_namespace)
+        self.count_splits_on_trees(trees,
+                split_distribution=split_distribution,
+                is_bipartitions_updated=is_bipartitions_updated)
+        tree = self.tree_from_splits(split_distribution, min_freq=min_freq)
+        return tree
+
+##############################################################################
+## Convenience Wrappers
+
def consensus_tree(trees, min_freq=0.5, is_bipartitions_updated=False, **kwargs):
    """
    Convenience wrapper: constructs a TreeSummarizer configured by ``kwargs``
    and returns the consensus tree of all trees in ``trees``, with minimum
    frequency of split to be added to the consensus tree given by ``min_freq``.
    """
    summarizer = TreeSummarizer(**kwargs)
    return summarizer.consensus_tree(
            trees,
            min_freq=min_freq,
            is_bipartitions_updated=is_bipartitions_updated)
+
+##############################################################################
+## TreeCounter
+
class TopologyCounter(object):
    """
    Tracks frequency of occurrences of topologies.

    A topology is identified by a hashable "topology hash" (by default, the
    frozenset of the tree's bipartition encoding); the counter maps each
    hash to the number of trees seen with that topology.
    """

    @staticmethod
    def hash_topology(tree):
        """
        Set of all splits on tree: default topology hash.
        Requires ``tree.bipartition_encoding`` to be up to date.
        """
        return frozenset(tree.bipartition_encoding)

    def __init__(self):
        # Maps topology hash -> number of trees counted with that topology.
        self.topology_hash_map = {}
        # Total number of trees registered, across all topologies.
        self.total_trees_counted = 0

    def update_topology_hash_map(self,
            src_map):
        """
        Imports data from another counter: merges the per-topology counts in
        ``src_map`` (topology hash -> count) into this counter's map and
        updates the running total.
        """
        # (An unused local accumulator in the original has been removed.)
        for topology_hash, count in src_map.items():
            self.topology_hash_map[topology_hash] = (
                    self.topology_hash_map.get(topology_hash, 0) + count)
            self.total_trees_counted += count

    def count(self,
            tree,
            is_bipartitions_updated=False):
        """
        Logs/registers a single tree, encoding its bipartitions first if the
        caller has not already done so.
        """
        if not is_bipartitions_updated:
            tree.encode_bipartitions()
        topology = self.hash_topology(tree)
        self.topology_hash_map[topology] = self.topology_hash_map.get(topology, 0) + 1
        self.total_trees_counted += 1

    def calc_hash_freqs(self):
        """
        Returns an ordered dictionary (collections.OrderedDict) of topology hashes mapped
        to a tuple, (raw numbers of occurrences, proportion of total trees
        counted) in (descending) order of the proportion of occurrence.
        """
        t_freqs = collections.OrderedDict()
        count_topology = [(v, k) for k, v in self.topology_hash_map.items()]
        count_topology.sort(reverse=True)
        for count, topology_hash in count_topology:
            freq = float(count) / self.total_trees_counted
            t_freqs[topology_hash] = (count, freq)
        return t_freqs

    def calc_tree_freqs(self, taxon_namespace, is_rooted=False):
        """
        Returns an ordered dictionary (collections.OrderedDict) of DendroPy trees mapped
        to a tuple, (raw numbers of occurrences, proportion of total trees
        counted) in (descending) order of the proportion of occurrence.
        """
        hash_freqs = self.calc_hash_freqs()
        tree_freqs = collections.OrderedDict()
        for topology_hash, (count, freq) in hash_freqs.items():
            # Reconstitute a concrete tree from the stored split encoding.
            tree = dendropy.Tree.from_bipartition_encoding(
                bipartition_encoding=topology_hash,
                taxon_namespace=taxon_namespace,
                is_rooted=is_rooted)
            tree_freqs[tree] = (count, freq)
        return tree_freqs
+
+## TreeCounter
+##############################################################################
+
diff --git a/dendropy/dataio/__init__.py b/dendropy/dataio/__init__.py
new file mode 100644
index 0000000..bc74eb1
--- /dev/null
+++ b/dendropy/dataio/__init__.py
@@ -0,0 +1,106 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+import collections
+from dendropy.dataio import newickreader
+from dendropy.dataio import newickwriter
+from dendropy.dataio import newickyielder
+from dendropy.dataio import fastareader
+from dendropy.dataio import fastawriter
+from dendropy.dataio import nexusreader
+from dendropy.dataio import nexuswriter
+from dendropy.dataio import nexusyielder
+from dendropy.dataio import nexmlreader
+from dendropy.dataio import nexmlwriter
+from dendropy.dataio import nexmlyielder
+from dendropy.dataio import phylipreader
+from dendropy.dataio import phylipwriter
+from dendropy.utility import container
+
# Each schema name maps to a triple of I/O services; any slot may be None
# when that service is unavailable for the schema.
_IOServices = collections.namedtuple(
        "_IOServices",
        ["reader", "writer", "tree_yielder"]
        )

# Registry of built-in schemas; keys are matched case-insensitively
# (so "NEXUS", "Nexus", and "nexus" all resolve to the same services).
_IO_SERVICE_REGISTRY = container.CaseInsensitiveDict()
_IO_SERVICE_REGISTRY["newick"] = _IOServices(newickreader.NewickReader, newickwriter.NewickWriter, newickyielder.NewickTreeDataYielder)
_IO_SERVICE_REGISTRY["nexus"] = _IOServices(nexusreader.NexusReader, nexuswriter.NexusWriter, nexusyielder.NexusTreeDataYielder)
# "nexus/newick" is yield-only: mixed-format tree streaming, no reader/writer.
_IO_SERVICE_REGISTRY["nexus/newick"] = _IOServices(None, None, nexusyielder.NexusNewickTreeDataYielder)
_IO_SERVICE_REGISTRY["nexml"] = _IOServices(nexmlreader.NexmlReader, nexmlwriter.NexmlWriter, nexmlyielder.NexmlTreeDataYielder)
_IO_SERVICE_REGISTRY["fasta"] = _IOServices(fastareader.FastaReader, fastawriter.FastaWriter, None)
# The typed FASTA schemas below are deprecated aliases kept for backward
# compatibility; their readers emit deprecation warnings.
_IO_SERVICE_REGISTRY["dnafasta"] = _IOServices(fastareader.DnaFastaReader, fastawriter.FastaWriter, None)
_IO_SERVICE_REGISTRY["rnafasta"] = _IOServices(fastareader.RnaFastaReader, fastawriter.FastaWriter, None)
_IO_SERVICE_REGISTRY["proteinfasta"] = _IOServices(fastareader.ProteinFastaReader, fastawriter.FastaWriter, None)
_IO_SERVICE_REGISTRY["phylip"] = _IOServices(phylipreader.PhylipReader, phylipwriter.PhylipWriter, None)
+
def get_reader(schema, **kwargs):
    """
    Return a new reader instance for ``schema``, constructed with ``kwargs``.

    Raises
    ------
    NotImplementedError
        If ``schema`` is unknown or has no reader service.
    """
    try:
        reader_type = _IO_SERVICE_REGISTRY[schema].reader
    except KeyError:
        reader_type = None
    if reader_type is None:
        raise NotImplementedError("'{}' is not a supported data reading schema".format(schema))
    # Bug fix: the constructor call is outside the try block so that a
    # KeyError raised from within the reader's own __init__ is not
    # misreported as an unsupported schema.
    return reader_type(**kwargs)
+
def get_writer(
        schema,
        **kwargs):
    """
    Return a new writer instance for ``schema``, constructed with ``kwargs``.

    Raises
    ------
    NotImplementedError
        If ``schema`` is unknown or has no writer service.
    """
    try:
        writer_type = _IO_SERVICE_REGISTRY[schema].writer
    except KeyError:
        writer_type = None
    if writer_type is None:
        raise NotImplementedError("'{}' is not a supported data writing schema".format(schema))
    # Bug fix: the constructor call is outside the try block so that a
    # KeyError raised from within the writer's own __init__ is not
    # misreported as an unsupported schema.
    return writer_type(**kwargs)
+
def get_tree_yielder(
        files,
        schema,
        taxon_namespace,
        tree_type,
        **kwargs):
    """
    Return a new tree-yielding iterator over ``files`` for ``schema``.

    Raises
    ------
    NotImplementedError
        If ``schema`` is unknown or has no tree-yielder service.
    """
    try:
        yielder_type = _IO_SERVICE_REGISTRY[schema].tree_yielder
    except KeyError:
        yielder_type = None
    if yielder_type is None:
        raise NotImplementedError("'{}' is not a supported data yielding schema".format(schema))
    # Bug fix: the constructor call is outside the try block so that a
    # KeyError raised from within the yielder's own __init__ is not
    # misreported as an unsupported schema.
    return yielder_type(
            files=files,
            taxon_namespace=taxon_namespace,
            tree_type=tree_type,
            **kwargs)
+
def register_service(schema, reader=None, writer=None, tree_yielder=None):
    """
    Register (or replace) the reader/writer/tree-yielder services bound to
    ``schema``.
    """
    # No ``global`` statement is needed: this mutates the registry mapping
    # in place rather than rebinding the module-level name.
    _IO_SERVICE_REGISTRY[schema] = _IOServices(reader, writer, tree_yielder)
+
def register_reader(schema, reader):
    """
    Register ``reader`` for ``schema``, preserving any writer or tree-yielder
    already registered for that schema.
    """
    global _IO_SERVICE_REGISTRY
    try:
        existing = _IO_SERVICE_REGISTRY[schema]
    except KeyError:
        # Brand-new schema: no other services to carry over.
        register_service(schema=schema, reader=reader)
    else:
        register_service(
                schema=schema,
                reader=reader,
                writer=existing.writer,
                tree_yielder=existing.tree_yielder)
+
diff --git a/dendropy/dataio/fastareader.py b/dendropy/dataio/fastareader.py
new file mode 100644
index 0000000..d2a7620
--- /dev/null
+++ b/dendropy/dataio/fastareader.py
@@ -0,0 +1,145 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Implementation of FASTA-format data reader.
+"""
+
+from dendropy.dataio import ioservice
+from dendropy.utility.error import DataParseError
+from dendropy.utility import deprecate
+
class FastaReader(ioservice.DataReader):
    "Encapsulates loading and parsing of a FASTA format file."

    def __init__(self, **kwargs):
        """
        Keyword Arguments
        -----------------
        data_type: str
            When reading into a |DataSet| object, the type of data must be
            specified: "dna", "rna", "protein", "restriction", "infinite",
            "standard", or "continuous".
        default_state_alphabet: |StateAlphabet| instance
            A |StateAlphabet| object to be used to manage the alphabet of the
            characters (|StandardCharacterMatrix| **only**).
        """
        ioservice.DataReader.__init__(self)
        self.data_type = kwargs.pop("data_type", None)
        self.default_state_alphabet = kwargs.pop("default_state_alphabet", None)
        if self.default_state_alphabet is not None:
            # A custom alphabet only makes sense for "standard" data.
            if self.data_type is None:
                self.data_type = "standard"
            elif self.data_type != "standard":
                raise ValueError("Cannot specify 'default_state_alphabet' with data type of '{}'".format(self.data_type))
        self.check_for_unused_keyword_arguments(kwargs)

    def _read(self,
            stream,
            taxon_namespace_factory=None,
            tree_list_factory=None,
            char_matrix_factory=None,
            state_alphabet_factory=None,
            global_annotations_target=None):
        """
        Parses FASTA-formatted data from ``stream`` into a single character
        matrix, returned wrapped in a ``Product``.

        Raises
        ------
        DataParseError
            On a repeated sequence name, a name line following an empty
            sequence, sequence data before any name line, or an
            unrecognized state symbol.
        """
        taxon_namespace = taxon_namespace_factory(label=None)
        if self.data_type is None:
            raise TypeError("Data type must be specified for this schema")
        if self.data_type == "standard" and self.default_state_alphabet is not None:
            char_matrix = char_matrix_factory(
                    self.data_type,
                    label=None,
                    taxon_namespace=taxon_namespace,
                    default_state_alphabet=self.default_state_alphabet,
                    )
        else:
            char_matrix = char_matrix_factory(
                    self.data_type,
                    label=None,
                    taxon_namespace=taxon_namespace)
        symbol_state_map = char_matrix.default_state_alphabet.full_symbol_state_map
        curr_vec = None     # sequence currently being filled
        curr_taxon = None   # taxon owning ``curr_vec``
        for line_index, line in enumerate(stream):
            s = line.strip()
            if not s:
                # Skip blank lines.
                continue
            if s.startswith('>'):
                # Header line: start a new sequence.
                name = s[1:].strip()
                curr_taxon = taxon_namespace.require_taxon(label=name)
                if curr_taxon in char_matrix:
                    raise DataParseError(message="FASTA error: Repeated sequence name ('{}') found".format(name), line_num=line_index + 1, stream=stream)
                if curr_vec is not None and len(curr_vec) == 0:
                    raise DataParseError(message="FASTA error: Expected sequence, but found another sequence name ('{}')".format(name), line_num=line_index + 1, stream=stream)
                curr_vec = char_matrix[curr_taxon]
            elif curr_vec is None:
                # Bug fix: corrected the grammar of the original error
                # message ("Expecting a lines starting with ...").
                raise DataParseError(message="FASTA error: Expecting a line starting with > before sequences", line_num=line_index + 1, stream=stream)
            else:
                # Sequence line: translate each symbol to a state, ignoring
                # any stray whitespace within the line.
                states = []
                for col_ind, c in enumerate(s):
                    c = c.strip()
                    if not c:
                        continue
                    try:
                        state = symbol_state_map[c]
                    except KeyError:
                        raise DataParseError(message="Unrecognized sequence symbol '{}'".format(c), line_num=line_index + 1, col_num=col_ind + 1, stream=stream)
                    states.append(state)
                # Batch-extend rather than appending symbol-by-symbol.
                curr_vec.extend(states)
        product = self.Product(
                taxon_namespaces=None,
                tree_lists=None,
                char_matrices=[char_matrix])
        return product
+
+
class DnaFastaReader(FastaReader):
    """Deprecated 'dnafasta' schema shim: a FastaReader with data_type fixed to 'dna'."""

    def __init__(self, **kwargs):
        # Warn before delegating so callers see the deprecation notice first.
        deprecate.dendropy_deprecation_warning(
                preamble="Deprecated since DendroPy 4:",
                old_construct="d = dendropy.CharacterMatrix.get_from_path(schema='dnafasta', ...)\nd = dendropy.DataSet.get_from_path(schema='dnafasta', ...)",
                new_construct="d = dendropy.DnaCharacterMatrix.get(path=..., schema='fasta', ...)\nd = dendropy.DataSet.get(path=..., schema='fasta', data_type='dna', ...)",
                stacklevel=7)
        kwargs["data_type"] = "dna"
        FastaReader.__init__(self, **kwargs)
+
class RnaFastaReader(FastaReader):
    """Deprecated 'rnafasta' schema shim: a FastaReader with data_type fixed to 'rna'."""

    def __init__(self, **kwargs):
        # Warn before delegating so callers see the deprecation notice first.
        deprecate.dendropy_deprecation_warning(
                preamble="Deprecated since DendroPy 4:",
                old_construct="d = dendropy.CharacterMatrix.get_from_path(schema='rnafasta', ...)\nd = dendropy.DataSet.get_from_path(schema='rnafasta', ...)",
                new_construct="d = dendropy.RnaCharacterMatrix.get(path=..., schema='fasta', ...)\nd = dendropy.DataSet.get(path=..., schema='fasta', data_type='rna', ...)",
                stacklevel=7)
        kwargs["data_type"] = "rna"
        FastaReader.__init__(self, **kwargs)
+
class ProteinFastaReader(FastaReader):
    """Deprecated 'proteinfasta' schema shim: a FastaReader with data_type fixed to 'protein'."""

    def __init__(self, **kwargs):
        # Warn before delegating so callers see the deprecation notice first.
        deprecate.dendropy_deprecation_warning(
                preamble="Deprecated since DendroPy 4:",
                old_construct="d = dendropy.CharacterMatrix.get_from_path(schema='proteinfasta', ...)\nd = dendropy.DataSet.get_from_path(schema='proteinfasta', ...)",
                new_construct="d = dendropy.ProteinCharacterMatrix.get(path=..., schema='fasta', ...)\nd = dendropy.DataSet.get(path=..., schema='fasta', data_type='protein', ...)",
                stacklevel=7)
        kwargs["data_type"] = "protein"
        FastaReader.__init__(self, **kwargs)
+
diff --git a/dendropy/dataio/fastawriter.py b/dendropy/dataio/fastawriter.py
new file mode 100644
index 0000000..c3e0aab
--- /dev/null
+++ b/dendropy/dataio/fastawriter.py
@@ -0,0 +1,74 @@
+# !/usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Implementation of FASTA-format data writer.
+"""
+
+from dendropy.dataio import ioservice
+
class FastaWriter(ioservice.DataWriter):
    """
    Formatter for FASTA writer
    """

    def __init__(self, **kwargs):
        """

        Keyword Arguments
        -----------------

        wrap: boolean, default: `True`
            If `False`, then sequences are written out as single, unbroken lines.
            Defaults to `True`: wraps sequences at 70 columns.
        wrap_width: int, default: 70
            Number of characters per wrapped sequence line (only used when
            ``wrap`` is `True`).
        """
        ioservice.DataWriter.__init__(self)
        self.wrap = kwargs.get("wrap", True)
        self.wrap_width = kwargs.get("wrap_width", 70)

    def _write(self,
            stream,
            taxon_namespaces=None,
            tree_lists=None,
            char_matrices=None,
            global_annotations_target=None):
        # Writes every supplied character matrix; if this writer is attached
        # to a taxon namespace, matrices bound to other namespaces are skipped.
        for char_matrix in char_matrices:
            if (self.attached_taxon_namespace is not None
                    and char_matrix.taxon_namespace is not self.attached_taxon_namespace):
                continue
            self._write_char_matrix(stream, char_matrix)

    def _write_char_matrix(self, stream, char_matrix):
        # Writes one ">label" header plus sequence per taxon in the matrix.
        for taxon in char_matrix:
            stream.write(">{}\n".format(taxon.label))
            seq = char_matrix[taxon]
            if self.wrap:
                # Emit one character at a time, breaking the line every
                # ``wrap_width`` characters.
                col_count = 0
                for c in seq:
                    if col_count == self.wrap_width:
                        stream.write("\n")
                        col_count = 0
                    stream.write(str(c))
                    col_count += 1
            else:
                s = "".join("{}".format(c) for c in seq)
                stream.write("{}\n".format(s))
            # NOTE(review): the wrapped branch ends the record with "\n\n"
            # (no newline after the final sequence line) while the unwrapped
            # branch effectively ends with "\n\n\n" -- the two modes produce
            # different trailing blank-line counts; confirm this is intended.
            stream.write("\n\n")
+
+
+
diff --git a/dendropy/dataio/ioservice.py b/dendropy/dataio/ioservice.py
new file mode 100644
index 0000000..8fcea52
--- /dev/null
+++ b/dendropy/dataio/ioservice.py
@@ -0,0 +1,569 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+import sys
+import collections
+import warnings
+from dendropy.datamodel import taxonmodel
+from dendropy.utility import deprecate
+if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
+    from dendropy.utility.filesys import pre_py34_open as open
+
+###############################################################################
+## IOService
+
+class IOService(object):
+    """
+    Base class for all readers/writers.
+    """
+
+    @staticmethod
+    def attached_taxon_set_deprecation_warning():
+        deprecate.dendropy_deprecation_warning(
+                old_construct="attached_taxon_set",
+                new_construct="attached_taxon_namespace",
+                stacklevel=5)
+
+    def __init__(self):
+        self.attached_taxon_namespace = None
+
+    def _get_attached_taxon_set(self):
+        IOService.attached_taxon_set_deprecation_warning()
+        return self.attached_taxon_namespace
+    def _set_attached_taxon_set(IOService, v):
+        IOService.attached_taxon_set_deprecation_warning()
+        self.attached_taxon_namespace = v
+    def _del_attached_taxon_set(IOService):
+        IOService.attached_taxon_set_deprecation_warning()
+    attached_taxon_set = property(_get_attached_taxon_set, _set_attached_taxon_set, _del_attached_taxon_set)
+
+    def check_for_unused_keyword_arguments(self, kwargs_dict):
+        ignore_unrecognized_keyword_arguments = kwargs_dict.pop("ignore_unrecognized_keyword_arguments", False)
+        attach_taxon_namespace, taxon_namespace = taxonmodel.process_attached_taxon_namespace_directives(kwargs_dict)
+        if attach_taxon_namespace or (taxon_namespace is not None):
+            self.attached_taxon_namespace = taxon_namespace
+        if kwargs_dict and not ignore_unrecognized_keyword_arguments:
+            raise TypeError("Unrecognized or unsupported arguments: {}".format(kwargs_dict))
+
+###############################################################################
+## DataReader
+
+class DataReader(IOService):
+    """
+    Base class for all readers.
+
+    Consumes a stream and builds or composes corresponding phylogenetic data
+    object or objects. Abstract class, to be implemented by derived classes
+    specializing in particular data formats.
+    """
+
+    # Result bundle returned by ``_read``: one list per top-level data type.
+    Product = collections.namedtuple(
+            "product",
+            ["taxon_namespaces", "tree_lists", "char_matrices"]
+            )
+
+    def __init__(self):
+        IOService.__init__(self)
+
+    def _read(self,
+            stream,
+            taxon_namespace_factory=None,
+            tree_list_factory=None,
+            char_matrix_factory=None,
+            state_alphabet_factory=None,
+            global_annotations_target=None):
+        """
+        Deriving classes should implement this method to build a data product
+        from the information in ``stream`` using the provided factory functions.
+
+        Parameters
+        ----------
+
+        stream : file or file-like object
+            Source of data.
+
+        taxon_namespace_factory : function object
+            A function that takes one named argument, ``label``, and returns a
+            |TaxonNamespace| object to be used for each distinct block of
+            operational taxonomic unit concept definitions encountered in the
+            data source.
+
+            The function will be called as::
+
+                tns = taxon_namespace_factory(label="label")
+
+            In the simplest case, a new |TaxonNamespace| object can be created
+            for each block of taxon definitions in the data source by setting
+            the factory function to::
+
+                taxon_namespace_factory = TaxonNamespace
+
+            If all data objects are to be organized into a DataSet object,
+            then:
+
+                taxon_namespace_factory = dataset.new_taxon_namespace
+
+            If all data objects should reference the *same* |TaxonNamespace|
+            object, then:
+
+                taxon_namespace_factory = lambda label : taxon_namespace
+
+            where ``taxon_namespace`` is an existing |TaxonNamespace| object that
+            should be used.
+
+            If ``taxon_namespace_factory`` is `None`, then no tree data will be
+            parsed.
+
+        tree_list_factory : function object
+            A function that takes two named arguments, ``label`` and
+            ``taxon_namespace``, and returns a |TreeList| or equivalent object to
+            be used to manage each distinct collection of trees in the data
+            source.
+
+            The function will be called as::
+
+                tns = taxon_namespace_factory(label="label")
+                tlist = tree_list_factory(label="label", taxon_namespace=tns)
+
+            In the simplest case, a new |TreeList| object can be created for
+            each block of tree definitions in the data source by setting the
+            factory function to::
+
+                tree_list_factory = TreeList
+
+            If all data objects are to be organized into a DataSet object,
+            then:
+
+                tree_list = dataset.new_tree_list
+
+            If all Tree data objects instantiated should be accessioned into
+            the *same* |TreeList| object, then:
+
+                taxon_namespace_factory = lambda label : tree_list.taxon_namespace
+                tree_list_factory = lambda label, taxon_namespace : tree_list
+
+            where ``tree_list`` is an existing |TreeList| object that should be
+            used.
+
+        char_matrix_factory : function object
+            A function that takes two named arguments, ``label`` and
+            ``taxon_namespace``, and returns a |CharacterMatrix| or equivalent object to
+            be used to manage each alignment or distinct set of sequences in the data
+            source.
+
+            The function will be called as::
+
+                tns = taxon_namespace_factory(label="label")
+                cm = char_matrix_factory(label="label", taxon_namespace=tns)
+
+            In the simplest case, a new |CharacterMatrix| object can be created for
+            each alignment or set of sequences in the data source by setting the
+            factory function to, for e.g.::
+
+                char_matrix_factory = DnaCharacterMatrix
+
+            If all data objects are to be organized into a DataSet object,
+            then:
+
+                char_matrix = dataset.new_char_matrix
+
+            If ``char_matrix_factory`` is `None`, then no character data will be
+            parsed.
+
+        state_alphabet_factory : function object
+            A function that takes all the arguments of |StateAlphabet|
+            and returns a properly configured instance.
+
+        global_annotations_target : |Annotable| object
+            Any object that will be the target (or subject, in the grammatical
+            sense) of general metadata or annotations in the data source. If
+            `None`, then such metadata or annotations will not be stored.
+
+        Returns
+        -------
+
+        A `Product` object : a ``namedtuple`` with the following attributes:
+            "taxon_namespaces", "tree_lists", "char_matrices".
+
+        """
+        raise NotImplementedError
+
+    def read_dataset(self,
+            stream,
+            dataset,
+            taxon_namespace=None,
+            exclude_trees=False,
+            exclude_chars=False,
+            state_alphabet_factory=None):
+        """
+        Populates the given |DataSet| object from external data source.
+
+        Parameters
+        ----------
+
+        stream : file or file-like object
+            Source of data.
+        dataset : |DataSet| object
+            The target |DataSet| to populate/build.
+        taxon_namespace : |TaxonNamespace| object, optional
+            If given, all data will be accessioned under this single
+            namespace; must agree with ``dataset.attached_taxon_namespace``
+            if the latter is also set.
+        exclude_trees : boolean, default: False
+            If set to `True`, tree data will not be read from the source.
+        exclude_chars : boolean, default: False
+            If set to `True`, character data will not be read from the source.
+        state_alphabet_factory : function object, optional
+            Factory producing |StateAlphabet| instances, passed through to
+            ``_read``.
+
+        Returns
+        -------
+        A `Product` namedtuple with attributes "taxon_namespaces",
+        "tree_lists", and "char_matrices".
+
+        """
+        if taxon_namespace is not None:
+            taxon_namespace_factory = lambda label : taxon_namespace
+            if (dataset.attached_taxon_namespace is not None
+                    and dataset.attached_taxon_namespace is not taxon_namespace):
+                raise ValueError("'taxon_namespace' (or 'taxon_set') keyword argument value must be the same as 'dataset.attached_taxon_namespace' if both are not 'None'")
+            self.attached_taxon_namespace = taxon_namespace
+        elif dataset.attached_taxon_namespace is not None:
+            taxon_namespace_factory = lambda label : dataset.attached_taxon_namespace
+            self.attached_taxon_namespace = dataset.attached_taxon_namespace
+        else:
+            taxon_namespace_factory = dataset.new_taxon_namespace
+        if exclude_trees:
+            tree_list_factory = None
+        else:
+            tree_list_factory = dataset.new_tree_list
+        if exclude_chars:
+            char_matrix_factory = None
+        else:
+            char_matrix_factory = dataset.new_char_matrix
+        product = self._read(stream=stream,
+                taxon_namespace_factory=taxon_namespace_factory,
+                tree_list_factory=tree_list_factory,
+                char_matrix_factory=char_matrix_factory,
+                state_alphabet_factory=state_alphabet_factory,
+                global_annotations_target=dataset)
+        return product
+
+    def read_tree_lists(self,
+            stream,
+            taxon_namespace_factory,
+            tree_list_factory,
+            global_annotations_target=None):
+        """
+        Reads tree data from source into tree objects.
+
+        With data schemas that support the concept of multiple distinct blocks
+        or sets of trees (e.g. NEXUS or NeXML), each tree block will be
+        accessioned into a separate |TreeList| instantiated by calling
+        `tree_list_factory(label)`. If trees should be accessioned into the
+        same |TreeList|, then this can be coerced by, e.g.::
+
+            t = TreeList()
+            reader.read_tree_lists(
+                stream=stream,
+                taxon_namespace_factory=lambda x: t.taxon_namespace,
+                tree_list_factory=lambda x : t)
+
+        Parameters
+        ----------
+
+        stream : file or file-like object
+            Source of data.
+
+        taxon_namespace_factory : function object
+            A function that takes one named argument, ``label``, and returns a
+            |TaxonNamespace| object to be used for each distinct block of
+            operational taxonomic unit concept definitions encountered in the
+            data source.
+
+            The function will be called as::
+
+                tns = taxon_namespace_factory(label="label")
+
+            In the simplest case, a new |TaxonNamespace| object can be created
+            for each block of taxon definitions in the data source by setting
+            the factory function to::
+
+                taxon_namespace_factory = TaxonNamespace
+
+            If all data objects are to be organized into a DataSet object,
+            then:
+
+                taxon_namespace_factory = dataset.new_taxon_namespace
+
+            If all data objects should reference the *same* |TaxonNamespace|
+            object, then:
+
+                taxon_namespace_factory = lambda label : taxon_namespace
+
+            where ``taxon_namespace`` is an existing |TaxonNamespace| object that
+            should be used.
+
+            If ``taxon_namespace_factory`` is `None`, then no tree data will be
+            parsed.
+
+        tree_list_factory : function object
+            A function that takes two named arguments, ``label`` and
+            ``taxon_namespace``, and returns a |TreeList| or equivalent object to
+            be used to manage each distinct collection of trees in the data
+            source.
+
+            The function will be called as::
+
+                tns = taxon_namespace_factory(label="label")
+                tlist = tree_list_factory(label="label", taxon_namespace=tns)
+
+            In the simplest case, a new |TreeList| object can be created for
+            each block of tree definitions in the data source by setting the
+            factory function to::
+
+                tree_list_factory = TreeList
+
+            If all data objects are to be organized into a DataSet object,
+            then:
+
+                tree_list = dataset.new_tree_list
+
+            If all Tree data objects instantiated should be accessioned into
+            the *same* |TreeList| object, then:
+
+                taxon_namespace_factory = lambda label : tree_list.taxon_namespace
+                tree_list_factory = lambda label, taxon_namespace : tree_list
+
+            where ``tree_list`` is an existing |TreeList| object that should be
+            used.
+
+        global_annotations_target : |Annotable| object
+            Any object that will be the target (or subject, in the grammatical
+            sense) of general metadata or annotations in the data source. If
+            `None`, then such metadata or annotations will not be stored.
+
+        Returns
+        -------
+        List of |TreeList| objects.
+
+        """
+        # ``product`` is a namedtuple("DataReaderProducts", ["taxon_namespaces", "tree_lists", "char_matrices"])
+        product = self._read(stream=stream,
+                taxon_namespace_factory=taxon_namespace_factory,
+                tree_list_factory=tree_list_factory,
+                char_matrix_factory=None,
+                state_alphabet_factory=None,
+                global_annotations_target=global_annotations_target)
+        return product.tree_lists
+
+    def read_char_matrices(self,
+            stream,
+            taxon_namespace_factory,
+            char_matrix_factory,
+            state_alphabet_factory,
+            global_annotations_target=None):
+        """
+        Reads character data from source into |CharacterMatrix| objects.
+
+        Parameters are analogous to those of :meth:`read_tree_lists`, with
+        ``char_matrix_factory`` and ``state_alphabet_factory`` taking the
+        place of ``tree_list_factory``.
+
+        Returns
+        -------
+        List of |CharacterMatrix| objects.
+
+        """
+        product = self._read(stream=stream,
+                taxon_namespace_factory=taxon_namespace_factory,
+                tree_list_factory=None,
+                char_matrix_factory=char_matrix_factory,
+                state_alphabet_factory=state_alphabet_factory,
+                global_annotations_target=global_annotations_target)
+        return product.char_matrices
+
+###############################################################################
+## DataWriter
+
+class DataWriter(IOService):
+    """
+    Base class for all writers.
+
+    Writes a DendroPy phylogenetic data object to a stream. Abstract class, to
+    be implemented by derived classes specializing in particular data formats.
+    """
+
+    def __init__(self, **kwargs):
+        """
+        Constructs and configures a `DataWriter` object by "harvesting" keyword
+        arguments and setting state accordingly. Keyword arguments recognized
+        and processed will be removed from the keyword argument dictionary.
+
+        Parameters
+        ----------
+
+        **kwargs : schema- and implementation-specific keyword arguments
+
+        """
+        IOService.__init__(self)
+
+    def _write(self,
+            stream,
+            taxon_namespaces=None,
+            tree_lists=None,
+            char_matrices=None,
+            global_annotations_target=None):
+        """
+        Deriving classes should implement this method to information to ``stream``
+        in schema-specific formatting.
+
+        Parameters
+        ----------
+
+        stream : file or file-like object
+            Destination for data.
+        taxon_namespaces : Iterable of |TaxonNamespace| objects
+            Collection of |TaxonNamespace| objects to be serialized.
+        tree_lists : Iterable of |TreeList| objects
+            Collection of |TreeList| objects to be serialized.
+        char_matrices : Iterable of |CharacterMatrix| objects
+            Collection of |CharacterMatrix| objects to be serialized.
+        global_annotations_target : |Annotable| object
+            Any object that will be the source (or subject, in the grammatical
+            sense) of general metadata or annotations for the data. If
+            `None`, then such metadata or annotations will not be stored.
+
+        """
+        raise NotImplementedError
+
+    def write_dataset(self,
+            dataset,
+            stream,
+            exclude_trees,
+            exclude_chars,
+            ):
+        """
+        Writes the given |DataSet| object to the file-like object ``stream``.
+
+        Parameters
+        ----------
+
+        dataset : |DataSet| object
+            The |DataSet| to write.
+        stream : file or file-like object
+            Destination for data.
+        exclude_trees : boolean
+            If set to `True`, tree data will not be written to the destination.
+        exclude_chars : boolean
+            If set to `True`, character data will not be written to the destination.
+
+        Notes
+        -----
+        The dataset itself is passed to ``_write`` as the source (subject, in
+        the grammatical sense) of any general metadata or annotations.
+        """
+        tree_lists = dataset.tree_lists if not exclude_trees else None
+        char_matrices = dataset.char_matrices if not exclude_chars else None
+        self.attached_taxon_namespace = dataset.attached_taxon_namespace
+        self._write(
+                stream=stream,
+                taxon_namespaces=dataset.taxon_namespaces,
+                tree_lists=tree_lists,
+                char_matrices=char_matrices,
+                global_annotations_target=dataset)
+
+    def write_tree_list(self, tree_list, stream):
+        """
+        Writes a single |TreeList| object to ``stream``.
+        """
+        self._write(
+                stream=stream,
+                taxon_namespaces=None,
+                tree_lists=[tree_list],
+                char_matrices=None,
+                global_annotations_target=None)
+
+    def write_tree_lists(self, tree_lists, stream):
+        """
+        Writes a collection of |TreeList| objects to ``stream``.
+        """
+        self._write(
+                stream=stream,
+                taxon_namespaces=None,
+                tree_lists=tree_lists,
+                char_matrices=None,
+                global_annotations_target=None)
+
+    def write_char_matrices(self, char_matrix_list, stream):
+        """
+        Writes a collection of |CharacterMatrix| objects to ``stream``.
+        """
+        self._write(
+                stream=stream,
+                taxon_namespaces=None,
+                tree_lists=None,
+                char_matrices=char_matrix_list,
+                global_annotations_target=None)
+
+    def write_char_matrix(self, char_matrix, stream):
+        """
+        Writes a single |CharacterMatrix| object to ``stream``.
+        """
+        self._write(
+                stream=stream,
+                taxon_namespaces=None,
+                tree_lists=None,
+                char_matrices=[char_matrix],
+                global_annotations_target=None)
+
+###############################################################################
+## DataYielder
+
+class DataYielder(IOService):
+
+    def __init__(self, files=None):
+        IOService.__init__(self)
+        self.files = files
+        self._current_file_index = None
+        self._current_file = None
+        self._current_file_name = None
+
+    def reset(self):
+        self.current_file_index = None
+        self.current_file = None
+        self.current_file_name = None
+
+    def _get_current_file_index(self):
+        return self._current_file_index
+    current_file_index = property(_get_current_file_index)
+
+    def _get_current_file(self):
+        return self._current_file
+    current_file = property(_get_current_file)
+
+    def _get_current_file_name(self):
+        return self._current_file_name
+    current_file_name = property(_get_current_file_name)
+
+    def __iter__(self):
+        for current_file_index, current_file in enumerate(self.files):
+            self._current_file_index = current_file_index
+            for item in self.iterate_over_file(current_file):
+                yield item
+
+    def iterate_over_file(self, current_file):
+        if isinstance(current_file, str):
+            self._current_file = open(current_file, "r")
+            self._current_file_name = current_file
+        else:
+            self._current_file = current_file
+            try:
+                self._current_file_name = self.current_file.name
+            except AttributeError:
+                self._current_file_name = None
+        if hasattr(self._current_file, "__exit__"):
+            with self._current_file:
+                for item in self._yield_items_from_stream(stream=self._current_file):
+                    yield item
+        else:
+            # StringIO does not support ``with``
+            for item in self._yield_items_from_stream(stream=self._current_file):
+                yield item
+        self._current_file = None
+
+###############################################################################
+## DataYielder
+
+class TreeDataYielder(DataYielder):
+
+    def __init__(self,
+            files=None,
+            taxon_namespace=None,
+            tree_type=None):
+        DataYielder.__init__(self, files=files)
+        self.taxon_namespace = taxon_namespace
+        assert self.taxon_namespace is not None
+        self.attached_taxon_namespace = self.taxon_namespace
+        self.tree_type = tree_type
+
+    def tree_factory(self):
+        return self.tree_type(taxon_namespace=self.taxon_namespace)
+
+
diff --git a/dendropy/dataio/newick.py b/dendropy/dataio/newick.py
new file mode 100644
index 0000000..442f116
--- /dev/null
+++ b/dendropy/dataio/newick.py
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
diff --git a/dendropy/dataio/newickreader.py b/dendropy/dataio/newickreader.py
new file mode 100644
index 0000000..2dd14d3
--- /dev/null
+++ b/dendropy/dataio/newickreader.py
@@ -0,0 +1,672 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Parsing of NEWICK-format tree from a stream.
+"""
+
+import re
+import warnings
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+from dendropy.utility import error
+from dendropy.utility import deprecate
+from dendropy.dataio import tokenizer
+from dendropy.dataio import nexusprocessing
+from dendropy.dataio import ioservice
+
+##############################################################################
+## NewickReader
+
+class NewickReader(ioservice.DataReader):
+    """
+    Parser for NEWICK-formatted data.
+    """
+
+    _default_rooting_directive = None
+    _default_tree_weight = 1.0
+
+    class NewickReaderError(error.DataParseError):
+        def __init__(self, message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            error.DataParseError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class NewickReaderInvalidTokenError(NewickReaderError):
+        def __init__(self,
+                message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            NewickReader.NewickReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class NewickReaderMalformedStatementError(NewickReaderError):
+        def __init__(self,
+                message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            NewickReader.NewickReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class NewickReaderIncompleteTreeStatementError(NewickReaderMalformedStatementError):
+        def __init__(self,
+                message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            NewickReader.NewickReaderMalformedStatementError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class NewickReaderInvalidValueError(NewickReaderError):
+        def __init__(self,
+                message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            NewickReader.NewickReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class NewickReaderDuplicateTaxonError(NewickReaderError):
+        def __init__(self,
+                message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            detailed = ("Multiple occurrences of the same taxa on trees are not"
+            " supported: trees with duplicate node labels can only be"
+            " processed if the labels are not parsed as operational taxonomic"
+            " unit concepts but instead as simply node labels by specifying"
+            " 'suppress_internal_node_taxa=True, suppress_leaf_node_taxa=True'."
+            " Duplicate taxon labels: {}").format(message)
+            NewickReader.NewickReaderError.__init__(self,
+                    message=detailed,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    def __init__(self, **kwargs):
+        """
+        Keyword Arguments
+        -----------------
+        rooting : string, {['default-unrooted'], 'default-rooted', 'force-unrooted', 'force-rooted'}
+            Specifies how trees in the data source should be intepreted with
+            respect to their rooting:
+
+                'default-unrooted' [default]:
+                    All trees are interpreted as unrooted unless a '[&R]'
+                    comment token explicitly specifies them as rooted.
+                'default-rooted'
+                    All trees are interpreted as rooted unless a '[&U]'
+                    comment token explicitly specifies them as unrooted.
+                'force-unrooted'
+                    All trees are unconditionally interpreted as unrooted.
+                'force-rooted'
+                    All trees are unconditionally interpreted as rooted.
+
+        edge_length_type : type, default: ``float``
+            Specifies the type of the edge lengths (``int`` or ``float``). Tokens
+            interpreted as branch lengths will be cast to this type.
+            Defaults to ``float``.
+        suppress_edge_lengths : boolean, default: `False`
+            If `True`, edge length values will not be processed. If `False`,
+            edge length values will be processed.
+        extract_comment_metadata : boolean, default: `True`
+            If `True` (default), any comments that begin with '&' or '&&' will
+            be parsed and stored as part of the annotation set of the
+            corresponding object (accessible through the ``annotations``
+            attribute of the object). This requires that the comment
+            contents conform to a particular format (NHX or BEAST: 'field =
+            value'). If `False`, then the comments will not be parsed,
+            but will be instead stored directly as elements of the ``comments``
+            list attribute of the associated object.
+        store_tree_weights : boolean, default: `False`
+            If `True`, process the tree weight (e.g. "[&W 1/2]") comment
+            associated with each tree, if any. Defaults to `False`.
+        encode_splits : boolean, default: `False`
+            If `True`, split hash bitmasks will be calculated and attached to
+            the edges.
+        finish_node_fn : function object, default: `None`
+            If specified, this function will be applied to each node after
+            it has been constructed.
+        case_sensitive_taxon_labels : boolean, default: `False`
+            If `True`, then taxon labels are case sensitive (e.g., "P.regius"
+            and "P.REGIUS" wil be treated as different operation taxonomic
+            unit concepts). Otherwise, taxon label intepretation will be made
+            without regard for case.
+        preserve_underscores : boolean, default: `False`
+            If `True`, unquoted underscores in labels will *not* converted to
+            spaces. Defaults to `False`: all underscores not protected by
+            quotes will be converted to spaces.
+        suppress_internal_node_taxa : boolean, default: `True`
+            If `False`, internal node labels will be instantantiated into
+            |Taxon| objects. If `True`, internal node labels
+            will *not* be instantantiated as strings.
+        suppress_leaf_node_taxa : boolean, default: `False`
+            If `False`, leaf (external) node labels will be instantantiated
+            into |Taxon| objects. If `True`, leaff (external) node
+            labels will *not* be instantantiated as strings.
+        terminating_semicolon_required : boolean, default: `True`
+            If `True` [default], then a tree statement that does not end in a
+            semi-colon is an error. If `False`, then no error will be raised.
+        ignore_unrecognized_keyword_arguments : boolean, default: `False`
+            If `True`, then unsupported or unrecognized keyword arguments will
+            not result in an error. Default is `False`: unsupported keyword
+            arguments will result in an error.
+        """
+
+        # base
+        ioservice.DataReader.__init__(self)
+
+        self._rooting = None
+        ## (TEMPORARY and UGLY!!!!) Special handling for legacy signature
+        if "as_unrooted" in kwargs or "as_rooted" in kwargs or "default_as_rooted" in kwargs or "default_as_unrooted" in kwargs:
+            import collections
+            legacy_kw = ("as_unrooted", "as_rooted", "default_as_rooted", "default_as_unrooted")
+            legacy_kw_str = ", ".join("'{}'".format(k) for k in legacy_kw)
+            if "rooting" in kwargs:
+                raise ValueError("Cannot specify 'rooting' keyword argument in conjunction with any of the (legacy) keyword arguments ({}). Use 'rooting' alone.".format(legacy_kw_str))
+            specs = collections.Counter(k for k in kwargs.keys() if k in legacy_kw)
+            if sum(specs.values()) > 1:
+                raise ValueError("Cannot specify more than one of {{ {} }} at the same time".format(legacy_kw_str))
+            kw = list(specs.keys())[0]
+            if kw == "as_unrooted":
+                if kwargs[kw]:
+                    corrected = "force-unrooted"
+                else:
+                    corrected = "force-rooted"
+            elif kw == "as_rooted":
+                if kwargs[kw]:
+                    corrected = "force-rooted"
+                else:
+                    corrected = "force-unrooted"
+            elif kw == "default_as_unrooted":
+                if kwargs[kw]:
+                    corrected = "default-unrooted"
+                else:
+                    corrected = "default-rooted"
+            elif kw == "default_as_rooted":
+                if kwargs[kw]:
+                    corrected = "default-rooted"
+                else:
+                    corrected = "default-unrooted"
+            msg = StringIO()
+            # error.critical_deprecation_alert("\n{}\nUse of keyword argument '{}={}' is deprecated; use 'rooting=\"{}\"' instead".format(msg.getvalue(), kw, kwargs[kw], corrected),
+            #         stacklevel=4)
+            deprecate.dendropy_deprecation_warning(
+                    preamble="Deprecated since DendroPy 4:",
+                    old_construct="{}={}".format(kw, kwargs[kw]),
+                    new_construct="rooting='{}'".format(corrected),
+                    stacklevel=7)
+            kwargs.pop(kw)
+            kwargs["rooting"] = corrected
+        if "allow_duplicate_taxon_labels" in kwargs:
+            raise ValueError(
+                "'allow_duplicate_taxon_labels' is no longer"
+                " supported: trees with duplicate node labels can only be"
+                " processed if the labels are not parsed as operational taxonomic"
+                " unit concepts but instead as simply node labels by specifying"
+                " 'suppress_internal_node_taxa=True, suppress_leaf_node_taxa=True'."
+            )
+        # self.rooting = kwargs.pop("rooting", "default-unrooted")
+        self.rooting = kwargs.pop("rooting", self.__class__._default_rooting_directive)
+        self.edge_length_type = kwargs.pop("edge_length_type", float)
+        self.suppress_edge_lengths = kwargs.pop("suppress_edge_lengths", False)
+        self.extract_comment_metadata = kwargs.pop('extract_comment_metadata', True)
+        self.store_tree_weights = kwargs.pop("store_tree_weights", False)
+        self.default_tree_weight = kwargs.pop("default_tree_weight", self.__class__._default_tree_weight)
+        self.finish_node_fn = kwargs.pop("finish_node_fn", None)
+        self.case_sensitive_taxon_labels = kwargs.pop('case_sensitive_taxon_labels', False)
+        self.preserve_unquoted_underscores = kwargs.pop('preserve_underscores', False)
+        self.suppress_internal_node_taxa = kwargs.pop("suppress_internal_node_taxa", True)
+        self.suppress_leaf_node_taxa = kwargs.pop("suppress_external_node_taxa", False) # legacy (will be deprecated)
+        self.suppress_leaf_node_taxa = kwargs.pop("suppress_leaf_node_taxa", self.suppress_leaf_node_taxa)
+        self.terminating_semicolon_required = kwargs.pop("terminating_semicolon_required", True)
+        self.check_for_unused_keyword_arguments(kwargs)
+
+        # per-tree book-keeping
+        self._tree_statement_complete = None
+        self._parenthesis_nesting_level = None
+        self._seen_taxa = None
+
+    def tree_iter(self,
+            stream,
+            taxon_symbol_mapper,
+            tree_factory):
+        """
+        Iterator that yields trees in NEWICK-formatted source.
+
+        Parameters
+        ----------
+        stream : file or file-like object
+            A file or file-like object opened for reading.
+        taxon_namespace : |TaxonNamespace|
+            Operational taxonomic unit namespace to use for taxon management.
+        tree_factory : function object
+            A function that returns a new |Tree| object when called
+            without arguments.
+
+        Returns
+        -------
+        iter : :py`collections.Iterator` [|Tree|]
+            An iterator yielding |Tree| objects constructed based on
+            data in ``stream``.
+        """
+        nexus_tokenizer = nexusprocessing.NexusTokenizer(stream,
+                preserve_unquoted_underscores=self.preserve_unquoted_underscores)
+        while True:
+            tree = self._parse_tree_statement(
+                    nexus_tokenizer=nexus_tokenizer,
+                    tree_factory=tree_factory,
+                    taxon_symbol_map_fn=taxon_symbol_mapper.require_taxon_for_symbol)
+            yield tree
+            if tree is None:
+                raise StopIteration
+
+    def _read(self,
+            stream,
+            taxon_namespace_factory=None,
+            tree_list_factory=None,
+            char_matrix_factory=None,
+            state_alphabet_factory=None,
+            global_annotations_target=None): # ioservice.DataReader hook: parse all trees in 'stream' into a single tree list
+        taxon_namespace = taxon_namespace_factory(label=None)
+        tree_list = tree_list_factory(label=None, taxon_namespace=taxon_namespace)
+        taxon_symbol_mapper = nexusprocessing.NexusTaxonSymbolMapper(
+                taxon_namespace=taxon_namespace,
+                enable_lookup_by_taxon_number=False, # Newick trees reference taxa by label, never by index
+                case_sensitive=self.case_sensitive_taxon_labels)
+        tree_factory = tree_list.new_tree
+        for tree in self.tree_iter(stream=stream,
+                taxon_symbol_mapper=taxon_symbol_mapper,
+                tree_factory=tree_factory):
+            pass # iteration drives parsing; tree_factory appends each tree to tree_list as a side effect
+        product = self.Product(
+                taxon_namespaces=None,
+                tree_lists=[tree_list],
+                char_matrices=None) # Newick sources carry trees only: no character data
+        return product
+
+    def _get_rooting(self):
+        """
+        Get rooting interpretation configuration.
+        """
+        return self._rooting
+    def _set_rooting(self, val):
+        """
+        Set rooting interpretation configuration.
+        """
+        if val not in ("force-unrooted", "force-rooted", "default-unrooted", "default-rooted", None,): # only directives understood by _parse_tree_rooting_state()
+            raise ValueError("Unrecognized rooting directive: '{}'".format(val))
+        self._rooting = val
+    rooting = property(_get_rooting, _set_rooting)
+
+    def _parse_tree_statement(self,
+            nexus_tokenizer,
+            tree_factory,
+            taxon_symbol_map_fn):
+        """
+        Parses a single tree statement from a token stream and constructs a
+        corresponding Tree object. Expects that the first non-comment and
+        non-semi-colon token to be found, including the current token, to be
+        the parenthesis that opens the tree statement. When complete, the
+        current token will be the token immediately following the semi-colon,
+        if any.
+        """
+        current_token = nexus_tokenizer.current_token
+        tree_comments = nexus_tokenizer.pull_captured_comments()
+        while (current_token == ";" or current_token is None) and not nexus_tokenizer.is_eof(): # skip stray semi-colons/blanks before the statement proper
+            current_token = nexus_tokenizer.require_next_token()
+            tree_comments = nexus_tokenizer.pull_captured_comments()
+        if nexus_tokenizer.is_eof():
+            return None # EOF reached: no further tree statements in stream
+        if current_token != "(":
+            # allow for possibility of single node tree, e.g.: T0:10;
+            self._parenthesis_nesting_level = 0
+            # raise NewickReader.NewickReaderInvalidTokenError(
+            #         message="Expecting '{}' but found '{}'".format("(", current_token),
+            #         line_num=nexus_tokenizer.token_line_num,
+            #         col_num=nexus_tokenizer.token_column_num,
+            #         stream=nexus_tokenizer.src)
+        else:
+            self._parenthesis_nesting_level = 1
+        tree = tree_factory()
+        self._process_tree_comments(tree, tree_comments, nexus_tokenizer) # also sets rootedness/weight defaults; must run exactly once per tree
+        self._tree_statement_complete = False
+        self._seen_taxa = set()
+        self._parse_tree_node_description(
+                nexus_tokenizer=nexus_tokenizer,
+                tree=tree,
+                current_node=tree.seed_node,
+                taxon_symbol_map_fn=taxon_symbol_map_fn,
+                is_internal_node=None)
+        current_token = nexus_tokenizer.current_token
+        if not self._tree_statement_complete:
+            raise NewickReader.NewickReaderIncompleteTreeStatementError(
+                    message="Incomplete or improperly-terminated tree statement (last character read was '{}' instead of a semi-colon ';')".format(nexus_tokenizer.current_token),
+                    line_num=nexus_tokenizer.token_line_num,
+                    col_num=nexus_tokenizer.token_column_num,
+                    stream=nexus_tokenizer.src)
+        self._seen_taxa = None
+        self._parenthesis_nesting_level = None
+        self._tree_statement_complete = None
+        while current_token == ";" and not nexus_tokenizer.is_eof(): # consume trailing semi-colon(s); leaves tokenizer on the token after them
+            nexus_tokenizer.clear_captured_comments()
+            current_token = nexus_tokenizer.next_token()
+        return tree
+
+    def _process_tree_comments(self, tree, tree_comments, nexus_tokenizer):
+        # NOTE: this also unconditionally sets the tree rootedness and
+        # weighting if no comment indicating these are found; for this to work
+        # in the current implementation, this method must be called once and
+        # exactly once per tree.
+        if not tree_comments:
+            tree.is_rooted = self._parse_tree_rooting_state("")
+            if self.store_tree_weights:
+                tree.weight = self.default_tree_weight
+            return
+        rooting_token_found = False
+        weighting_token_found = False
+        for comment in tree_comments:
+            stripped_comment = comment.strip()
+            if stripped_comment in ["&u", "&U", "&r", "&R"]: # rooting directive comment token
+                tree.is_rooted = self._parse_tree_rooting_state(stripped_comment)
+                rooting_token_found = True
+            elif (self.store_tree_weights
+                    and (stripped_comment.startswith("&W ") or stripped_comment.startswith("&w "))
+                    ):
+                try:
+                    weight_expression = stripped_comment[2:] # text following the '&W'/'&w' prefix; either 'x' or 'x/y'
+                    if not weight_expression:
+                        raise ValueError
+                    we_parts = weight_expression.split("/")
+                    if len(we_parts) > 2:
+                        raise ValueError
+                        # raise NewickReader.NewickReaderInvalidValueError(
+                        #         message="Invalid tree weight expression: '{}'".format(weight_expression),
+                        #         line_num=nexus_tokenizer.token_line_num,
+                        #         col_num=nexus_tokenizer.token_column_num,
+                        #         stream=nexus_tokenizer.src)
+                    elif len(we_parts) == 2: # rational form: numerator/denominator
+                        x = float(we_parts[0])
+                        y = float(we_parts[1])
+                        tree.weight = x/y
+                    else:
+                        tree.weight = float(we_parts[0])
+                    weighting_token_found = True
+                except ValueError:
+                    exc = NewickReader.NewickReaderInvalidValueError(
+                            message="Invalid tree weight expression: '{}'".format(stripped_comment),
+                            line_num=nexus_tokenizer.token_line_num,
+                            col_num=nexus_tokenizer.token_column_num,
+                            stream=nexus_tokenizer.src)
+                    exc.__context__ = None # Python 3.0, 3.1, 3.2
+                    exc.__cause__ = None # Python 3.3, 3.4
+                    raise exc
+            elif self.extract_comment_metadata and comment.startswith("&"):
+                annotations = nexusprocessing.parse_comment_metadata_to_annotations(
+                    comment=comment)
+                if annotations:
+                    tree.annotations.update(annotations)
+                else:
+                    tree.comments.append(comment) # '&'-prefixed but yielded no parseable annotations: keep verbatim
+            else:
+                tree.comments.append(comment)
+        if not rooting_token_found: # no rooting token seen: apply configured default
+            tree.is_rooted = self._parse_tree_rooting_state("")
+        if self.store_tree_weights and not weighting_token_found:
+            tree.weight = self.default_tree_weight
+
+    def _parse_tree_rooting_state(self, rooting_comment=None):
+        """
+        Returns rooting state for tree with given rooting comment token, taking
+        into account ``rooting`` configuration.
+        """
+        if self._rooting == "force-unrooted": # 'force-*' directives override any comment token
+            return False
+        elif self._rooting == "force-rooted":
+            return True
+        elif rooting_comment == "&R" or rooting_comment == "&r": # explicit rooting token in tree statement
+            return True
+        elif rooting_comment == "&U" or rooting_comment == "&u":
+            return False
+        elif self._rooting == "default-rooted": # no explicit token: fall back to configured default
+            return True
+        elif self._rooting == "default-unrooted":
+            return False
+        elif self._rooting is None:
+            return None
+        else:
+            raise TypeError("Unrecognized rooting directive: '{}'".format(self._rooting))
+
+    def _parse_tree_node_description(
+            self,
+            nexus_tokenizer,
+            tree,
+            current_node,
+            taxon_symbol_map_fn,
+            is_internal_node=None):
+        """
+        Assuming that the iterator is currently sitting on a parenthesis that
+        opens a node with children or the label of a leaf node, this will
+        populate the node ``node`` appropriately (label, edge length, comments,
+        metadata etc.) and recursively parse and add the node's
+        children. When complete, the token will be the token immediately
+        following the end of the node or tree statement if this is the root
+        node, i.e. the token *following* the closing parenthesis of the node or
+        the semi-colon terminating a tree statement.
+        """
+        current_node_comments = nexus_tokenizer.pull_captured_comments() # comments captured ahead of this node's tokens
+        if nexus_tokenizer.current_token == "(":
+            # self._parenthesis_nesting_level += 1 # handled by calling code
+            nexus_tokenizer.require_next_token()
+            node_created = False
+            while True:
+                if nexus_tokenizer.current_token == ",":
+                    if not node_created: #184
+                        # no node has been created yet: ',' designates a
+                        # preceding blank node
+                        new_node = tree.node_factory()
+                        nexusprocessing.process_comments_for_item(item=new_node,
+                                item_comments=nexus_tokenizer.pull_captured_comments(),
+                                extract_comment_metadata=self.extract_comment_metadata)
+                        self._finish_node(new_node)
+                        current_node.add_child(new_node)
+                        ## node_created = True # do not flag node as created to allow for an extra node to be created in the event of (..,)
+                    nexus_tokenizer.require_next_token()
+                    while nexus_tokenizer.current_token == ",": #192
+                        # another blank node
+                        new_node = tree.node_factory()
+                        nexusprocessing.process_comments_for_item(item=new_node,
+                                item_comments=nexus_tokenizer.pull_captured_comments(),
+                                extract_comment_metadata=self.extract_comment_metadata)
+                        self._finish_node(new_node)
+                        current_node.add_child(new_node)
+                        # node_created = True; # do not flag node as created: extra node needed in the event of (..,)
+                        nexus_tokenizer.require_next_token()
+                    if not node_created and nexus_tokenizer.current_token == ")": #200
+                        # end of node
+                        new_node = tree.node_factory();
+                        nexusprocessing.process_comments_for_item(item=new_node,
+                                item_comments=nexus_tokenizer.pull_captured_comments(),
+                                extract_comment_metadata=self.extract_comment_metadata)
+                        self._finish_node(new_node)
+                        current_node.add_child(new_node)
+                        node_created = True;
+                elif nexus_tokenizer.current_token == ")": #206
+                    # end of child nodes
+                    self._parenthesis_nesting_level -= 1
+                    nexus_tokenizer.require_next_token()
+                    break
+                else: #210
+                    # assume child nodes: a leaf node (if a label) or
+                    # internal (if a parenthesis)
+                    if nexus_tokenizer.current_token == "(":
+                        self._parenthesis_nesting_level += 1
+                        is_new_internal_node = True
+                    else:
+                        is_new_internal_node = False
+                    new_node = tree.node_factory();
+                    nexusprocessing.process_comments_for_item(item=new_node,
+                            item_comments=nexus_tokenizer.pull_captured_comments(),
+                            extract_comment_metadata=self.extract_comment_metadata)
+                    self._parse_tree_node_description(
+                            nexus_tokenizer=nexus_tokenizer,
+                            tree=tree,
+                            current_node=new_node,
+                            taxon_symbol_map_fn=taxon_symbol_map_fn,
+                            is_internal_node=is_new_internal_node,
+                            )
+                    current_node.add_child(new_node);
+                    node_created = True;
+        label_parsed = False # a node may carry at most one label token
+        self._tree_statement_complete = False
+        if is_internal_node is None:
+            # Initial call using ``seed_node`` does not set ``is_internal_node`` to
+            # `True` or `False`, explicitly, but rather `None`. If this is the
+            # case, the rest of the tree has be constructed, and we simply look
+            # at whether there are children or not to determine if it is an
+            # internal node. This approach allows for a single-tip tree.
+            if current_node._child_nodes:
+                is_internal_node = True
+        if current_node_comments is None:
+            current_node_comments = []
+        while True:
+            cc = nexus_tokenizer.pull_captured_comments()
+            if cc is not None:
+                current_node_comments.extend(cc)
+            if nexus_tokenizer.current_token == ":": #246
+                nexus_tokenizer.require_next_token()
+                if not self.suppress_edge_lengths:
+                    try:
+                        edge_length = self.edge_length_type(nexus_tokenizer.current_token)
+                    except ValueError:
+                        raise NewickReader.NewickReaderMalformedStatementError(
+                                message="Invalid edge length: '{}'".format(nexus_tokenizer.current_token),
+                                line_num=nexus_tokenizer.token_line_num,
+                                col_num=nexus_tokenizer.token_column_num,
+                                stream=nexus_tokenizer.src)
+                    current_node.edge.length = edge_length
+                try:
+                    nexus_tokenizer.require_next_token()
+                except tokenizer.Tokenizer.UnexpectedEndOfStreamError as e:
+                    message = e.message + ". (Perhaps the terminating semicolon for the tree statement is missing? If so, add a semicolon to the tree statement or specify 'terminating_semicolon_required=False' to allow for missing semicolons)"
+                    raise tokenizer.Tokenizer.UnexpectedEndOfStreamError(
+                            message=message,
+                            line_num=e.line_num,
+                            col_num=e.col_num,
+                            stream=e.stream)
+
+            elif nexus_tokenizer.current_token == ")": #253
+                # closing of parent token
+                # self._parenthesis_nesting_level -= 1 # handled by calling code
+                nexusprocessing.process_comments_for_item(item=current_node,
+                        item_comments=current_node_comments,
+                        extract_comment_metadata=self.extract_comment_metadata)
+                self._finish_node(current_node)
+                return current_node
+            elif nexus_tokenizer.current_token == ";": #256
+                # end of tree statement
+                self._tree_statement_complete = True
+                nexus_tokenizer.next_token()
+                break
+            elif nexus_tokenizer.current_token == ",": #260
+                # end of this node
+                nexusprocessing.process_comments_for_item(item=current_node,
+                            item_comments=current_node_comments,
+                            extract_comment_metadata=self.extract_comment_metadata)
+                self._finish_node(current_node)
+                return current_node
+            elif nexus_tokenizer.current_token == "(": #263
+                # start of another node or tree without finishing this
+                # node
+                self._parenthesis_nesting_level += 1
+                raise NewickReader.NewickReaderMalformedStatementError(
+                        message="Malformed tree statement",
+                        line_num=nexus_tokenizer.token_line_num,
+                        col_num=nexus_tokenizer.token_column_num,
+                        stream=nexus_tokenizer.src)
+            else: #267
+                # label
+                if label_parsed: #269
+                    raise NewickReader.NewickReaderMalformedStatementError(
+                            message="Expecting ':', ')', ',' or ';' after reading label but found '{}'".format(nexus_tokenizer.current_token),
+                            line_num=nexus_tokenizer.token_line_num,
+                            col_num=nexus_tokenizer.token_column_num,
+                            stream=nexus_tokenizer.src)
+                else:
+                    # Label
+                    label = nexus_tokenizer.current_token
+                    if ( (is_internal_node and self.suppress_internal_node_taxa)
+                            or ((not is_internal_node) and self.suppress_leaf_node_taxa) ):
+                        current_node.label = label # suppressed: keep as plain string node label, not a Taxon
+                    else:
+                        node_taxon = taxon_symbol_map_fn(label)
+                        if node_taxon in self._seen_taxa:
+                            raise NewickReader.NewickReaderDuplicateTaxonError(
+                                    message=node_taxon.label,
+                                    line_num=nexus_tokenizer.token_line_num,
+                                    col_num=nexus_tokenizer.token_column_num,
+                                    stream=nexus_tokenizer.src)
+                        self._seen_taxa.add(node_taxon)
+                        current_node.taxon = node_taxon
+                    label_parsed = True;
+                    # nexus_tokenizer.require_next_token()
+                    try:
+                        nexus_tokenizer.require_next_token()
+                    except tokenizer.Tokenizer.UnexpectedEndOfStreamError:
+                        if self.terminating_semicolon_required:
+                            raise
+                        else:
+                            break
+        ## if we are here, we have reached the end of the tree
+        if self._parenthesis_nesting_level != 0: # statement ended: parentheses must balance
+            raise NewickReader.NewickReaderMalformedStatementError(
+                    message="Unbalanced parentheses at tree statement termination: balance index = {}".format(self._parenthesis_nesting_level),
+                    line_num=nexus_tokenizer.token_line_num,
+                    col_num=nexus_tokenizer.token_column_num,
+                    stream=nexus_tokenizer.src)
+        nexusprocessing.process_comments_for_item(item=current_node,
+                item_comments=current_node_comments,
+                extract_comment_metadata=self.extract_comment_metadata)
+        self._finish_node(current_node)
+        return current_node
+
+    def _finish_node(self, node): # invoke user-supplied post-construction callback, if configured
+        if self.finish_node_fn is not None:
+            self.finish_node_fn(node)
diff --git a/dendropy/dataio/newickwriter.py b/dendropy/dataio/newickwriter.py
new file mode 100644
index 0000000..318edfd
--- /dev/null
+++ b/dendropy/dataio/newickwriter.py
@@ -0,0 +1,420 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Writing of Newick-format tree to a stream.
+"""
+
+import re
+import warnings
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+from dendropy.utility import error
+from dendropy.dataio import tokenizer
+from dendropy.dataio import nexusprocessing
+from dendropy.dataio import ioservice
+
+##############################################################################
+## NewickReader
+
+class NewickWriter(ioservice.DataWriter):
+    """
+    Formatter for Newick data.
+    """
+
+    def __init__(self, **kwargs):
+        """
+
+        Keyword Arguments
+        -----------------
+        suppress_leaf_taxon_labels : boolean, default: `False`
+            If `True`, then taxon labels will not be rendered for leaves.
+            Default is `False`: render leaf taxon labels. See notes below for
+            details.
+        suppress_leaf_node_labels : boolean, default: `True`
+            If `False`, then node labels (if available) will be printed for
+            leaves. Defaults to `True`: do not render leaf node labels. See
+            notes below for details.
+        suppress_internal_taxon_labels : boolean, default: `False`
+            If `True`, then taxon labels will not be printed for internal
+            nodes. Default is `False`: print taxon labels for internal nodes.
+            See notes below for details.
+        suppress_internal_node_labels : boolean, default: `False`
+            If `True`, then node labels will not be printed for internal nodes.
+            Default is `False`: print node labels for internal nodes. See notes
+            below for details.
+        suppress_rooting : boolean, default: `False`
+            If `True`, will not write rooting token ('[&R]' or '[&U]').
+            Default is `False`: rooting token will be written.
+        suppress_edge_lengths : boolean, default: `False`
+            If `True`, will not write edge lengths. Default is `False`: edge
+            lengths will be written.
+        unquoted_underscores : boolean, default: `False`
+            If `True`, labels with underscores will not be quoted, which will
+            mean that they will be interpreted as spaces if read again ("soft"
+            underscores).  If `False`, then labels with underscores
+            will be quoted, resulting in "hard" underscores.  Default is
+            `False`.
+        preserve_spaces : boolean, default: `False`
+            If `True`, spaces will not be replaced with underscores in labels
+            (which means any labels containing spaces will have to be quoted).
+            Default is `False`: spaces will be converted to underscores.
+
+        store_tree_weights : boolean, default: `False`
+            If `True`, tree weights are written. Default is `False`: tree
+            weights will not be written.
+        taxon_token_map : boolean or dict or `None`, default: `None`.
+            If not `False` or `None`, a "TRANSLATE" statement will be written
+            and referenced in tree statements (instead of using the taxon
+            labels). If `True`, then a default translate statement will
+            be used, with tokens given by the taxon indexes. If a dictionary is
+            given, then the keys should be |Taxon| objects and the
+            values should be the token (strings).
+        suppress_annotations : boolean, default: `True`
+            If `False`, metadata annotations will be written out as special
+            comments. Defaults to `True`: metadata annotations will be ignored.
+        annotations_as_nhx : boolean, default: `False`
+            If `True`, and if ``suppress_annotations`` is `False`, will write
+            annotation as NHX statements. Default is `False`: annotations
+            will not be written as NHX statements.
+        suppress_item_comments : boolean, default: `True`
+            If `False`, any additional comments associated with trees, nodes,
+            edges, etc. will be written. Default is `True`: comments will be
+            ignored.
+        node_label_element_separator : string, default: ' '
+            If both ``suppress_leaf_taxon_labels`` and
+            ``suppress_leaf_node_labels`` are `False`, then this will be the
+            string used to join them. Defaults to ' ' (space).
+        node_label_compose_fn : function object or `None`, default: `None`
+            If not `None`, should be a function that takes a |Node|
+            object as an argument and returns the string to be used to
+            represent the node in the tree statement. The return value from
+            this function is used unconditionally to print a node
+            representation in a tree statement, by-passing the default
+            labelling function, ignoring ``suppress_leaf_taxon_labels``,
+            ``suppress_leaf_node_labels=True``, ``suppress_internal_taxon_labels``,
+            ``suppress_internal_node_labels``, etc. Defaults to `None`.
+        edge_label_compose_fn : function object or `None`, default: `None`
+            If not `None`, should be a function that takes an Edge object as
+            an argument, and returns the string to be used to represent the
+            edge length in the tree statement.
+        real_value_format_specifier : string, default: ''
+            Format specification for real/float values. Will be applied to edge
+            lengths (if ``edge_label_compose_fn`` is not given) as well as
+            annotations. The format specifier should be given in Python's
+            string format specification mini-language. E.g. ".8f", ".4E",
+            "8.4f".
+        ignore_unrecognized_keyword_arguments : boolean, default: `False`
+            If `True`, then unsupported or unrecognized keyword arguments will
+            not result in an error. Default is `False`: unsupported keyword
+            arguments will result in an error.
+
+        Notes
+        -----
+
+        DendroPy distinguishes between *taxon* labels and *node*
+        labels.
+
+        In a Newick string, however, no such distinction is possible, and
+        any one node can only be rendered with a single token or symbol. Thus,
+        if there is more than one source of label available for a particular
+        node (e.g., if both ``suppress_leaf_taxon_labels`` and
+        ``suppress_leaf_node_labels`` are `False`, and a particular leaf
+        node has both a taxon *and* a label associated with it), then
+        the node symbol will be rendered as concatenation of the unsuppressed
+        candidate labels, with each candidate label separated by the value
+        given in ``node_label_element_separator``. Note that this concatenated
+        label requires special handling when being re-read to avoid being
+        interpreted as the operational taxonomic unit concept label in
+        its entirety. These defaults can all be overridden using the
+        various keywords, or a custom label can be composed for the
+        node by passing an appropriate function object via the
+        ``node_label_compose_fn`` argument.
+
+        Note that, in typical Newick usage, labels of leaf nodes represent
+        operational taxonomic unit concepts, and thus the default setting to
+        render leaf taxon labels but suppress leaf node labels. Internal node
+        labels, on the other hand, are typically used both to represent
+        operational taxonomic unit concepts (e.g., ancestral taxa) as well as
+        other concepts (e.g., support or geographic range), and thus the
+        default internal node rendering is to not to suppress either the taxon
+        labels or the node labels.
+
+        """
+        ioservice.DataWriter.__init__(self)
+        legacy = {
+                "internal_labels": "Use 'suppress_internal_taxon_labels' instead",
+                "write_rooting": "Use 'suppress_rooting' instead",
+                "quote_undescores" : "Use 'unquoted_underscores' instead",
+                "annotations_as_comments": "Use 'suppress_annotations' instead"
+                }
+        for kw in legacy:
+            if kw in kwargs:
+                raise TypeError("'{}' is no longer supported: {}".format(kw, legacy[kw]))
+        ioservice.DataWriter.__init__(self, **kwargs)
+        self.suppress_leaf_taxon_labels = kwargs.pop("suppress_leaf_taxon_labels", False)
+        self.suppress_leaf_node_labels = kwargs.pop("suppress_leaf_node_labels", True)
+        self.suppress_internal_taxon_labels = kwargs.pop("suppress_internal_taxon_labels", False)
+        # self.suppress_internal_taxon_labels = not kwargs.pop("internal_labels", not sef.suppress_internal_taxon_labels) # legacy
+        self.suppress_internal_node_labels = kwargs.pop("suppress_internal_node_labels", False)
+        # self.suppress_internal_node_labels = not kwargs.pop("internal_labels", not self.suppress_internal_node_labels) # legacy
+        self.suppress_rooting = kwargs.pop("suppress_rooting", False)
+        # self.suppress_rooting = not kwargs.pop("write_rooting", not self.suppress_rooting) # legacy
+        self.suppress_edge_lengths = kwargs.pop("suppress_edge_lengths", False)
+        # self.suppress_edge_lengths = not kwargs.pop("edge_lengths", not self.suppress_edge_lengths) # legacy
+        self.unquoted_underscores = kwargs.pop("unquoted_underscores", False)
+        # self.unquoted_underscores = not kwargs.pop('quote_underscores', not self.unquoted_underscores) # legacy
+        self.preserve_spaces = kwargs.pop("preserve_spaces", False)
+        self.store_tree_weights = kwargs.pop("store_tree_weights", False)
+        self.taxon_token_map = kwargs.pop("taxon_token_map", {})
+        self.suppress_annotations = kwargs.pop("suppress_annotations", True)
+        # self.suppress_annotations = not kwargs.pop("annotations_as_comments", not self.suppress_annotations) # legacy
+        self.annotations_as_nhx = kwargs.pop("annotations_as_nhx", False)
+        self.suppress_item_comments = kwargs.pop("suppress_item_comments", True)
+        self.suppress_item_comments = not kwargs.pop("write_item_comments", not self.suppress_item_comments)
+        self.node_label_element_separator = kwargs.pop("node_label_element_separator", ' ')
+        self.node_label_compose_fn = kwargs.pop("node_label_compose_fn", None)
+        self.edge_label_compose_fn = kwargs.pop("edge_label_compose_fn", None)
+        self._real_value_format_specifier = ""
+        self._real_value_formatter = None
+        self.real_value_format_specifier = kwargs.pop("real_value_format_specifier", self._real_value_format_specifier)
+        if self.edge_label_compose_fn is None:
+            self.edge_label_compose_fn = self._format_edge_length
+        self.check_for_unused_keyword_arguments(kwargs)
+
+    def _get_taxon_tree_token(self, taxon):
+        if self.taxon_token_map is None:
+            self.taxon_token_map = {}
+        try:
+            return self.taxon_token_map[taxon]
+        except KeyError:
+            t = str(taxon.label)
+            self.taxon_token_map[taxon] = t
+            return t
+
+    def _get_real_value_format_specifier(self):
+        return self._real_value_format_specifier
+    def _set_real_value_format_specifier(self, f):
+        if f is None:
+            f = ""
+        self._real_value_format_specifier = f
+        s = "{:" + self._real_value_format_specifier + "}"
+        self._real_value_formatter = s.format
+    real_value_format_specifier = property(_get_real_value_format_specifier, _set_real_value_format_specifier)
+
+    def _format_edge_length(self, edge):
+        """
+        Note: instance method to allow overriding.
+        """
+        return self._real_value_formatter(edge.length)
+
+    def _write(self,
+            stream,
+            taxon_namespaces=None,
+            tree_lists=None,
+            char_matrices=None,
+            global_annotations_target=None):
+        for tree_list in tree_lists:
+            if (self.attached_taxon_namespace is not None
+                    and tree_list.taxon_namespace is not self.attached_taxon_namespace):
+                continue
+            self._write_tree_list(stream, tree_list)
+
+    def _write_tree_list(self, stream, tree_list):
+        """
+        Writes a |TreeList| in Newick schema to ``stream``.
+        """
+        for tree in tree_list:
+            self._write_tree(stream, tree)
+            stream.write("\n")
+        # In Newick format, no clear way to distinguish between
+        # annotations/comments associated with tree collection and
+        # annotations/comments associated with first tree. So we place them at
+        # *end* of document.
+        if (not self.suppress_annotations) and (hasattr(tree_list, "_annotations")):
+            annotation_comments = nexusprocessing.format_item_annotations_as_comments(tree_list,
+                    nhx=self.annotations_as_nhx,
+                    real_value_format_specifier=self.real_value_format_specifier,
+                    )
+        else:
+            annotation_comments = ""
+        treelist_comments = self._compose_comment_string(tree_list)
+        stream.write("{}{}".format(
+                annotation_comments,
+                treelist_comments))
+
+    def _write_tree(self, stream, tree):
+        """
+        Composes and writes ``tree`` to ``stream``.
+        """
+        if tree.rooting_state_is_undefined or self.suppress_rooting:
+            rooting = ""
+        elif tree.is_rooted:
+            rooting = "[&R] "
+        elif not tree.is_rooted:
+            rooting = "[&U] "
+        else:
+            rooting = ""
+        if self.store_tree_weights and tree.weight is not None:
+            weight = "[&W {}] ".format(tree.weight)
+        else:
+            weight = ""
+        if not self.suppress_annotations:
+            annotation_comments = nexusprocessing.format_item_annotations_as_comments(tree,
+                    nhx=self.annotations_as_nhx,
+                    real_value_format_specifier=self.real_value_format_specifier,
+                    )
+        else:
+            annotation_comments = ""
+        tree_comments = self._compose_comment_string(tree)
+        stream.write("{}{}{}{}".format(
+                rooting,
+                weight,
+                annotation_comments,
+                tree_comments,
+                ))
+        tree.apply(
+                before_fn=lambda x: self._write_node_open(x, stream),
+                after_fn=lambda x: self._write_node_close(x, stream),
+                leaf_fn=lambda x: self._write_leaf(x, stream),
+                )
+        stream.write(";")
+
+    def _write_node_open(self, node, out):
+        if node._parent_node is None or node._parent_node._child_nodes[0] is node:
+            out.write("(")
+        else:
+            out.write(",(")
+
+    def _write_leaf(self, node, out):
+        if not (node._parent_node is None or node._parent_node._child_nodes[0] is node):
+            out.write(",")
+        self._write_node_body(node, out)
+
+    def _write_node_close(self, node, out):
+        out.write(")")
+        self._write_node_body(node, out)
+
+    def _write_node_body(self, node, out):
+        out.write(self._render_node_tag(node))
+        if node.edge and node.edge.length != None and not self.suppress_edge_lengths:
+            out.write(":{}".format(self.edge_label_compose_fn(node.edge)))
+        if not self.suppress_annotations:
+            node_annotation_comments = nexusprocessing.format_item_annotations_as_comments(node,
+                    nhx=self.annotations_as_nhx,
+                    real_value_format_specifier=self.real_value_format_specifier)
+            out.write(node_annotation_comments)
+            edge_annotation_comments = nexusprocessing.format_item_annotations_as_comments(node.edge,
+                    nhx=self.annotations_as_nhx,
+                    real_value_format_specifier=self.real_value_format_specifier)
+            out.write(edge_annotation_comments)
+        out.write(self._compose_comment_string(node))
+        out.write(self._compose_comment_string(node.edge))
+
+    def _compose_comment_string(self, item):
+        if not self.suppress_item_comments and item.comments:
+            item_comments = []
+            if isinstance(item.comments, str):
+                item.comments = [item.comments]
+            for comment in item.comments:
+                item_comments.append("[{}]".format(comment))
+            item_comment_str = "".join(item_comments)
+        else:
+            item_comment_str = ""
+        return item_comment_str
+
+    def _render_node_tag(self, node):
+        """
+        Based on current settings, the attributes of a node, and
+        whether or not the node is a leaf, returns an appropriate tag.
+        """
+        tag = None
+        if self.node_label_compose_fn:
+            tag = self.node_label_compose_fn(node)
+        else:
+            tag_parts = []
+            is_leaf = len(node.child_nodes()) == 0
+            if is_leaf:
+                if hasattr(node, 'taxon') \
+                        and node.taxon \
+                        and node.taxon.label is not None \
+                        and not self.suppress_leaf_taxon_labels:
+                    tag_parts.append(self._get_taxon_tree_token(node.taxon))
+                if hasattr(node, 'label') \
+                        and node.label \
+                        and node.label is not None \
+                        and not self.suppress_leaf_node_labels:
+                    tag_parts.append(str(node.label))
+                if len(tag_parts) > 0:
+                    tag = self.node_label_element_separator.join(tag_parts)
+                else:
+                    return "" # anonymous leaf
+            else:
+                if hasattr(node, 'taxon') \
+                        and node.taxon \
+                        and node.taxon.label is not None \
+                        and not self.suppress_internal_taxon_labels:
+                    tag_parts.append(self._get_taxon_tree_token(node.taxon))
+                if hasattr(node, 'label') \
+                        and node.label \
+                        and node.label is not None \
+                        and not self.suppress_internal_node_labels:
+                    tag_parts.append(str(node.label))
+                if len(tag_parts) > 0:
+                    tag = self.node_label_element_separator.join(tag_parts)
+                else:
+                    return ""
+        if tag:
+            tag = nexusprocessing.escape_nexus_token(tag,
+                    preserve_spaces=self.preserve_spaces,
+                    quote_underscores=not self.unquoted_underscores)
+            return tag
+        else:
+            return ""
+
+    # def _compose_node(self, node):
+    #     """
+    #     Given a DendroPy Node, this returns the Node as a Newick
+    #     statement according to the class-defined formatting rules.
+    #     """
+    #     child_nodes = node.child_nodes()
+    #     if child_nodes:
+    #         subnodes = [self._compose_node(child) for child in child_nodes]
+    #         statement = '(' + ','.join(subnodes) + ')'
+    #         if not (self.suppress_internal_taxon_labels and self.suppress_internal_node_labels):
+    #             statement = statement + self._render_node_tag(node)
+    #         if node.edge and node.edge.length != None and not self.suppress_edge_lengths:
+    #             statement =  "{}:{}".format(statement, self.edge_label_compose_fn(node.edge))
+    #     else:
+    #         statement = self._render_node_tag(node)
+    #         if node.edge and node.edge.length != None and not self.suppress_edge_lengths:
+    #             statement =  "{}:{}".format(statement, self.edge_label_compose_fn(node.edge))
+    #     if not self.suppress_annotations:
+    #         node_annotation_comments = nexusprocessing.format_item_annotations_as_comments(node,
+    #                 nhx=self.annotations_as_nhx,
+    #                 real_value_format_specifier=self.real_value_format_specifier)
+    #         edge_annotation_comments = nexusprocessing.format_item_annotations_as_comments(node.edge,
+    #                 nhx=self.annotations_as_nhx,
+    #                 real_value_format_specifier=self.real_value_format_specifier)
+    #         statement = statement + node_annotation_comments + edge_annotation_comments
+    #     edge_comment_str = self._compose_comment_string(node.edge)
+    #     node_comment_str = self._compose_comment_string(node)
+    #     statement = statement + node_comment_str + edge_comment_str
+    #     return statement
diff --git a/dendropy/dataio/newickyielder.py b/dendropy/dataio/newickyielder.py
new file mode 100644
index 0000000..d71e538
--- /dev/null
+++ b/dendropy/dataio/newickyielder.py
@@ -0,0 +1,75 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Implementation of NEWICK-schema tree iterator.
+"""
+
+import sys
+from dendropy.dataio import ioservice
+from dendropy.dataio import newickreader
+from dendropy.dataio import nexusprocessing
+
class NewickTreeDataYielder(ioservice.TreeDataYielder):
    """
    Iterator over trees in Newick-format sources, yielding each tree as
    it is parsed instead of materializing whole collections.
    """

    def __init__(self,
            files=None,
            taxon_namespace=None,
            tree_type=None,
            **kwargs):
        """

        Parameters
        ----------
        files : iterable of sources
            Iterable of sources: a string element (``isinstance(i,str) ==
            True``) is treated as a file path; any other element is
            assumed to be a file-like object open for reading.
        taxon_namespace : |TaxonNamespace| instance
            The operational taxonomic unit concept namespace to use to
            manage taxon definitions.
        tree_type : type
            The class used to instantiate each parsed tree.
        \*\*kwargs : keyword arguments
            Passed directly to `newickreader.NewickReader`; see that
            class for the supported options.
        """
        ioservice.TreeDataYielder.__init__(self,
                files=files,
                taxon_namespace=taxon_namespace,
                tree_type=tree_type)
        self.newick_reader = newickreader.NewickReader(**kwargs)

    ###########################################################################
    ## Implementation of DataYielder interface

    def _yield_items_from_stream(self, stream):
        # The tokenizer and taxon-symbol mapper mirror the settings of
        # the wrapped NewickReader so that streamed trees match those
        # produced by a non-streaming read.
        reader = self.newick_reader
        tokenizer = nexusprocessing.NexusTokenizer(stream,
                preserve_unquoted_underscores=reader.preserve_unquoted_underscores)
        symbol_mapper = nexusprocessing.NexusTaxonSymbolMapper(
                taxon_namespace=self.attached_taxon_namespace,
                enable_lookup_by_taxon_number=True,
                case_sensitive=reader.case_sensitive_taxon_labels)
        while True:
            tree = reader._parse_tree_statement(
                    nexus_tokenizer=tokenizer,
                    tree_factory=self.tree_factory,
                    taxon_symbol_map_fn=symbol_mapper.require_taxon_for_symbol)
            if tree is None:
                return
            yield tree
diff --git a/dendropy/dataio/nexmlreader.py b/dendropy/dataio/nexmlreader.py
new file mode 100644
index 0000000..81afe0d
--- /dev/null
+++ b/dendropy/dataio/nexmlreader.py
@@ -0,0 +1,904 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Deserialization of NeXML-formatted data.
+"""
+
+import re
+import collections
+from dendropy.dataio import ioservice
+from dendropy.utility import container
+from dendropy.utility import textprocessing
+from dendropy.utility import error
+from dendropy.dataio import xmlprocessing
+
+SUPPORTED_NEXML_NAMESPACES = ('http://www.nexml.org/1.0', 'http://www.nexml.org/2009')
+
+def _from_nexml_tree_length_type(type_attr):
+    """
+    Given an attribute string read from a nexml tree element, returns
+    the python class of the edge length attribute.
+    """
+    if type_attr == "nex:IntTree":
+        return int
+    else:
+        return float
+
+class _AnnotationParser(object):
+
+    def __init__(self, namespace_registry=None):
+        self._namespace_registry = namespace_registry
+
+    def _parse_annotations(self, annotated, nxelement):
+        attrib = nxelement.attrib
+        xml_type = nxelement.parse_type()
+        if xml_type == 'LiteralMeta':
+            value = attrib.get("content", None)
+            key = attrib.get("property", None)
+            annotate_as_reference = False
+        else:
+            value = attrib.get("href", None)
+            key = attrib.get("rel", None)
+            annotate_as_reference = True
+        datatype_hint = attrib.get("datatype", None)
+        if key is None:
+            raise ValueError("Could not determine property/rel for meta element: %s\n%s" % (nxelement, attrib))
+        name_prefix, name = textprocessing.parse_curie_standard_qualified_name(key)
+        try:
+            namespace = self._namespace_registry.prefix_namespace_map[name_prefix]
+        except KeyError:
+            raise ValueError("CURIE-standard prefix '%s' not defined in document: %s" % (name_prefix, self._namespace_registry))
+        if datatype_hint is not None:
+            dt_prefix, dt = textprocessing.parse_curie_standard_qualified_name(datatype_hint)
+            if dt_prefix is not None:
+                try:
+                    dt_namespace = self._namespace_registry.prefix_namespace_map[dt_prefix]
+                except KeyError:
+                    raise ValueError("CURIE-standard prefix '%s' not defined in document: %s" % (dt_prefix, self._namespace_registry))
+                if dt_namespace.startswith("http://www.w3.org/2001/XMLSchema"):
+                    value = self._coerce_to_xml_schema_type(value, dt)
+                elif dt_namespace.startswith("http://www.nexml.org/1.0") or dt_namespace.startswith("http://www.nexml.org/2009"):
+                    value = self._coerce_to_nexml_type(value, dt)
+                elif dt_namespace.startswith("http://dendropy.org") or dt_namespace.startswith("http://packages.python.org/DendroPy"):
+                    value = self._coerce_to_dendropy_type(value, dt)
+        a = annotated.annotations.add_new(
+                name=name,
+                value=value,
+                datatype_hint=datatype_hint,
+                name_prefix=name_prefix,
+                namespace=namespace,
+                name_is_prefixed=False,
+                annotate_as_reference=annotate_as_reference)
+        top_annotations = [i for i in nxelement.findall_annotations()]
+        for annotation in top_annotations:
+            self._parse_annotations(a, annotation)
+
+    def _coerce_to_xml_schema_type(self, value, type_name):
+        if type_name in ("boolean"):
+            if value.lower() in ("1", "t", "true", "y", "yes",):
+                return True
+            else:
+                return False
+        elif type_name in ("decimal", "float", "double"):
+            coerce_type = float
+        elif type_name in ("byte", "int", "integer", "long", "negativeInteger", "nonNegativeInteger", "nonPositiveInteger", "short", "unsignedInt", "unsignedLong", "unsignedShort"):
+            coerce_type = int
+        else:
+            return value
+        return self._coerce_type(value, coerce_type)
+
+    def _coerce_to_dendropy_type(self, value, type_name):
+        if type_name in ("decimalRange"):
+            try:
+                value = [float(v) for v in value.split()]
+            except KeyError:
+                pass
+        return value
+
+    def _coerce_to_nexml_type(self, value, type_name):
+        if type_name in ("ContinuousSeq"):
+            try:
+                value = [float(v) for v in value.split()]
+            except KeyError:
+                pass
+        return value
+
+    def _coerce_type(self, value, to_type):
+        try:
+            value = to_type(value)
+        except ValueError:
+            pass
+        return value
+
+
class NexmlElement(xmlprocessing.XmlElement):
    """
    Namespace-aware wrapper around a nexml XML element.

    The ``iter_*``/``find_*``/``findall_*`` methods below are thin,
    intention-revealing shorthands for namespaced lookups of specific
    nexml tags on this element.
    """

    def __init__(self, element, default_namespace=None):
        # if default_namespace is None:
        #     default_namespace = NexmlReader.DEFAULT_NEXML_NAMESPACE
        # else:
        #     default_namespace = default_namespace
        xmlprocessing.XmlElement.__init__(self,
                element=element,
                default_namespace=default_namespace)
        # Matches "prefix:TypeName"; used by parse_type() to strip the
        # CURIE prefix from an xsi:type value.
        self.type_parse_pattern = re.compile(r"([A-Za-z0-9]+?):(.+)")

    ## Annotations ##

    def findall_annotations(self):
        # All child ``meta`` elements (annotations).
        return self.namespaced_findall("meta")

    ## TaxonSet Elements ##

    def iter_otus(self):
        return self.namespaced_getiterator("otus")

    def findall_otu(self):
        return self.namespaced_findall("otu")

    ## Characters ##

    def iter_characters(self):
        return self.namespaced_getiterator("characters")

    def find_char_format(self):
        return self.namespaced_find("format")

    def find_char_matrix(self):
        return self.namespaced_find("matrix")

    def findall_multistate_member(self):
        return self.namespaced_findall("member")

    def findall_polymorphic_state_set(self):
        return self.namespaced_findall("polymorphic_state_set")

    def findall_uncertain_state_set(self):
        return self.namespaced_findall("uncertain_state_set")

    def findall_char_state(self):
        return self.namespaced_findall("state")

    def findall_char_states(self):
        return self.namespaced_findall("states")

    def findall_char(self):
        return self.namespaced_findall("char")

    def findall_char_row(self):
        return self.namespaced_findall("row")

    def findall_char_cell(self):
        return self.namespaced_findall("cell")

    def find_char_seq(self):
        # Note: returns the *text content* of the ``seq`` element, not
        # the element itself (findtext, not find).
        return self.namespaced_findtext("seq")

    ## Trees ##

    def iter_trees(self):
        return self.namespaced_getiterator("trees")

    def findall_tree(self):
        return self.namespaced_findall("tree")

    def findall_node(self):
        return self.namespaced_findall("node")

    def findall_edge(self):
        return self.namespaced_findall("edge")

    def find_rootedge(self):
        return self.namespaced_find("rootedge")

    def parse_type(self):
        """
        Return this element's ``xsi:type`` value with any CURIE prefix
        stripped (e.g. "nex:LiteralMeta" -> "LiteralMeta").

        Raises
        ------
        ValueError
            If the element carries no ``xsi:type`` attribute.
        """
        type_value = self._element.get('{http://www.w3.org/2001/XMLSchema-instance}type', None)
        if type_value is None:
            raise ValueError("Type not specified for element '%s'" % self._element.get("id", None))
        m = self.type_parse_pattern.match(type_value)
        if m:
            return m.groups()[1]
        else:
            # No prefix present: return the raw type value unchanged.
            return type_value
+
+class NexmlReader(ioservice.DataReader, _AnnotationParser):
+    "Implements the interface for handling NEXML files."
+
+    ###########################################################################
+    ## Life-cycle and Setup
+
+    DEFAULT_NEXML_NAMESPACE = "http://www.nexml.org/2009"
+
+    def __init__(self, **kwargs):
+        """
+        Keyword Arguments
+        -----------------
+
+        default_namespace : str
+            Default namespace to use for elements.
+        case_sensitive_taxon_labels: boolean, default: `False`
+            If `True`, then case is respected when matching taxon names.
+            Default is `False`: case is ignored.
+        ignore_unrecognized_keyword_arguments : boolean, default: `False`
+            If `True`, then unsupported or unrecognized keyword arguments will
+            not result in an error. Default is `False`: unsupported keyword
+            arguments will result in an error.
+
+        """
+        _AnnotationParser.__init__(self)
+        ioservice.DataReader.__init__(self)
+        self.default_namespace = kwargs.pop("default_namespace", NexmlReader.DEFAULT_NEXML_NAMESPACE)
+        self.case_sensitive_taxon_labels = kwargs.pop('case_sensitive_taxon_labels', False)
+
+        ### NOTE: following are not actually supported.
+        ### They are here because some tests automatically add include them on calls.
+        ### TODO: remove these keywords from generic tests
+        self.suppress_internal_node_taxa = kwargs.pop("suppress_internal_node_taxa", True)
+        self.suppress_leaf_node_taxa = kwargs.pop("suppress_external_node_taxa", False) # legacy (will be deprecated)
+        self.suppress_leaf_node_taxa = kwargs.pop("suppress_leaf_node_taxa", self.suppress_leaf_node_taxa)
+        self.data_type = kwargs.pop("data_type", None)
+
+        self.check_for_unused_keyword_arguments(kwargs)
+
+        # Set up parsing meta-variables
+        self._id_taxon_namespace_map = {}
+        self._id_taxon_map = {}
+        self._taxon_namespace_factory = None
+        self._tree_list_factory = None
+        self._char_matrix_factory = None
+        self._state_alphabet_factory = None
+        self._global_annotations_target = None
+        self._taxon_namespaces = []
+        self._char_matrices = []
+        self._tree_lists = []
+        self._product = None
+
+    ###########################################################################
+    ## Reader Interface
+
    def _read(self,
            stream,
            taxon_namespace_factory=None,
            tree_list_factory=None,
            char_matrix_factory=None,
            state_alphabet_factory=None,
            global_annotations_target=None):
        """
        Parse the nexml document on ``stream`` and return a ``Product``
        bundling the taxon namespaces, tree lists, and character matrices
        created via the supplied factory callables.
        """
        xml_doc = xmlprocessing.XmlDocument(file_obj=stream,
                subelement_factory=self._subelement_factory)
        self._namespace_registry = xml_doc.namespace_registry
        # Factories must be stashed on ``self`` *before* parsing: the
        # per-element parser methods read them from instance state.
        self._taxon_namespace_factory = taxon_namespace_factory
        self._tree_list_factory = tree_list_factory
        self._char_matrix_factory = char_matrix_factory
        self._state_alphabet_factory = state_alphabet_factory
        self._global_annotations_target = global_annotations_target
        self._parse_document(xml_doc)
        self._product = self.Product(
                taxon_namespaces=self._taxon_namespaces,
                tree_lists=self._tree_lists,
                char_matrices=self._char_matrices)
        return self._product
+
+    ###########################################################################
+    ## Support
+
    def _subelement_factory(self, element):
        # Factory handed to XmlDocument so that every parsed element is
        # wrapped in a NexmlElement bound to this reader's default namespace.
        return NexmlElement(element, default_namespace=self.default_namespace)
+
+    ###########################################################################
+    ## Data Management
+
+    def _new_taxon_namespace(self, label=None):
+        if self.attached_taxon_namespace is not None:
+            return self.attached_taxon_namespace
+        taxon_namespace = self._taxon_namespace_factory(label=label)
+        self._taxon_namespaces.append(taxon_namespace)
+        return taxon_namespace
+
+    def _new_char_matrix(self, data_type, taxon_namespace, label=None, **kwargs):
+        char_matrix = self._char_matrix_factory(
+                data_type,
+                taxon_namespace=taxon_namespace,
+                label=label,
+                **kwargs)
+        self._char_matrices.append(char_matrix)
+        return char_matrix
+
+    def _new_state_alphabet(self, *args, **kwargs):
+        return self._state_alphabet_factory(*args, **kwargs)
+
+    def _new_tree_list(self, taxon_namespace, label=None):
+        tree_list = self._tree_list_factory(
+                taxon_namespace=taxon_namespace,
+                label=label)
+        self._tree_lists.append(tree_list)
+        return tree_list
+
+    ###########################################################################
+    ## Parsers
+
+    ## Following methods are class-specific ###
+
+    def _parse_document(self, xml_doc):
+        xml_root = xml_doc.root
+        self._parse_taxon_namespaces(xml_root)
+        if self._char_matrix_factory is not None:
+            self._parse_char_matrices(xml_root)
+        if self._tree_list_factory is not None:
+            self._parse_tree_lists(xml_root)
+        if self._global_annotations_target is not None:
+            top_annotations = [i for i in xml_root.findall_annotations()]
+            for annotation in top_annotations:
+                self._parse_annotations(self._global_annotations_target, annotation)
+
+    def _parse_taxon_namespaces(self, xml_root):
+        """
+        Parse every ``<otus>`` element under ``xml_root`` into a taxon
+        namespace, recording id-to-object mappings used later to resolve
+        ``otu``/``otus`` references from trees and character matrices.
+        """
+        for nxtaxa in xml_root.iter_otus():
+            taxon_namespace_label = nxtaxa.get('label', None)
+            taxon_namespace = self._new_taxon_namespace(label=taxon_namespace_label)
+            # Fall back to the Python object id when the element has no XML id.
+            taxon_namespace_id = nxtaxa.get('id', id(taxon_namespace))
+            self._id_taxon_namespace_map[taxon_namespace_id] = taxon_namespace
+            annotations = [i for i in nxtaxa.findall_annotations()]
+            for annotation in annotations:
+                self._parse_annotations(taxon_namespace, annotation)
+            # Label lookup table used only when re-using an attached taxon
+            # namespace: case-sensitivity of the match is configurable.
+            if self.case_sensitive_taxon_labels:
+                label_taxon_map = {}
+            else:
+                label_taxon_map = container.OrderedCaselessDict()
+            if self.attached_taxon_namespace is not None:
+                for t in taxon_namespace:
+                    label_taxon_map[t.label] = t
+            for idx, nxtaxon in enumerate(nxtaxa.findall_otu()):
+                taxon = None
+                taxon_label = nxtaxon.get('label', None)
+                taxon_oid = nxtaxon.get('id', id(nxtaxon))
+                # When attached to an existing namespace, try to match an
+                # existing taxon by label before creating a new one.
+                if taxon_label is not None and self.attached_taxon_namespace is not None:
+                    # taxon = label_taxon_map.get_taxon(
+                    #         label=taxon_label,
+                    #         case_sensitive=self.case_sensitive_taxon_labels)
+                    try:
+                        taxon = label_taxon_map[taxon_label]
+                    except KeyError:
+                        taxon = None
+                if taxon is None:
+                    taxon = taxon_namespace.new_taxon(label=taxon_label)
+                annotations = [i for i in nxtaxon.findall_annotations()]
+                for annotation in annotations:
+                    self._parse_annotations(taxon, annotation)
+                # Key by (otus id, otu id): otu ids are only unique within
+                # their enclosing otus block.
+                self._id_taxon_map[(taxon_namespace_id, taxon_oid)] = taxon
+
+    def _parse_char_matrices(self, xml_root):
+        nxc = _NexmlCharBlockParser(self._namespace_registry,
+                self._id_taxon_namespace_map,
+                self._id_taxon_map,
+                self._new_char_matrix,
+                self._state_alphabet_factory)
+        for char_matrix_element in xml_root.iter_characters():
+            nxc.parse_char_matrix(char_matrix_element)
+
+    def _parse_tree_lists(self, xml_root):
+        for trees_idx, trees_element in enumerate(xml_root.iter_trees()):
+            self._parse_tree_list(trees_element, trees_idx, True)
+
+    def _parse_tree_list(self, nxtrees, trees_idx=None, add_to_tree_list=True):
+        trees_id = nxtrees.get('id', "Trees" + str(trees_idx))
+        trees_label = nxtrees.get('label', None)
+        otus_id = nxtrees.get('otus', None)
+        if otus_id is None:
+            raise Exception("Taxa block not specified for trees block '{}'".format(otus_id))
+        taxon_namespace = self._id_taxon_namespace_map.get(otus_id, None)
+        if not taxon_namespace:
+            raise Exception("Tree block '{}': Taxa block '{}' not found".format(trees_id, otus_id))
+        tree_list = self._new_tree_list(
+                label=trees_label,
+                taxon_namespace=taxon_namespace)
+        annotations = [i for i in nxtrees.findall_annotations()]
+        for annotation in annotations:
+            self._parse_annotations(tree_list, annotation)
+        tree_parser = _NexmlTreeParser(
+                id_taxon_map=self._id_taxon_map,
+                annotations_processor_fn=self._parse_annotations,
+                )
+        for tree_element in nxtrees.findall_tree():
+            tree_obj = tree_list.new_tree()
+            tree_parser.build_tree(tree_obj, tree_element, otus_id)
+
+class _NexmlTreeParser(object):
+
+    EdgeInfo = collections.namedtuple(
+            "EdgeInfo",
+            ["edge_id", "label", "tail_node_id", "head_node_id", "length", "annotations"]
+            )
+
+    def __init__(self,
+            id_taxon_map,
+            annotations_processor_fn,
+            ):
+        self._id_taxon_map = id_taxon_map
+        self._process_annotations = annotations_processor_fn
+
+    def build_tree(self, tree_obj, tree_element, otus_id):
+        tree_obj.label = tree_element.get('label', '')
+        tree_type_attr = tree_element.get('{http://www.w3.org/2001/XMLSchema-instance}type')
+        tree_obj.length_type = _from_nexml_tree_length_type(tree_type_attr)
+        tree_obj.is_rooted = False # default unless explicitly set
+        annotations = [i for i in tree_element.findall_annotations()]
+        for annotation in annotations:
+            self._process_annotations(tree_obj, annotation)
+        unparented_node_set, node_id_map, root_node_list = self._parse_nodes(
+                tree_element,
+                tree_obj.node_factory,
+                otus_id)
+        if len(root_node_list) > 1:
+            raise Exception("Multiple root nodes defined but the DendroPy tree model currently only supported single-root trees")
+        elif len(root_node_list) == 1:
+            tree_obj.seed_node = root_node_list[0]
+            tree_obj.is_rooted = True
+        edges_info = self._parse_edges_info(tree_element, length_type=tree_obj.length_type)
+        for edge_info in edges_info:
+            if edge_info.head_node_id is None:
+                raise Exception("Edge '{}' does not specify a head (target) node".format(edge_info.edge_id, edge_info.head_node_id))
+            try:
+                head_node = node_id_map[edge_info.head_node_id]
+            except KeyError:
+                raise Exception("Edge '{}' specifies a non-defined head (target) node '{}'".format(edge_info.edge_id, edge_info.head_node_id))
+            if edge_info.tail_node_id is not None:
+                try:
+                    tail_node = node_id_map[edge_info.tail_node_id]
+                except KeyError:
+                    raise Exception("Edge '{}' specifies a non-defined tail (source) node '{}'".format(edge_info.edge_id, edge_info.tail_node_id))
+                head_node.parent_node = tail_node
+                assert head_node.edge.tail_node is tail_node
+                unparented_node_set.remove(head_node)
+            else:
+                assert head_node.parent_node is None
+            self._set_edge_details(head_node.edge, edge_info)
+
+        # find node(s) without parent
+        # unparented_node_sets = []
+        # for node in nodes.values():
+        #     if node.parent_node == None:
+        #         unparented_node_sets.append(node)
+
+        # If one unparented_node_sets node found, this is the root: we use
+        # it as the tree head node. If multiple unparented_node_sets nodes
+        # are found, then we add them all as child_nodes of the
+        # existing head node. If none, then we have some sort of
+        # cyclicity, and we are not dealing with a tree.
+        if len(unparented_node_set) == 1:
+            unparented_node = unparented_node_set.pop()
+            if root_node_list:
+                if root_node_list[0] is not unparented_node:
+                    raise Exception("Tree already has an explictly defined root node, but node without parent found: {}".format(unparented_node))
+            else:
+                tree_obj.seed_node = unparented_node
+        elif len(unparented_node_sets) > 1:
+            for node in unparented_node_set:
+                tree_obj.seed_node.add_child(node)
+        else:
+            raise Exception("Structural error: tree must be acyclic.")
+
+        root_edge_element = tree_element.find_rootedge()
+        if root_edge_element is not None:
+            root_edge_info = self._parse_edge_info(root_edge_element, tree_obj.length_type)
+            root_node = node_id_map[root_edge_info.head_node_id]
+            if root_node is not tree_obj.seed_node:
+                raise Exception("Root edge does not subtend root node")
+            root_edge = root_node.edge
+            self._set_edge_details(root_edge, root_edge_info)
+            # tree_obj.is_rooted = True
+
+        return tree_obj
+
+    def _parse_nodes(self, tree_element, node_factory, otus_id):
+        """
+        Given an XmlElement representation of a NEXML tree element,
+        (`nex:tree`) this will return a dictionary of DendroPy Node
+        objects with the node_id as the key.
+        """
+        node_set = set()
+        node_id_map = {}
+        root_node_list = []
+        for nxnode in tree_element.findall_node():
+            node_id = nxnode.get('id', None)
+            node = node_factory()
+            node_id_map[node_id] = node
+            node_set.add(node)
+            node_id_map[node_id].label = nxnode.get('label', None)
+            taxon_id = nxnode.get('otu', None)
+            if taxon_id is not None:
+                try:
+                    taxon = self._id_taxon_map[(otus_id, taxon_id)]
+                except KeyError:
+                    raise Exception("Taxon with id '{}' not defined '{}'".format(taxon_id, otus_id))
+                node_id_map[node_id].taxon = taxon
+            annotations = [i for i in nxnode.findall_annotations()]
+            for annotation in annotations:
+                self._process_annotations(node_id_map[node_id], annotation)
+            rooting_state = nxnode.get('root', None)
+            if rooting_state is not None and rooting_state.lower() in ("1", "t", "true"):
+                root_node_list.append(node)
+        return node_set, node_id_map, root_node_list
+
+    def _parse_edges_info(self, tree_element, length_type):
+        edges_info = []
+        for nxedge in tree_element.findall_edge():
+            edges_info.append(self._parse_edge_info(nxedge, length_type))
+        return edges_info
+
+    def _parse_edge_info(self, nxedge, length_type):
+        edge_id = nxedge.get('id', None)
+        edge_label = nxedge.get('label', None)
+        tail_node_id = nxedge.get('source', None)
+        # assert tail_node_id is not None
+        head_node_id = nxedge.get('target', None)
+        # assert head_node_id is not None
+        edge_length_str = nxedge.get('length', 0.0)
+        edge_length = None
+        try:
+            edge_length = length_type(edge_length_str)
+        except ValueError:
+            msg = "Edge {} 'length' attribute is not of type {}: '{}'".format(
+                    edge_id, str(length_type), edge_length_str)
+            raise Exception(msg)
+        annotations = [i for i in nxedge.findall_annotations()]
+        e = _NexmlTreeParser.EdgeInfo(
+                edge_id=edge_id,
+                label=edge_label,
+                tail_node_id=tail_node_id,
+                head_node_id=head_node_id,
+                length=edge_length,
+                annotations=annotations)
+        return e
+
+    def _set_edge_details(self, edge, edge_info):
+        edge.length = edge_info.length
+        edge.label = edge_info.label
+        for annotation in edge_info.annotations:
+            self._process_annotations(edge, annotation)
+
+class _NexmlCharBlockParser(_AnnotationParser):
+    "Parses an XmlElement representation of NEXML taxa blocks."
+
+    def __init__(self,
+            namespace_registry,
+            id_taxon_namespace_map,
+            id_taxon_map,
+            char_matrix_factory,
+            state_alphabet_factory,
+            ):
+        _AnnotationParser.__init__(self, namespace_registry)
+        self._id_taxon_namespace_map = id_taxon_namespace_map
+        self._id_taxon_map = id_taxon_map
+        self._char_matrix_factory = char_matrix_factory
+        self._state_alphabet_factory = state_alphabet_factory
+
+        self._id_state_alphabet_map = {}
+        self._id_state_map = {}
+        self._id_chartype_map = {}
+        self._char_types = []
+        self._chartype_id_to_pos_map = {}
+
+    def parse_char_matrix(self, nxchars):
+        """
+        Given an XmlElement representing a nexml characters block, this
+        instantiates and returns a corresponding DendroPy CharacterMatrix object.
+        """
+
+        # clear
+        self._id_state_alphabet_map = {}
+        self._id_state_map = {}
+        self._id_chartype_map = {}
+        self._char_types = []
+        self._chartype_id_to_pos_map = {}
+
+        # initiaiize
+        label = nxchars.get('label', None)
+        char_matrix_oid = nxchars.get('oid', '')
+
+        # set up taxa
+        otus_id = nxchars.get('otus', None)
+        if otus_id is None:
+            raise Exception("Character Block %s (\"%s\"): Taxon namespace not specified" % (char_matrix_oid, char_matrix.label))
+        taxon_namespace = self._id_taxon_namespace_map.get(otus_id, None)
+        if not taxon_namespace:
+            raise Exception("Character Block %s (\"%s\"): Specified taxon namespace not found" % (char_matrix_oid, char_matrix.label))
+
+        # character matrix instantiation
+        nxchartype = nxchars.parse_type()
+        extra_kwargs = {}
+        if nxchartype.startswith('Dna'):
+            data_type = "dna"
+        elif nxchartype.startswith('Rna'):
+            data_type = "rna"
+        elif nxchartype.startswith('Protein'):
+            data_type = "protein"
+        elif nxchartype.startswith('Restriction'):
+            data_type = "restriction"
+        elif nxchartype.startswith('Standard'):
+            data_type = "standard"
+            extra_kwargs["default_state_alphabet"] = None
+        elif nxchartype.startswith('Continuous'):
+            data_type = "continuous"
+        else:
+            raise Exception("Character Block %s (\"%s\"): Character type '%s' not supported" % (char_matrix_oid, char_matrix.label, nxchartype))
+        char_matrix = self._char_matrix_factory(
+                data_type,
+                taxon_namespace=taxon_namespace,
+                label=label,
+                **extra_kwargs)
+
+        # annotation processing
+        annotations = [i for i in nxchars.findall_annotations()]
+        for annotation in annotations:
+            self._parse_annotations(char_matrix, annotation)
+
+        # get state mappings
+        nxformat = nxchars.find_char_format()
+        if nxformat is not None:
+            self.parse_characters_format(nxformat, data_type, char_matrix)
+        elif data_type == "standard":
+            self.create_standard_character_alphabet(char_matrix)
+
+        nxmatrix = nxchars.find_char_matrix()
+        annotations = [i for i in nxmatrix.findall_annotations()]
+        for annotation in annotations:
+            self._parse_annotations(char_matrix.taxon_seq_map, annotation)
+        for nxrow in nxmatrix.findall_char_row():
+            row_id = nxrow.get('id', None)
+            label = nxrow.get('label', None)
+            taxon_id = nxrow.get('otu', None)
+            try:
+                taxon = self._id_taxon_map[(otus_id, taxon_id)]
+            except KeyError:
+                raise error.DataParseError(message='Character Block %s (\"%s\"): Taxon with id "%s" not defined in taxa block "%s"' % (char_matrix.oid, char_matrix.label, taxon_id, otus_id))
+
+            character_vector = char_matrix.new_sequence(taxon=taxon)
+            annotations = [i for i in nxrow.findall_annotations()]
+            for annotation in annotations:
+                self._parse_annotations(character_vector, annotation)
+
+            if data_type == "continuous":
+                if nxchartype.endswith('Seqs'):
+                    seq = nxrow.find_char_seq()
+                    if seq is not None:
+                        seq = seq.replace('\n\r', ' ').replace('\r\n', ' ').replace('\n', ' ').replace('\r',' ')
+                        col_idx = -1
+                        for char in seq.split(' '):
+                            char = char.strip()
+                            if char:
+                                col_idx += 1
+                                if len(self._char_types) <= col_idx:
+                                    raise error.DataParseError(message="Character column/type ('<char>') not defined for character in position"\
+                                        + " %d (matrix = '%s' row='%s', taxon='%s')" % (col_idx+1, char_matrix.oid, row_id, taxon.label))
+                                character_vector.append(character_value=float(char), character_type=self._char_types[col_idx])
+                else:
+                    for nxcell in nxrow.findall_char_cell():
+                        chartype_id = nxcell.get('char', None)
+                        if chartype_id is None:
+                            raise error.DataParseError(message="'char' attribute missing for cell: cell markup must indicate character column type for character"\
+                                        + " (matrix = '%s' row='%s', taxon='%s')" % (char_matrix.oid, row_id, taxon.label))
+                        if chartype_id not in self._id_chartype_map:
+                            raise error.DataParseError(message="Character type ('<char>') with id '%s' referenced but not found for character" % chartype_id \
+                                        + " (matrix = '%s' row='%s', taxon='%s')" % (char_matrix.oid, row_id, taxon.label))
+                        chartype = self._id_chartype_map[chartype_id]
+                        pos_idx = self._char_types.index(chartype)
+#                         column = id_chartype_map[chartype_id]
+#                         state = column.state_id_map[cell.get('state', None)]
+                        # annotations = [i for i in nxcell.findall_annotations]
+                        # for annotation in annotations:
+                        #     self._parse_annotations(cell, annotation)
+                        character_vector.append(character_value=float(nxcell.get('state')),
+                                character_type=chartype)
+            else:
+                if nxchartype.endswith('Seqs'):
+                    seq = nxrow.find_char_seq()
+                    if seq is not None:
+                        seq = seq.replace(' ', '').replace('\n', '').replace('\r', '')
+                        col_idx = -1
+                        for char in seq:
+                            col_idx += 1
+                            state_alphabet = char_matrix.character_types[col_idx].state_alphabet
+                            try:
+                                state = state_alphabet[char]
+                            except KeyError:
+                                raise error.DataParseError(message="Character Block row '%s', character position %s: State with symbol '%s' in sequence '%s' not defined" \
+                                        % (row_id, col_idx, char, seq))
+                            if len(self._char_types) <= col_idx:
+                                raise error.DataParseError(message="Character column/type ('<char>') not defined for character in position"\
+                                    + " %d (row='%s', taxon='%s')" % (col_idx+1, row_id, taxon.label))
+                            character_type = self._char_types[col_idx]
+                            character_vector.append(character_value=state,
+                                    character_type=character_type)
+                else:
+                    for nxcell in nxrow.findall_char_cell():
+                        chartype_id = nxcell.get('char', None)
+                        if chartype_id is None:
+                            raise error.DataParseError(message="'char' attribute missing for cell: cell markup must indicate character column type for character"\
+                                        + " (matrix = '%s' row='%s', taxon='%s')" % (char_matrix_oid, row_id, taxon.label))
+                        if chartype_id not in self._id_chartype_map:
+                            raise error.DataParseError(message="Character type ('<char>') with id '%s' referenced but not found for character" % chartype_id \
+                                        + " (matrix = '%s' row='%s', taxon='%s')" % (char_matrix_oid, row_id, taxon.label))
+                        chartype = self._id_chartype_map[chartype_id]
+                        state_alphabet = self._id_chartype_map[chartype_id].state_alphabet
+                        pos_idx = self._chartype_id_to_pos_map[chartype_id]
+                        state = self._id_state_map[ (state_alphabet, nxcell.get('state', None)) ]
+                        character_vector.set_at(pos_idx,
+                                character_value=state,
+                                character_type=chartype)
+                        # self._id_state_alphabet_map = {}
+                        # self._id_state_map = {}
+                        # self._id_chartype_map = {}
+
+            char_matrix[taxon] = character_vector
+
+        # if fixed_state_alphabet:
+        #     char_matrix.remap_to_default_state_alphabet_by_symbol(purge_other_state_alphabets=True)
+
+    def parse_ambiguous_state(self, nxstate, state_alphabet):
+        """
+        Parses an XmlElement represent an ambiguous discrete character state,
+        ("uncertain_state_set")
+        and returns a corresponding StateAlphabetElement object.
+        """
+        state_oid = nxstate.get('id', None)
+        state_symbol = nxstate.get('symbol', None)
+        token = nxstate.get('token', None)
+        member_states = []
+        for nxmember in nxstate.findall_multistate_member():
+            member_state_id = nxmember.get('state', None)
+            member_state = self._id_state_map[ (state_alphabet, member_state_id) ]
+            member_states.append(member_state)
+        state = state_alphabet.new_multistate(symbol=state_symbol,
+                state_denomination=state_alphabet.AMBIGUOUS_STATE,
+                member_states=member_states)
+        assert (state_alphabet, state_oid) not in self._id_state_map
+        self._id_state_map[ (state_alphabet, state_oid) ] = state
+        if token is not None:
+            state_alphabet.new_symbol_synonym(token, state_symbol)
+        return state
+
+    def parse_polymorphic_state(self, nxstate, state_alphabet):
+        """
+        Parses an XmlElement represent a polymorphic discrete character state,
+        ("polymorphic_state_set")
+        and returns a corresponding StateAlphabetElement object.
+        """
+        state_oid = nxstate.get('id', None)
+        state_symbol = nxstate.get('symbol', None)
+        token = nxstate.get('token', None)
+        member_states = []
+        for nxmember in nxstate.findall_multistate_member():
+            member_state_id = nxmember.get('state', None)
+            member_state = self._id_state_map[ (state_alphabet, member_state_id) ]
+            member_states.append(member_state)
+        for nxambiguous in nxstate.findall_uncertain_state_set():
+            member_states.append(self.parse_ambiguous_state(nxstate, state_alphabet))
+        state = state_alphabet.new_multistate(symbol=state_symbol,
+                state_denomination=state_alphabet.POLYMORPHIC_STATE,
+                member_states=member_states)
+        assert (state_alphabet, state_oid) not in self._id_state_map
+        self._id_state_map[ (state_alphabet, state_oid) ] = state
+        if token is not None:
+            state_alphabet.new_symbol_synonym(token, state_symbol)
+        return state
+
+    def parse_state_alphabet(self, nxstates):
+        """
+        Given an XmlElement representing a nexml definition of (discrete or standard) states
+        ("states"), this returns a corresponding StateAlphabet object.
+        """
+        state_alphabet_oid = nxstates.get("id", None)
+        state_alphabet = self._state_alphabet_factory()
+        state_alphabet.autocompile_lookup_tables = False
+        self._id_state_alphabet_map[state_alphabet_oid] = state_alphabet
+        for nxstate in nxstates.findall_char_state():
+            state_oid = nxstate.get('id', None)
+            state_symbol = nxstate.get('symbol', None)
+            state = state_alphabet.new_fundamental_state(symbol=state_symbol)
+            token = nxstate.get('token', None)
+            assert (state_alphabet, state_oid) not in self._id_state_map
+            self._id_state_map[ (state_alphabet, state_oid) ] = state
+            if token is not None:
+                state_alphabet.new_symbol_synonym(token, state_symbol)
+        for nxstate in nxstates.findall_uncertain_state_set():
+            self.parse_ambiguous_state(nxstate, state_alphabet)
+        for nxstate in nxstates.findall_polymorphic_state_set():
+            self.parse_polymorphic_state(nxstate, state_alphabet)
+        state_alphabet.autocompile_lookup_tables = True
+        state_alphabet.compile_lookup_mappings()
+        return state_alphabet
+
+    def parse_characters_format(self, nxformat, data_type, char_matrix):
+        """
+        Given an XmlElement schema element ("format"), this parses the
+        state definitions (if any) and characters (column definitions, if any),
+        and populates the given char_matrix accordingly.
+        """
+        # if data_type == "standard":
+        #     for nxstates in nxformat.findall_char_states():
+        #         char_matrix.state_alphabets.append(self.parse_state_alphabet(nxstates))
+        # else:
+        #     pass
+        if data_type in ("dna", "rna", "protein", "restriction"):
+            # fixed alphabet: map to existing states
+            for nxstates in nxformat.findall_char_states():
+                state_alphabet_oid = nxstates.get("id", None)
+                if state_alphabet_oid is not None:
+                    self._id_state_alphabet_map[state_alphabet_oid] = char_matrix.default_state_alphabet
+                for nxstate in nxstates.findall_char_state():
+                    state_oid = nxstate.get('id', None)
+                    label = nxstate.get('label', None)
+                    symbol = nxstate.get('symbol', None)
+                    token = nxstate.get('token', None)
+                    try:
+                        state = char_matrix.default_state_alphabet[symbol]
+                    except KeyError:
+                        raise Exception("'{}' is not a recognized symbol for the state alphabet for the '{}' data type".format(symbol, data_type))
+                    assert (char_matrix.default_state_alphabet, state_oid) not in self._id_state_map
+                    self._id_state_map[ (char_matrix.default_state_alphabet, state_oid) ] = state
+                for nxstate in nxstates.findall_polymorphic_state_set():
+                    state_oid = nxstate.get('id', None)
+                    symbol = nxstate.get('symbol', None)
+                    try:
+                        state = char_matrix.default_state_alphabet[symbol]
+                    except KeyError:
+                        raise Exception("'{}' is not a recognized symbol for the state alphabet for the '{}' data type".format(symbol, data_type))
+                    assert (char_matrix.default_state_alphabet, state_oid) not in self._id_state_map
+                    self._id_state_map[ (char_matrix.default_state_alphabet, state_oid) ] = state
+                for nxstate in nxstates.findall_uncertain_state_set():
+                    state_oid = nxstate.get('id', None)
+                    symbol = nxstate.get('symbol', None)
+                    try:
+                        state = char_matrix.default_state_alphabet[symbol]
+                    except KeyError:
+                        raise Exception("'{}' is not a recognized symbol for the state alphabet for the '{}' data type".format(symbol, data_type))
+                    assert (char_matrix.default_state_alphabet, state_oid) not in self._id_state_map
+                    self._id_state_map[ (char_matrix.default_state_alphabet, state_oid) ] = state
+        else:
+            for nxstates in nxformat.findall_char_states():
+                char_matrix.state_alphabets.append(self.parse_state_alphabet(nxstates))
+        for nxchars in nxformat.findall_char():
+            col = char_matrix.new_character_type()
+            char_state_set_id = nxchars.get('states')
+            if char_state_set_id is not None:
+                state_alphabet = self._id_state_alphabet_map.get(char_state_set_id, None)
+                if state_alphabet is None:
+                    raise Exception("State set '%s' not defined" % char_state_set_id)
+                col.state_alphabet = state_alphabet
+            elif hasattr(char_matrix, "default_state_alphabet") \
+                and char_matrix.default_state_alphabet is not None:
+                col.state_alphabet = char_matrix.default_state_alphabet
+            char_matrix.character_types.append(col)
+            chartype_id = nxchars.get('id')
+            self._chartype_id_to_pos_map[chartype_id] = len(self._chartype_id_to_pos_map)
+            self._id_chartype_map[chartype_id] = col
+            self._char_types.append(col)
+
+    def create_standard_character_alphabet(self, char_matrix, symbol_list=None):
+        """
+        Returns a standard character state alphabet based on symbol_list.
+        Defaults to '0' - '9' if not specified.
+        """
+        if symbol_list is None:
+            symbol_list = [str(i) for i in xrange(10)]
+        state_alphabet = dendropy.StateAlphabet()
+        for s in symbol_list:
+            state_alphabet.append(dendropy.StateAlphabetElement(symbol=s))
+        char_matrix.state_alphabets.append(state_alphabet)
+        char_matrix.default_state_alphabet = state_alphabet
+
diff --git a/dendropy/dataio/nexmlwriter.py b/dendropy/dataio/nexmlwriter.py
new file mode 100644
index 0000000..4741652
--- /dev/null
+++ b/dendropy/dataio/nexmlwriter.py
@@ -0,0 +1,665 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Serialization of NeXML-formatted data.
+"""
+
+import json
+import textwrap
+import collections
+from dendropy.dataio import ioservice
+
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+
+############################################################################
+## Local Module Methods
+
+def _safe_unicode(obj, *args):
+    """ return the unicode representation of obj """
+    try:
+        return unicode(obj, *args)
+    except UnicodeDecodeError:
+        # obj is byte string
+        ascii_text = str(obj).encode('string_escape')
+        return unicode(ascii_text)
+
+def _safe_str(obj):
+    """ return the byte string representation of obj """
+    try:
+        return str(obj)
+    except UnicodeEncodeError:
+        # obj is unicode
+        return unicode(obj).encode('unicode_escape')
+
def _protect_attr(x):
    """
    Return ``x`` as a double-quoted, escaped XML attribute value.

    NOTE(review): JSON escaping covers quotes, backslashes and control
    characters, but not '&' or '<'; labels containing a raw ampersand
    would yield invalid XML — confirm upstream sanitization.
    """
    escaped = _safe_str(x)
    return json.dumps(escaped)
+
+def _to_nexml_indent_items(items, indent="", indent_level=0):
+    """
+    Renders list of items into a string of lines in which each line is
+    indented appropriately.
+    """
+    return '\n'.join(["%s%s" % (indent * indent_level, str(item)) \
+                     for item in items])
+
+def _to_nexml_chartype(chartype):
+    """
+    Returns nexml characters element attribute corresponding to given
+    chartype.
+    """
+#     if chartype == dendropy.DNA_CHARTYPE:
+#         return "nex:DnaSeqs"
+#     if chartype == dendropy.RNA_CHARTYPE:
+#         return "nex:RnaSeqs"
+    return None
+
+def _to_nexml_tree_length_type(length_type):
+    """
+    Returns attribute string for nexml tree type depending on whether
+    ``length_type`` is an int or a float.
+    """
+    if length_type == int:
+        return "nex:IntTree"
+    elif length_type == float:
+        return "nex:FloatTree"
+    else:
+        raise Exception('Unrecognized value class %s' % length_type)
+
+############################################################################
+## NexmlWriter
+
+class NexmlWriter(ioservice.DataWriter):
+    "Implements the DataWriter interface for handling NEXML files."
+
    def __init__(self, **kwargs):
        """

        Keyword Arguments
        -----------------

        markup_as_sequences : boolean
            If `True`, then character data will be marked up as sequences
            instead of individual cells. Defaults to `False`.
        suppress_unreferenced_taxon_namespaces: boolean, default: `False`
            If `True`, then when writing |DataSet| objects, any
            |TaxonNamespace| object in the DataSet's ``taxon_namespaces``
            collection will *not* be written as a "TAXA" block if it is not
            referenced by any character matrix (``char_matrices``) or tree list
            (``tree_lists``).
        ignore_unrecognized_keyword_arguments : boolean, default: `False`
            If `True`, then unsupported or unrecognized keyword arguments will
            not result in an error. Default is `False`: unsupported keyword
            arguments will result in an error.

        """

        # base class init must run first (presumably establishes
        # attached_taxon_namespace etc. used by _write — base not in view)
        ioservice.DataWriter.__init__(self)

        # customization: consume our own kwargs before handing the
        # remainder to the base-class unused-kwargs check
        self.markup_as_sequences = kwargs.pop("markup_as_sequences", False)
        self.suppress_unreferenced_taxon_namespaces = kwargs.pop("suppress_unreferenced_taxon_namespaces", False)
        self.check_for_unused_keyword_arguments(kwargs)

        # book-keeping: per-write id registries; all reset by _write()
        self.indent = "    "                      # one indent level of output
        self._prefix_uri_tuples = set()           # (prefix, uri) pairs collected from metadata
        self._taxon_namespaces_to_write = []      # namespaces to emit as <otus>
        self._taxon_namespace_id_map = {}         # TaxonNamespace -> xml id
        self._object_xml_id = {}                  # any object -> minted xml id (never reset; ids stay unique)
        self._taxon_id_map = {}                   # Taxon -> xml id, for otu="..." refs
        self._node_id_map = {}                    # Node -> xml id, for edge source/target refs
        self._state_alphabet_id_map = {}          # StateAlphabet -> xml id
        self._state_id_map = {}                   # state -> xml id, for cell state="..." refs
+
    def _write(self,
            stream,
            taxon_namespaces=None,
            tree_lists=None,
            char_matrices=None,
            global_annotations_target=None):
        # Top-level serialization entry point: writes the complete nexml
        # document (taxa, characters, trees) to ``stream``.

        # reset book-keeping (note: _object_xml_id is deliberately not
        # reset, so xml ids remain unique across multiple writes)
        self._taxon_namespaces_to_write = []
        self._taxon_namespace_id_map = {}
        self._taxon_id_map = {}
        self._node_id_map = {}
        self._state_alphabet_id_map = {}
        self._state_id_map = {}

        # Destination:
        # Writing to buffer instead of directly to output
        # stream so that all namespaces referenced in metadata
        # can be written
        body = StringIO()

        # comments and metadata
        self._write_annotations_and_comments(global_annotations_target, body, 1)

        # Taxon namespace discovery: a namespace is written only if its
        # OrderedDict value is True; with suppress_unreferenced_taxon_namespaces
        # namespaces are preloaded as False (to preserve order) and flipped
        # to True when referenced by a tree list or character matrix.
        candidate_taxon_namespaces = collections.OrderedDict()
        if self.attached_taxon_namespace is not None:
            candidate_taxon_namespaces[self.attached_taxon_namespace] = True
        elif taxon_namespaces is not None:
            if self.suppress_unreferenced_taxon_namespaces:
                # preload to preserve order
                for tns in taxon_namespaces:
                    candidate_taxon_namespaces[tns] = False
            else:
                for tns in taxon_namespaces:
                    candidate_taxon_namespaces[tns] = True
        for data_collection in (tree_lists, char_matrices):
            if data_collection is not None:
                for i in data_collection:
                    if self.attached_taxon_namespace is None or i.taxon_namespace is self.attached_taxon_namespace:
                        candidate_taxon_namespaces[i.taxon_namespace] = True
        self._taxon_namespaces_to_write = [tns for tns in candidate_taxon_namespaces if candidate_taxon_namespaces[tns]]

        # <otus> blocks must be written before anything that references them
        for tns in self._taxon_namespaces_to_write:
            self._write_taxon_namespace(tns, body)

        if char_matrices:
            for char_matrix in char_matrices:
                self._write_char_matrix(char_matrix=char_matrix, dest=body)

        if tree_lists:
            for tree_list in tree_lists:
                self._write_tree_list(tree_list=tree_list, dest=body)

        # the root element is written last so that every namespace prefix
        # collected into self._prefix_uri_tuples during body serialization
        # can be declared on it
        self._write_to_nexml_open(stream, indent_level=0)
        stream.write(body.getvalue())
        self._write_to_nexml_close(stream, indent_level=0)
+
    def _write_taxon_namespace(self, taxon_namespace, dest, indent_level=1):
        """
        Writes ``taxon_namespace`` as an <otus> element, registering the
        xml ids assigned to the namespace and to each taxon so that later
        <characters> and <trees> elements can reference them.
        """
        self._taxon_namespace_id_map[taxon_namespace] = self._get_nexml_id(taxon_namespace)
        dest.write(self.indent * indent_level)
        parts = []
        parts.append('otus')
        parts.append('id="%s"' % self._taxon_namespace_id_map[taxon_namespace])
        if taxon_namespace.label:
            parts.append('label=%s' % _protect_attr(taxon_namespace.label))
        dest.write("<%s>\n" % ' '.join(parts))
        self._write_annotations_and_comments(taxon_namespace, dest, indent_level=indent_level+1)
        for taxon in taxon_namespace:
            dest.write(self.indent * (indent_level+1))
            parts = []
            parts.append('otu')
            self._taxon_id_map[taxon] = self._get_nexml_id(taxon)
            parts.append('id="%s"' % self._taxon_id_map[taxon])
            if taxon.label:
                parts.append('label=%s' % _protect_attr(taxon.label))
            if taxon.has_annotations or (hasattr(taxon, "comments") and taxon.comments):
                # annotated/commented taxa need an open/close tag pair to
                # hold their <meta> / comment children
                dest.write("<%s>\n" % ' '.join(parts))
                # self.write_extensions(taxon, dest, indent_level=indent_level+2)
                self._write_annotations_and_comments(taxon, dest, indent_level=indent_level+2)
                dest.write(self.indent * (indent_level+1))
                dest.write("</otu>\n")
            else:
                # plain taxa are written as self-closing elements
                dest.write("<%s />\n" % ' '.join(parts))
        dest.write(self.indent * indent_level)
        dest.write('</otus>\n')
+
    def _write_tree_list(self, tree_list, dest, indent_level=1):
        """
        Writes ``tree_list`` as a <trees> element referencing its (already
        written) taxon namespace via the otus attribute.
        """
        dest.write(self.indent * indent_level)
        parts = []
        parts.append('trees')
        parts.append('id="%s"' % self._get_nexml_id(tree_list))
        if tree_list.label:
            parts.append('label=%s' % _protect_attr(tree_list.label))
        # assumes _write_taxon_namespace() has already registered this
        # namespace in _taxon_namespace_id_map (KeyError otherwise)
        parts.append('otus="%s"' % self._taxon_namespace_id_map[tree_list.taxon_namespace])
        dest.write("<%s>\n" % ' '.join(parts))
        if tree_list.has_annotations or (hasattr(tree_list, "comments") and tree_list.comments):
            self._write_annotations_and_comments(tree_list, dest,
                    indent_level=indent_level+1)
        for tree in tree_list:
            # NOTE(review): indent level is hard-coded to 2 rather than
            # indent_level+1; equivalent only at the default indent_level=1
            self._write_tree(tree=tree, dest=dest, indent_level=2)
        dest.write(self.indent * indent_level)
        dest.write('</trees>\n')
+
    def _compose_state_definition(self, state, state_alphabet, indent_level, member_state=False):
        """
        Returns a list of strings composing the XML definition of ``state``:
        a <state> element for fundamental states, or an
        <uncertain_state_set>/<polymorphic_state_set> element (with nested
        <member> children, composed recursively) for compound states.
        Assigns the state an xml id on first encounter.
        """
        parts = []
        if state not in self._state_id_map:
            self._state_id_map[state] = self._get_nexml_id(state)
        if member_state:
            # reference to an already-defined state from within a state set
            parts.append('%s<member state="%s"/>'
                                % (self.indent * indent_level, self._state_id_map[state]))
        elif state.state_denomination == state_alphabet.FUNDAMENTAL_STATE:
            parts.append('%s<state id="%s" symbol="%s" />'
                                % (self.indent * indent_level, self._state_id_map[state], state.symbol))
        else:
            # compound state: ambiguous or polymorphic set of member states
            if state.state_denomination == state_alphabet.AMBIGUOUS_STATE:
                tag = "uncertain_state_set"
            else:
                tag = "polymorphic_state_set"

            parts.append('%s<%s id="%s" symbol="%s">'
                            % (self.indent * indent_level, tag, self._state_id_map[state], state.symbol))
            for member in state.member_states:
                parts.extend(self._compose_state_definition(member, state_alphabet, indent_level+1, member_state=True))
            parts.append("%s</%s>" % ((self.indent * indent_level), tag))
        return parts
+
    def _write_char_matrix(self, char_matrix, dest, indent_level=1):
        """
        Writes ``char_matrix`` as a <characters> element, with rows marked
        up either as <seq> text (if self.markup_as_sequences) or as
        individual <cell> elements.
        """
        dest.write(self.indent * indent_level)
        parts = []
        parts.append('characters')
        parts.append('id="%s"' % self._get_nexml_id(char_matrix))
        if char_matrix.label:
            parts.append('label=%s' % _protect_attr(char_matrix.label))
        parts.append('otus="%s"' % self._taxon_namespace_id_map[char_matrix.taxon_namespace])
        # xsi:type = data type crossed with markup flavor, e.g.
        # "nex:DnaSeqs" or "nex:StandardCells"
        if char_matrix.data_type == "dna":
            xsi_datatype = 'nex:Dna'
        elif char_matrix.data_type == "rna":
            xsi_datatype = 'nex:Rna'
        elif char_matrix.data_type == "protein":
            xsi_datatype = 'nex:Protein'
        elif char_matrix.data_type == "restriction":
            xsi_datatype = 'nex:Restriction'
        elif char_matrix.data_type == "standard":
            xsi_datatype = 'nex:Standard'
        elif char_matrix.data_type == "continuous":
            xsi_datatype = 'nex:Continuous'
        else:
            raise Exception("Unrecognized character block data type.")
        if self.markup_as_sequences:
            xsi_markup = 'Seqs'
        else:
            xsi_markup = 'Cells'
        xsi_type = xsi_datatype + xsi_markup
        parts.append('xsi:type="%s"' % xsi_type)
        dest.write("<%s>\n" % ' '.join(parts))

        if char_matrix.has_annotations or (hasattr(char_matrix, "comments") and char_matrix.comments):
            self._write_annotations_and_comments(char_matrix, dest, indent_level=indent_level+1)

        # <format> must precede <matrix>; it also returns the
        # (taxon, column) -> char-type-id map used by <cell> markup below
        cell_char_type_id_map = self._write_format_section(char_matrix, dest, indent_level=indent_level+1)

        dest.write("%s<matrix>\n" % (self.indent * (indent_level+1)))

        # with new data model,  char_matrix == taxon_seq_map!
        # if char_matrix.taxon_seq_map.has_annotations:
        #     self._write_annotations_and_comments(char_matrix.taxon_seq_map, dest, indent_level=indent_level+1)

        for taxon in char_matrix:
            char_vector = char_matrix[taxon]
            # for col_idx, (char_value, cell_char_type, cell_annotations) in enumerate(char_vector):
            dest.write(self.indent*(indent_level+2))
            parts = []
            parts.append('row')
            parts.append('id="%s"' % self._get_nexml_id(char_vector))
            if taxon is not None:
                parts.append('otu="%s"' % self._taxon_id_map[taxon])
            dest.write("<%s>\n" % ' '.join(parts))
            if char_vector.has_annotations or (hasattr(char_vector, "comments") and char_vector.comments):
                self._write_annotations_and_comments(char_vector, dest, indent_level=indent_level+3)
            if self.markup_as_sequences:
                # molecular data is written without separators between
                # symbols; standard/continuous values are space-separated
                if char_matrix.data_type in ("dna", "rna", "protein", "restriction", "aa", "amino-acid"):
                    separator = ''
                else:
                    # standard or continuous
                    separator = ' '
                print_count = 1
                dest.write("{}<seq>".format(self.indent * (indent_level+3)))
                for cidx, c in enumerate(char_vector):
                    s = str(c)
                    if not s:
                        # a state without a symbol cannot be round-tripped in
                        # sequence markup; cell markup must be used instead
                        raise TypeError("Character %d in char_vector '%s' does not have a symbol defined for its character state:" % (cidx, char_vector.default_oid) \
                                    + " this matrix cannot be written in sequence format (set 'markup_as_sequences' to False)'")
                    if print_count == 1:
                        # start of a wrapped output line
                        dest.write("\n{}".format(self.indent * (indent_level+4)))
                    else:
                        dest.write(separator)
                    dest.write(s)
                    # wrap sequence output every 58 symbols
                    if print_count == 58:
                        print_count = 1
                    else:
                        print_count += 1
                dest.write("\n{}</seq>\n".format(self.indent * (indent_level+3)))
            else:
                for col_idx, (char_value, cell_char_type, cell_annotations) in enumerate(char_vector.iter_cells()):
                    parts = []
                    parts.append('%s<cell' % (self.indent*(indent_level+3)))
                    parts.append('char="%s"' % cell_char_type_id_map[ (taxon, col_idx) ])
                    if char_matrix.data_type == "continuous":
                        # continuous values are written literally, not as state refs
                        v = str(char_value)
                    else:
                        v = self._state_id_map[char_value]
                    parts.append('state="%s"' % v)
                    dest.write(' '.join(parts))
                    if cell_annotations is not None:
                        # annotated cells need an open/close tag pair
                        dest.write('>\n')
                        self._write_annotation_set(cell_annotations, dest, indent_level=indent_level+4)
                        dest.write('%s</cell>' % (self.indent*(indent_level+3)))
                    else:
                        dest.write('/>\n')
            dest.write(self.indent * (indent_level+2))
            dest.write('</row>\n')
        dest.write("%s</matrix>\n" % (self.indent * (indent_level+1)))
        dest.write(self.indent * indent_level)
        dest.write('</characters>\n')
+
+    def _write_tree(self, tree, dest, indent_level=0):
+        """
+        Writes a single DendroPy Tree object as a NEXML nex:tree
+        element.
+        """
+        parts = []
+        parts.append('tree')
+        parts.append('id="%s"' % self._get_nexml_id(tree))
+        if hasattr(tree, 'label') and tree.label:
+            parts.append('label=%s' % _protect_attr(tree.label))
+        if hasattr(tree, 'length_type') and tree.length_type:
+            parts.append('xsi:type="%s"' % _to_nexml_tree_length_type(tree.length_type))
+        else:
+            parts.append('xsi:type="nex:FloatTree"')
+        parts = ' '.join(parts)
+        dest.write('%s<%s>\n'
+                   % (self.indent * indent_level, parts))
+        if tree.has_annotations or (hasattr(tree, "comments") and tree.comments):
+            self._write_annotations_and_comments(tree, dest,
+                    indent_level=indent_level+1)
+        for node in tree.preorder_node_iter():
+            self._write_node(
+                    node=node,
+                    dest=dest,
+                    is_root=tree.is_rooted and node is tree.seed_node,
+                    indent_level=indent_level+1)
+        for edge in tree.preorder_edge_iter():
+            self._write_edge(
+                    edge=edge,
+                    dest=dest,
+                    is_root=tree.is_rooted and node is tree.seed_node,
+                    indent_level=indent_level+1)
+        dest.write('%s</tree>\n' % (self.indent * indent_level))
+
    def _write_to_nexml_open(self, dest, indent_level=0):
        "Writes the opening tag for a nexml element."
        parts = []
        parts.append('<?xml version="1.0" encoding="ISO-8859-1"?>')
        parts.append('<nex:nexml')
        parts.append('%sversion="0.9"' % (self.indent * (indent_level+1)))
        # namespaces that are always declared, in addition to any
        # collected from metadata annotations during body serialization
        ensured_namespaces = [
            ["", "http://www.nexml.org/2009"],
            ["xsi", "http://www.w3.org/2001/XMLSchema-instance"],
            ["xml", "http://www.w3.org/XML/1998/namespace"],
            ["nex", "http://www.nexml.org/2009"],
            ["xsd", "http://www.w3.org/2001/XMLSchema#"],
            # ["dendropy", "http://packages.python.org/DendroPy/"],
                ]
        # parts.append('%sxmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"' \
        #              % (self.indent * (indent_level+1)))
        # parts.append('%sxmlns:xml="http://www.w3.org/XML/1998/namespace"' \
        #              % (self.indent * (indent_level+1)))
        parts.append('%sxsi:schemaLocation="http://www.nexml.org/2009 ../xsd/nexml.xsd"'
                     % (self.indent * (indent_level+1)))
        # parts.append('%sxmlns="http://www.nexml.org/2009"'
        #              % (self.indent * (indent_level+1)))
        # parts.append('%sxmlns:nex="http://www.nexml.org/2009"'
        #              % (self.indent * (indent_level+1)))
        # declare namespaces gathered from annotations, rejecting any
        # prefix bound to two different URIs
        seen_prefixes = {}
        for prefix, uri in self._prefix_uri_tuples:
            if prefix in seen_prefixes:
                if seen_prefixes[prefix] != uri:
                    raise ValueError("Prefix '%s' mapped to multiple namespaces: '%s', '%s'" % (
                        prefix,
                        uri,
                        seen_prefixes[prefix]))
            seen_prefixes[prefix] = uri
            if prefix:
                prefix = ":" + prefix
            parts.append('%sxmlns%s="%s"'
                        % (self.indent * (indent_level+1), prefix, uri))
        # emit the ensured namespaces last, skipping any prefix the
        # annotations already declared
        for prefix, uri in ensured_namespaces:
            if prefix in seen_prefixes:
                continue
            if prefix:
                prefix = ":" + prefix
            parts.append('%sxmlns%s="%s"'
                        % (self.indent * (indent_level+1), prefix, uri))
        parts.append('>\n')
        dest.write('\n'.join(parts))
+
+    def _write_to_nexml_close(self, dest, indent_level=0):
+        "Closing tag for a nexml element."
+        dest.write('%s</nex:nexml>\n' % (self.indent*indent_level))
+
    def _write_node(self, node, dest, is_root, indent_level=0):
        "Writes out a NEXML node element."
        parts = []
        parts.append('<node')
        # register the node's xml id so edges can reference it as
        # source/target
        self._node_id_map[node] = self._get_nexml_id(node)
        parts.append('id="%s"' % self._node_id_map[node])
        if hasattr(node, 'label') and node.label:
            parts.append('label=%s' % _protect_attr(node.label))
        if hasattr(node, 'taxon') and node.taxon:
            parts.append('otu="%s"' % self._taxon_id_map[node.taxon])
        if is_root:
            parts.append('root="true"')
        parts = ' '.join(parts)
        dest.write('%s%s' % ((self.indent * indent_level), parts))
        if node.has_annotations or (hasattr(node, "comments") and node.comments):
            # annotated/commented nodes need an open/close tag pair
            dest.write('>\n')
            self._write_annotations_and_comments(node, dest, indent_level=indent_level+1)
            dest.write('%s</node>\n' % (self.indent * indent_level))
        else:
            dest.write(' />\n')
+
    def _write_edge(self, edge, dest, is_root, indent_level=0):
        "Writes out a NEXML edge element."
        # NOTE(review): the ``is_root`` parameter is accepted but never
        # used in this method — confirm intent.
        if edge and edge.head_node:
            parts = []
            if edge.tail_node is not None:
                tag = "edge"
                parts.append('<%s' % tag)
            else:
                # EDGE-ON-ROOT:
                # an edge with no tail node is the root's subtending edge,
                # written as <rootedge> per the nexml schema
                tag = "rootedge"
                parts.append('<%s' % tag)
            parts.append('id="%s"' % self._get_nexml_id(edge))
            # programmatically more efficent to do this in above
            # block, but want to maintain this tag order ...
            if edge.tail_node is not None:
                parts.append('source="%s"' % self._node_id_map[edge.tail_node])
            if edge.head_node is not None:
                parts.append('target="%s"' % self._node_id_map[edge.head_node])
            if hasattr(edge, 'length') and edge.length is not None:
                parts.append('length="%s"' % edge.length)
            if hasattr(edge, 'label') and edge.label:
                parts.append('label=%s' % _protect_attr(edge.label))

            # only write if we have more than just the 'edge' and '/' bit
            if len(parts) > 2:
                parts = ' '.join(parts)
                dest.write('%s%s' % ((self.indent * indent_level), parts))
                if edge.has_annotations or (hasattr(edge, "comments") and edge.comments):
                    # annotated/commented edges need an open/close tag pair
                    dest.write('>\n')
                    self._write_annotations_and_comments(edge, dest, indent_level=indent_level+1)
                    dest.write('%s</%s>\n' % ((self.indent * indent_level), tag))
                else:
                    dest.write(' />\n')
+
+    def _write_annotations_and_comments(self, item, dest, indent_level=0):
+        if item is not None:
+            self._write_annotations(item, dest, indent_level=indent_level)
+            self._write_comments(item, dest, indent_level=indent_level, newline=True)
+
+    def _write_comments(self, commented, dest, indent_level=0, newline=False):
+        if hasattr(commented, "comments") and commented.comments:
+            if newline:
+                post = "\n"
+            else:
+                post = ""
+            for comment in commented.comments:
+                dest.write('%s<!-- %s -->%s' % ((self.indent * indent_level),
+                    comment, post))
+
+    def _write_annotations(self, annotated, dest, indent_level=0):
+        "Writes out annotations for an Annotable object."
+        # import sys
+        if hasattr(annotated, "annotations"):
+            self._write_annotation_set(annotated.annotations, dest, indent_level)
+
+    def _write_annotation_set(self, annotation_set, dest, indent_level=0):
+        for annote in annotation_set:
+            if annote.is_hidden:
+                continue
+            dest.write(self._compose_annotation_xml(annote,
+                    indent=self.indent,
+                    indent_level=indent_level,
+                    prefix_uri_tuples=self._prefix_uri_tuples))
+            dest.write("\n")
+
+    def _compose_char_type_xml_for_continuous_type(self, indent_level, char_type_id=None):
+        if char_type_id is None:
+            char_type_id = self._get_nexml_id(object())
+        s = ('%s<char id="%s" />'
+            % ((self.indent*(indent_level)), char_type_id))
+        return char_type_id, s
+
+    def _compose_char_type_xml_for_state_alphabet(self, state_alphabet, indent_level, char_type_id=None):
+        if state_alphabet:
+            char_type_state = ' states="%s" ' % self._state_alphabet_id_map[state_alphabet]
+        else:
+            char_type_state = ' '
+        if char_type_id is None:
+            char_type_id = self._get_nexml_id(object())
+        s = ('%s<char id="%s"%s/>'
+            % ((self.indent*(indent_level)), char_type_id, char_type_state))
+        return char_type_id, s
+
+    def _compose_char_type_xml_for_character_type(self, character_type, indent_level):
+        state_alphabet = character_type.state_alphabet
+        return self._compose_char_type_xml_for_state_alphabet(
+                state_alphabet,
+                indent_level=indent_level,
+                char_type_id=self._get_nexml_id(character_type))
+
+    def _get_state_alphabet_for_char_matrix(self, char_matrix):
+        sa = None
+        if char_matrix.default_state_alphabet is not None:
+            sa = char_matrix.default_state_alphabet
+        elif len(char_matrix.state_alphabets) == 1:
+            sa = char_matrix.state_alphabets[0]
+        elif len(char_matrix.state_alphabets) > 1:
+            raise TypeError("Character cell %d for taxon '%s' does not have a state alphabet mapping given by the" % (col_idx, taxon.label)\
+                    + " 'character_type' property, and multiple state alphabets are defined for the containing" \
+                    + " character matrix with no default specified")
+        elif len(char_matrix.state_alphabets) == 0:
+            raise TypeError("Character cell %d for taxon '%s' does not have a state alphabet mapping given by the" % (col_idx, taxon.label)\
+                    + " 'character_type' property, and no state alphabets are defined for the containing" \
+                    + " character matrix")
+        return sa
+
    def _write_format_section(self, char_matrix, dest, indent_level):
        """
        Writes the <format> element for ``char_matrix`` (state alphabet
        definitions followed by <char> column declarations) and returns a
        map of (taxon, column index) -> char type xml id for use by the
        <cell> markup.
        """
        format_section_parts = []
        if hasattr(char_matrix, "state_alphabets"): #isinstance(char_matrix, dendropy.StandardCharacterMatrix):
            for state_alphabet in char_matrix.state_alphabets:
                self._state_alphabet_id_map[state_alphabet] = self._get_nexml_id(state_alphabet)
                format_section_parts.append('%s<states id="%s">'
                    % (self.indent * (indent_level+1), self._state_alphabet_id_map[state_alphabet]))
                # fundamental states must be defined before the compound
                # (polymorphic/ambiguous) sets that reference them as members
                for state in state_alphabet:
                    if state.state_denomination == state_alphabet.FUNDAMENTAL_STATE:
                        format_section_parts.extend(self._compose_state_definition(state, state_alphabet, indent_level+3))
                for state in state_alphabet:
                    if state.state_denomination == state_alphabet.POLYMORPHIC_STATE:
                        format_section_parts.extend(self._compose_state_definition(state, state_alphabet, indent_level+3))
                for state in state_alphabet:
                    if state.state_denomination == state_alphabet.AMBIGUOUS_STATE:
                        format_section_parts.extend(self._compose_state_definition(state, state_alphabet, indent_level+3))
                format_section_parts.append('%s</states>' % (self.indent * (indent_level+1)))
        cell_char_type_id_map = {}
        char_type_ids_written = set()
        for taxon in char_matrix:
            char_vector = char_matrix[taxon]
            for col_idx, (char_value, cell_char_type, cell_annotations) in enumerate(char_vector.iter_cells()):
                if cell_char_type is None:
                    # cell has no explicit character type: synthesize a
                    # column declaration from the matrix-level alphabet
                    if char_matrix.data_type == "continuous":
                        char_type_id, char_type_xml = self._compose_char_type_xml_for_continuous_type(indent_level=indent_level+1)
                    else:
                        sa = self._get_state_alphabet_for_char_matrix(char_matrix)
                        assert sa is not None
                        char_type_id, char_type_xml = self._compose_char_type_xml_for_state_alphabet(sa, indent_level=indent_level+1)
                else:
                    char_type_id, char_type_xml = self._compose_char_type_xml_for_character_type(cell_char_type, indent_level=indent_level+1)
                # emit each distinct <char> declaration only once
                if char_type_id not in char_type_ids_written:
                    format_section_parts.append(char_type_xml)
                    char_type_ids_written.add(char_type_id)
                cell_char_type_id_map[ (taxon, col_idx) ] = char_type_id
        if format_section_parts:
            dest.write("%s<format>\n" % (self.indent*(indent_level)))
            dest.write(('\n'.join(format_section_parts)) + '\n')
            dest.write("%s</format>\n" % (self.indent*(indent_level)))
        return cell_char_type_id_map
+
+    def _get_nexml_id(self, o):
+        try:
+            return self._object_xml_id[o]
+        except KeyError:
+            oid = "d{}".format(len(self._object_xml_id))
+            self._object_xml_id[o] = oid
+            return oid
+
    def _compose_annotation_xml(self,
            annote,
            indent="",
            indent_level=0,
            prefix_uri_tuples=None):
        """
        Returns the <meta> element string for annotation ``annote``,
        recursing into nested annotations. If ``prefix_uri_tuples`` is
        given, the annotation's (prefix, namespace) pair is added to it so
        the document root can declare the namespace.
        """
        parts = ["%s<meta" % (indent * indent_level)]
        value = annote.value
        # if value is not None:
        #     value = _protect_attr(value)
        # else:
        #     value = None
        if isinstance(value, list) or isinstance(value, tuple):
            # multi-valued annotations are flattened to a space-separated string
            value = _protect_attr(" ".join(str(v) for v in value))
        elif value is not None:
            value = _protect_attr(value)
        key = annote.prefixed_name
        # assert ":" in key
        if annote.annotate_as_reference:
            # resource annotation: value (if any) is a URI reference
            parts.append('xsi:type="nex:ResourceMeta"')
            parts.append('rel="%s"' % key)
            if value is not None:
                parts.append('href=%s' % value)
        else:
            # literal annotation: value is the content attribute
            parts.append('xsi:type="nex:LiteralMeta"')
            parts.append('property="%s"' % key)
            if value is not None:
                parts.append('content=%s' % value)
            else:
                parts.append('content=""')
        if annote.datatype_hint:
            parts.append('datatype="%s"'% annote.datatype_hint)
        parts.append('id="%s"' % self._get_nexml_id(annote))
        if prefix_uri_tuples is not None:
            prefix_uri_tuples.add((annote.name_prefix, annote.namespace))
        if len(annote.annotations) > 0:
            # nested annotations become child <meta> elements
            parts.append(">")
            for a in annote.annotations:
                parts.append("\n" + self._compose_annotation_xml(a, indent=indent, indent_level=indent_level+1, prefix_uri_tuples=prefix_uri_tuples))
            parts.append("\n%s</meta>" % (indent * indent_level))
        else:
            parts.append("/>")
        return " ".join(parts)
+
diff --git a/dendropy/dataio/nexmlyielder.py b/dendropy/dataio/nexmlyielder.py
new file mode 100644
index 0000000..7ca98fa
--- /dev/null
+++ b/dendropy/dataio/nexmlyielder.py
@@ -0,0 +1,89 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Implementation of NEXML-schema tree iterator.
+"""
+
+import sys
+if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
+    from dendropy.utility.filesys import pre_py34_open as open
+from dendropy.dataio import ioservice
+from dendropy.dataio import nexmlreader
+from dendropy.dataio import xmlprocessing
+
class NexmlTreeDataYielder(
        ioservice.TreeDataYielder,
        nexmlreader.NexmlReader):
    """
    Iterates over trees in NEXML-formatted sources, yielding trees one at a
    time instead of loading all data into memory at once.
    """

    def __init__(self,
            files=None,
            taxon_namespace=None,
            tree_type=None,
            **kwargs):
        """

        Parameters
        ----------
        files : iterable of sources
            Iterable of sources, which can either be strings specifying file
            paths or file-like objects open for reading. If a source element is
            a string (``isinstance(i,str) == True``), then it is assumed to be
            a path to a file. Otherwise, the source is assumed to be a file-like
            object.
        taxon_namespace : |TaxonNamespace| instance
            The operational taxonomic unit concept namespace to use to manage
            taxon definitions.
        \*\*kwargs : keyword arguments
            These will be passed directly to the base `nexmlreader.NexmlReader`
            class. See `nexmlreader.NexmlReader` for details.
        """
        ioservice.TreeDataYielder.__init__(self,
                files=files,
                taxon_namespace=taxon_namespace,
                tree_type=tree_type)
        nexmlreader.NexmlReader.__init__(self,
                **kwargs)
        self.attached_taxon_namespace = self.taxon_namespace

    ###########################################################################
    ## Implementation of DataYielder interface

    def _yield_items_from_stream(self, stream):
        """
        Parse the XML document preamble and taxon definitions up front, then
        build and yield each tree one at a time.
        """
        xml_doc = xmlprocessing.XmlDocument(file_obj=stream,
                subelement_factory=self._subelement_factory)
        self._namespace_registry = xml_doc.namespace_registry
        xml_root = xml_doc.root
        self._parse_taxon_namespaces(xml_root)
        tree_parser = nexmlreader._NexmlTreeParser(
                id_taxon_map=self._id_taxon_map,
                annotations_processor_fn=self._parse_annotations,
                )
        for trees_idx, trees_element in enumerate(xml_root.iter_trees()):
            trees_id = trees_element.get('id', "Trees" + str(trees_idx))
            otus_id = trees_element.get('otus', None)
            if otus_id is None:
                # Fix: report the id of the offending trees block; the
                # original formatted ``otus_id``, which is always None here.
                raise Exception("Taxa block not specified for trees block '{}'".format(trees_id))
            # Look-up is a validation check: tree construction itself keys
            # off ``otus_id``.
            taxon_namespace = self._id_taxon_namespace_map.get(otus_id, None)
            if not taxon_namespace:
                raise Exception("Tree block '{}': Taxa block '{}' not found".format(trees_id, otus_id))
            for tree_element in trees_element.findall_tree():
                tree_obj = self.tree_factory()
                tree_parser.build_tree(tree_obj, tree_element, otus_id)
                yield tree_obj
diff --git a/dendropy/dataio/nexusprocessing.py b/dendropy/dataio/nexusprocessing.py
new file mode 100644
index 0000000..f8413df
--- /dev/null
+++ b/dendropy/dataio/nexusprocessing.py
@@ -0,0 +1,533 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Specialized tokenizer for processing NEXUS/Newick streams.
+"""
+
+import re
+import io
+import numbers
+import decimal
+from dendropy.dataio.tokenizer import Tokenizer
+from dendropy.utility import container
+from dendropy.datamodel import basemodel
+
+##############################################################################
+## NexusTokenizer
+
class NexusTokenizer(Tokenizer):
    """
    Tokenizer specialized for NEXUS/NEWICK streams: bracketed comments are
    captured, single-quote quoting with doubled-quote escaping, and the
    standard NEXUS punctuation characters are captured as tokens.
    """

    def __init__(self, src,
            preserve_unquoted_underscores=False):
        Tokenizer.__init__(self,
            src=src,
            uncaptured_delimiters=list(" \t\n\r"),
            captured_delimiters=list("{}(),;:=\\\""),
            quote_chars="'",
            escape_quote_by_doubling=True,
            escape_chars="",
            comment_begin="[",
            comment_end="]",
            capture_comments=True,
            preserve_unquoted_underscores=preserve_unquoted_underscores)

    def _shift_delimiters(self, chars, from_list, to_list):
        # Move each character in ``chars`` from one delimiter list to the
        # other, tolerating absence and avoiding duplicate entries. Factors
        # out the four copy-pasted remove/append stanzas of the original.
        for ch in chars:
            try:
                from_list.remove(ch)
            except ValueError:
                pass
            if ch not in to_list:
                to_list.append(ch)

    def set_capture_eol(self, capture_eol):
        """
        Toggle whether end-of-line characters are returned as tokens
        (captured) or silently skipped (uncaptured).
        """
        if capture_eol:
            self._shift_delimiters("\n\r",
                    self.uncaptured_delimiters,
                    self.captured_delimiters)
        else:
            self._shift_delimiters("\n\r",
                    self.captured_delimiters,
                    self.uncaptured_delimiters)

    def set_hyphens_as_captured_delimiters(self, hyphens_as_captured_delimiters):
        """
        Toggle whether '-' is treated as a captured delimiter (needed, e.g.,
        when negative branch lengths must be split from labels).
        """
        if hyphens_as_captured_delimiters:
            if "-" not in self.captured_delimiters:
                self.captured_delimiters.append("-")
        else:
            try:
                self.captured_delimiters.remove("-")
            except ValueError:
                pass

    def require_next_token_ucase(self):
        """Return the next token, upper-cased; raise if exhausted."""
        t = self.require_next_token()
        t = t.upper()
        self.current_token = t
        return t

    def next_token_ucase(self):
        """Return the next token upper-cased, or None if exhausted."""
        try:
            t = self.__next__()
            t = t.upper()
            self.current_token = t
            return t
        except StopIteration:
            self.current_token = None
            return None

    def cast_current_token_to_ucase(self):
        """Upper-case the current token in place and return it."""
        if self.current_token:
            self.current_token = self.current_token.upper()
        return self.current_token

    def process_and_clear_comments_for_item(self,
            item,
            extract_comment_metadata):
        """
        Attach all comments captured so far to ``item`` (parsing metadata if
        requested), then clear the captured-comment buffer.
        """
        process_comments_for_item(item,
                self.captured_comments,
                extract_comment_metadata)
        del self.captured_comments[:]

    def skip_to_semicolon(self):
        """
        Consume tokens up to and including the next ';' (used to skip to the
        end of a NEXUS command), stopping early at end-of-stream.
        """
        token = self.next_token()
        # Fix idiom: use ``is not None`` instead of ``!= None``.
        while token != ';' and self._cur_char != "" and token is not None:
            token = self.next_token()
+
+###############################################################################
+## Taxon Handling
+
class NexusTaxonSymbolMapper(object):
    """
    Manages |TaxonNamespace| and |Taxon| object look-ups when
    parsing NEXUS and NEWICK formatted data.

    Operational taxonomic unit concepts in NEXUS files can be referenced using
    one of three types of symbols:

        - the "TRANSLATE" block token
        - the taxon label
        - the taxon number

    In the event of redundant or overdetermined symbols, the resolution order
    is as given above.

    This class encapsulates creating, looking-up and retrieving |Taxon|
    objects corresponding to operational taxonomic unit concept references
    encountered when reading NEXUS or NEWICK data sources from the
    |TaxonNamespace| that it wraps and manages. It keeps track of
    "TRANSLATE" block tokens, operational taxonomic unit labels, and
    operational taxonomic unit indexes in mapping containers that allow for
    quick retrieval of corresponding |Taxon| objects. The symbol look-up
    is case-insensitive, as per NEXUS/NEWICK convention, unless
    ``case_sensitive=True``.

    If a |Taxon| object is not found for a particular symbol, it will
    create a new |Taxon| object with that symbol for its label, and
    register it in all the other supplemental mappings appropriately.

    Note that the |TaxonNamespace| object passed to this class and the
    member |Taxon| objects should not be modified during the lifespan of
    this class, so that the supplemental mappings (in particular, the label
    mapping and the taxon number mapping) stay synchronized. To this end,
    the |TaxonNamespace| object is locked, and all |Taxon| object
    creation should be through this class's native methods.
    """

    def __init__(self,
            taxon_namespace,
            enable_lookup_by_taxon_number=True,
            case_sensitive=False):
        self._taxon_namespace = None
        # Mutability state of the wrapped namespace before this class locked
        # it; restored by restore_taxon_namespace_mutability().
        self.taxon_namespace_original_mutability_state = None
        self.case_sensitive = case_sensitive
        if not self.case_sensitive:
            self.token_taxon_map = container.CaseInsensitiveDict()
            self.label_taxon_map = container.CaseInsensitiveDict()
        else:
            self.token_taxon_map = {}
            self.label_taxon_map = {}
        # 1-based taxon number (as string) -> Taxon / taxon label.
        self.number_taxon_map = {}
        self.number_taxon_label_map = {}
        self.enable_lookup_by_taxon_number = enable_lookup_by_taxon_number
        self._set_taxon_namespace(taxon_namespace)

    def restore_taxon_namespace_mutability(self):
        """Restore the wrapped namespace's original mutability state."""
        if self._taxon_namespace is not None:
            if self.taxon_namespace_original_mutability_state is not None:
                self._taxon_namespace.is_mutable = self.taxon_namespace_original_mutability_state
        self.taxon_namespace_original_mutability_state = None

    def __del__(self):
        self.restore_taxon_namespace_mutability()

    def _get_taxon_namespace(self):
        return self._taxon_namespace
    def _set_taxon_namespace(self, taxon_namespace):
        # Unlock any previously-managed namespace before locking the new one.
        if self._taxon_namespace is not None:
            self.restore_taxon_namespace_mutability()
        self._taxon_namespace = taxon_namespace
        self.taxon_namespace_original_mutability_state = self._taxon_namespace.is_mutable
        self._taxon_namespace.is_mutable = False
        self.reset_supplemental_mappings()
    taxon_namespace = property(_get_taxon_namespace, _set_taxon_namespace)

    def reset_supplemental_mappings(self):
        """Rebuild all supplemental look-up maps from the wrapped namespace."""
        self.token_taxon_map.clear()
        if not self.case_sensitive:
            self.label_taxon_map = container.CaseInsensitiveDict(self._taxon_namespace.label_taxon_map())
        else:
            self.label_taxon_map = self._taxon_namespace.label_taxon_map()
        self.number_taxon_map.clear()
        # Fix: also clear the number->label map; the original left stale
        # entries behind when the namespace was replaced.
        self.number_taxon_label_map.clear()
        for idx, taxon in enumerate(self._taxon_namespace):
            s = str(idx+1)
            self.number_taxon_map[s] = taxon
            self.number_taxon_label_map[s] = taxon.label

    def add_translate_token(self, token, taxon):
        """Register a "TRANSLATE" block token for ``taxon``."""
        if not isinstance(token, str):
            token = str(token)
        self.token_taxon_map[token] = taxon

    def lookup_taxon_symbol(self, symbol, create_taxon_if_not_found=True):
        """
        Retrieve the |Taxon| for ``symbol``, resolving in order: TRANSLATE
        token, label, taxon number (if enabled). If not found and
        ``create_taxon_if_not_found`` is True, a new taxon is created with
        ``symbol`` as its label; otherwise None is returned.
        """
        if not isinstance(symbol, str):
            symbol = str(symbol)
        try:
            return self.token_taxon_map[symbol]
        except KeyError:
            pass
        try:
            return self.label_taxon_map[symbol]
        except KeyError:
            pass
        if self.enable_lookup_by_taxon_number:
            try:
                return self.number_taxon_map[symbol]
            except KeyError:
                pass
        if create_taxon_if_not_found:
            return self.new_taxon(symbol)
        return None

    def require_taxon_for_symbol(self, symbol):
        """Like ``lookup_taxon_symbol``, always creating on a miss."""
        return self.lookup_taxon_symbol(symbol=symbol, create_taxon_if_not_found=True)

    def _register_taxon_number(self, taxon):
        # Keep the number-based supplemental maps in sync for a taxon just
        # appended to the namespace. Fix: the original never updated
        # number_taxon_label_map here, leaving it out of sync with
        # number_taxon_map.
        taxon_number = str(len(self._taxon_namespace))
        self.number_taxon_map[taxon_number] = taxon
        self.number_taxon_label_map[taxon_number] = taxon.label

    def new_taxon(self, label):
        """Create a new taxon with ``label`` and register it in all maps."""
        # Temporarily restore mutability so the namespace accepts the taxon.
        self._taxon_namespace.is_mutable = self.taxon_namespace_original_mutability_state
        t = self._taxon_namespace.new_taxon(label)
        self._taxon_namespace.is_mutable = False
        self.label_taxon_map[label] = t
        self._register_taxon_number(t)
        return t

    def add_taxon(self, taxon):
        """Add an existing ``taxon`` to the namespace and register it."""
        self._taxon_namespace.is_mutable = self.taxon_namespace_original_mutability_state
        self._taxon_namespace.add_taxon(taxon)
        self._taxon_namespace.is_mutable = False
        self.label_taxon_map[taxon.label] = taxon
        self._register_taxon_number(taxon)
        return taxon
+
+###############################################################################
+## Metadata
+
# "key=value" fields in FigTree/BEAST-style "[&...]" comments: fields are
# comma-separated; a value may itself be a braced, comma-separated set.
FIGTREE_COMMENT_FIELD_PATTERN = re.compile(r'(.+?)=({.+?,.+?}|.+?)(,|$)')
# Same field shape for NHX-style "[&&NHX:...]" comments, but fields are
# colon-separated.
NHX_COMMENT_FIELD_PATTERN = re.compile(r'(.+?)=({.+?,.+?}|.+?)(:|$)')
+
def parse_comment_metadata_to_annotations(
        comment,
        annotations=None,
        field_name_map=None,
        field_value_types=None,
        strip_leading_trailing_spaces=True):
    """
    Returns set of |Annotation| objects corresponding to metadata
    given in comments.

    Parameters
    ----------
    ``comment`` : string
        A comment token.
    ``annotations`` : |AnnotationSet| or ``set``
        Set of |Annotation| objects to which to add this annotation.
    ``field_name_map`` : dict
        Maps field names (as given in the comment string) to the names to
        use in the metadata; unmapped names are used directly.
    ``field_value_types`` : dict
        Maps field names (as given in the comment string) to a value type
        (e.g. ``{"node-age" : float}``) used to cast parsed values.
    ``strip_leading_trailing_spaces`` : boolean
        Remove whitespace around keys and values.

    Returns
    -------
    metadata : ``set`` [|Annotation|]
        Set of |Annotation| objects corresponding to metadata parsed.
    """
    if annotations is None:
        annotations = set()
    if field_name_map is None:
        field_name_map = {}
    if field_value_types is None:
        field_value_types = {}
    # Determine the comment dialect from its prefix; anything unrecognized
    # is not metadata and is returned untouched.
    if comment.startswith("&&NHX:"):
        pattern = NHX_COMMENT_FIELD_PATTERN
        body = comment[6:]
    elif comment.startswith("&&"):
        pattern = NHX_COMMENT_FIELD_PATTERN
        body = comment[2:]
    elif comment.startswith("&"):
        pattern = FIGTREE_COMMENT_FIELD_PATTERN
        body = comment[1:]
    else:
        return annotations
    for field_match in pattern.findall(body):
        key = field_match[0]
        val = field_match[1]
        if strip_leading_trailing_spaces:
            key = key.strip()
            val = val.strip()
        value_type = field_value_types.get(key, None)
        if val.startswith('{'):
            # Braced values are comma-separated collections.
            members = val[1:-1].split(',')
            if value_type is not None:
                val = [value_type(member) for member in members]
            else:
                val = members
        elif val.startswith('"') and val.endswith('"'):
            val = val[1:-1]
        elif val.lower() == "false":
            val = False
        elif val.lower() == "true":
            val = True
        elif value_type is not None:
            val = value_type(val)
        key = field_name_map.get(key, key)
        annotations.add(basemodel.Annotation(
                name=key,
                value=val,
                ))
    return annotations
+
def process_comments_for_item(item,
        item_comments,
        extract_comment_metadata):
    """
    Attach each comment in ``item_comments`` to ``item``: when metadata
    extraction is enabled, "&"-prefixed comments that parse into annotations
    are added to ``item.annotations``; everything else is appended verbatim
    to ``item.comments``.
    """
    if item is None or not item_comments:
        return
    for comment in item_comments:
        parsed = None
        if extract_comment_metadata and comment.startswith("&"):
            parsed = parse_comment_metadata_to_annotations(comment)
        if parsed:
            item.annotations.update(parsed)
        else:
            item.comments.append(comment)
+
+###############################################################################
+## NEWICK/NEXUS formatting support.
+
def format_annotated_value(
        value,
        annotated_real_value_format_specifier,
        real_value_format_specifier=None,
        override_annotation_format_specifier=False):
    """
    Render ``value`` as a string using the most appropriate format spec.

    For real values (float/Decimal), the annotation's own specifier wins
    unless ``override_annotation_format_specifier`` is True and a general
    ``real_value_format_specifier`` is given. Non-real values only ever use
    the annotation's own specifier.
    """
    is_real = isinstance(value, (float, decimal.Decimal))
    if (is_real
            and override_annotation_format_specifier
            and real_value_format_specifier is not None):
        fmtspec = real_value_format_specifier
    elif annotated_real_value_format_specifier is not None:
        fmtspec = annotated_real_value_format_specifier
    elif is_real and real_value_format_specifier is not None:
        fmtspec = real_value_format_specifier
    else:
        fmtspec = ""
    return "{:{fmtspec}}".format(value, fmtspec=fmtspec)
+
def format_item_annotations_as_comments(
        annotated,
        nhx=False,
        real_value_format_specifier=None,
        override_annotation_format_specifier=False):
    """
    ``annotated`` - Annotated object
    ``nhx``       - render as NHX '[&&NHX:...]'? Otherwise as '[& ...]'
    ``real_value_format_specifier`` - Format specification for real/float
                   values, in Python's string format specification
                   mini-language (e.g. ".8f", ".4E", "8.4f"). If an
                   annotation carries its own ``format_specifier``, that is
                   preferred unless ``override_annotation_format_specifier``
                   is `True`.
    ``override_annotation_format_specifier``
                   If `True`, ``real_value_format_specifier`` takes
                   precedence over per-annotation specifiers. Defaults to
                   `False`.

    Hidden annotations are skipped; dict-valued annotations are not
    representable and raise TypeError.
    """
    if not annotated.annotations:
        return ""
    rendered = []
    for annote in annotated.annotations:
        if annote.is_hidden:
            continue
        value = annote.value
        if isinstance(value, dict):
            raise TypeError("Dictionary types not supported for rendering as NEWICK-formatted metadata")
        if isinstance(value, (list, tuple)):
            # Collections render as brace-wrapped, comma-separated members.
            members = ",".join(
                    format_annotated_value(
                        value=member,
                        annotated_real_value_format_specifier=annote.real_value_format_specifier,
                        real_value_format_specifier=real_value_format_specifier,
                        override_annotation_format_specifier=override_annotation_format_specifier)
                    for member in value)
            rendered.append("%s={%s}" % (annote.name, members))
        else:
            formatted = format_annotated_value(
                    value=value,
                    annotated_real_value_format_specifier=annote.real_value_format_specifier,
                    real_value_format_specifier=real_value_format_specifier,
                    override_annotation_format_specifier=override_annotation_format_specifier)
            rendered.append("{key}={value}".format(key=annote.name, value=formatted))
    if nhx:
        return "[&&NHX:" + ":".join(rendered) + "]"
    return "[&" + ",".join(rendered) + "]"
+
def escape_nexus_token(label, preserve_spaces=False, quote_underscores=True):
    """
    Properly protects a NEXUS token.

    Parameters
    ----------
    label : str or None
        The token text; None renders as the empty string.
    preserve_spaces : bool
        If False (default), spaces/tabs in otherwise-safe labels are
        replaced by underscores (the NEXUS soft-quote convention);
        if True, such labels are single-quoted instead.
    quote_underscores : bool
        If True (default), labels containing literal underscores are
        single-quoted so the underscores are not read back as spaces.

    Returns
    -------
    str
        The escaped token.
    """
    if label is None:
        return ""
    # Fix idiom: raw strings for the regex patterns; the originals relied on
    # invalid escape sequences (deprecated since Python 3.6). The character
    # classes themselves are unchanged.
    if (not preserve_spaces
            and "_" not in label
            and not re.search(r'[\(\)\[\]\{\}\\\/\,\;\:\=\*\'\"\`\+\-\<\>\0\t\n]', label)):
        # Safe to soft-quote: spaces/tabs become underscores.
        return label.replace(' ', '_').replace('\t', '_')
    if (re.search(r'[\(\)\[\]\{\}\\\/\,\;\:\=\*\'\"\`\+\-\<\>\0\t\n\r ]', label)
            or (quote_underscores and "_" in label)):
        # Hard-quote: wrap in single quotes, doubling any internal quotes.
        return "'{}'".format(label.replace("'", "''"))
    return label
+
def bitmask_as_newick_string(split, taxon_set, preserve_spaces=False, quote_underscores=True):
    """
    Represents a split as a newick string.

    Bit ``i`` of ``split`` (least-significant first) assigns taxon ``i`` to
    one side of the bipartition. The trivial splits (0 and the all-taxa
    mask) render as a single polytomy of all taxa.
    """
    labels = [
            escape_nexus_token(
                    label,
                    preserve_spaces=preserve_spaces,
                    quote_underscores=quote_underscores)
            for label in taxon_set.labels()]

    # Trivial splits: do not show a root bipartition.
    if split == 0 or (split == taxon_set.all_taxa_bitmask()):
        return "({});".format(",".join(labels))

    inside = []
    outside = []
    position = 0
    remaining = split
    while remaining >= 0 and position < len(labels):
        target = inside if remaining & 1 else outside
        target.append(labels[position])
        position += 1
        remaining = remaining >> 1
    assert (len(inside) + len(outside)) == len(labels)
    return "(({}), ({}));".format(", ".join(inside), ", ".join(outside))
+
def group_ranges(L):
    """
    Collapses a list of integers into a list of the start and end of
    consecutive runs of numbers. Returns a generator of generators.

    >>> [list(x) for x in group_ranges([1, 2, 3, 5, 6, 8])]
    [[1, 3], [5, 6], [8]]
    """
    # Fixed: the original used ``itertools``, which is not imported in this
    # module, so every call raised NameError. Rewritten with a plain scan
    # (no new imports needed); output shape is unchanged — each run yields a
    # generator over [start, end], or just [start] for a singleton run.
    runs = []
    run_start = run_end = None
    for value in L:
        if run_start is None:
            run_start = run_end = value
        elif value == run_end + 1:
            # Extends the current consecutive run.
            run_end = value
        else:
            runs.append((run_start, run_end))
            run_start = run_end = value
    if run_start is not None:
        runs.append((run_start, run_end))
    for start, end in runs:
        bounds = [start] if start == end else [start, end]
        yield (x for x in bounds)
+
def get_rooting_argument(**kwargs):
    """
    Map an ``is_rooted`` or ``is_unrooted`` keyword argument to the
    corresponding rooting directive string.

    Exactly one of ``is_rooted``/``is_unrooted`` may be passed. `True`/`False`
    force the corresponding interpretation; `None` (or passing neither
    keyword) selects the default.

    Returns
    -------
    str
        'force-rooted', 'force-unrooted', or 'default-unrooted'.

    Raises
    ------
    ValueError
        If both keywords are passed.
    """
    # Fixed: the original wrote ``elif "is_rooted":`` — testing the
    # truthiness of a string literal (always True) instead of membership in
    # ``kwargs`` — which raised KeyError whenever only 'is_unrooted' (or
    # neither keyword) was supplied, and left ``is_rooted`` unbound in the
    # no-keyword case.
    if "is_rooted" in kwargs and "is_unrooted" in kwargs:
        raise ValueError("Must specify exactly one of 'is_rooted' or 'is_unrooted'")
    if "is_rooted" in kwargs:
        is_rooted = kwargs["is_rooted"]
    elif "is_unrooted" in kwargs:
        is_unrooted = kwargs["is_unrooted"]
        # None means "unspecified" and maps to the default.
        is_rooted = None if is_unrooted is None else not is_unrooted
    else:
        is_rooted = None
    if is_rooted is True:
        return "force-rooted"
    elif is_rooted is False:
        return "force-unrooted"
    else:
        return "default-unrooted"
diff --git a/dendropy/dataio/nexusreader.py b/dendropy/dataio/nexusreader.py
new file mode 100644
index 0000000..fceb1d8
--- /dev/null
+++ b/dendropy/dataio/nexusreader.py
@@ -0,0 +1,1391 @@
+
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Implementation of NEXUS-schema data reader.
+"""
+
+import re
+import collections
+from dendropy.utility import error
+from dendropy.utility import textprocessing
+from dendropy.dataio import ioservice
+from dendropy.dataio import nexusprocessing
+from dendropy.dataio import newickreader
+
+###############################################################################
+## NexusReader
+
+class NexusReader(ioservice.DataReader):
+    "Encapsulates loading and parsing of a NEXUS schema file."
+
+    class BlockTerminatedException(Exception):
+        pass
+
+    class NexusReaderError(error.DataParseError):
+        def __init__(self, message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            error.DataParseError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class NotNexusFileError(NexusReaderError):
+        def __init__(self, message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            NexusReader.NexusReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class LinkRequiredError(NexusReaderError):
+        def __init__(self, message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            NexusReader.NexusReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class NoCharacterBlocksFoundError(NexusReaderError):
+        def __init__(self, message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            NexusReader.NexusReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class UndefinedBlockError(NexusReaderError):
+        def __init__(self, message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            NexusReader.NexusReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class MultipleBlockWithSameTitleError(NexusReaderError):
+        def __init__(self, message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            NexusReader.NexusReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class InvalidCharacterStateSymbolError(NexusReaderError):
+        def __init__(self, message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            NexusReader.NexusReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class InvalidContinuousCharacterValueError(NexusReaderError):
+        def __init__(self, message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            NexusReader.NexusReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class TooManyTaxaError(NexusReaderError):
+
+        def __init__(self,
+                taxon_namespace,
+                max_taxa,
+                label,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            message = "Cannot add taxon with label '{}': Declared number of taxa ({}) already defined: {}".format(
+                            label,
+                            max_taxa,
+                            str(["{}".format(t.label) for t in taxon_namespace]))
+            NexusReader.NexusReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class UndefinedTaxonError(NexusReaderError):
+
+        def __init__(self,
+                taxon_namespace,
+                label,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            message = "Taxon '{}' is not in the set of defined taxa: {}".format(
+                            label,
+                            str(["{}".format(t.label) for t in taxon_namespace]))
+            NexusReader.NexusReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class TooManyCharactersError(NexusReaderError):
+
+        def __init__(self,
+                max_characters,
+                character,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            message = "Cannot add '{}' to sequence: declared sequence length ({}) will be exceeded".format(
+                    character, max_characters)
+            NexusReader.NexusReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    class IncompleteBlockError(NexusReaderError):
+        def __init__(self, message,
+                line_num=None,
+                col_num=None,
+                stream=None):
+            NexusReader.NexusReaderError.__init__(self,
+                    message=message,
+                    line_num=line_num,
+                    col_num=col_num,
+                    stream=stream)
+
+    ###########################################################################
+    ## Life-cycle and Setup
+
    def __init__(self, **kwargs):
        """

        Keyword Arguments
        -----------------
        rooting : string, {['default-unrooted'], 'default-rooted', 'force-unrooted', 'force-rooted'}
            Specifies how trees in the data source should be interpreted with
            respect to their rooting:

                'default-unrooted' [default]:
                    All trees are interpreted as unrooted unless a '[&R]'
                    comment token explicitly specifies them as rooted.
                'default-rooted'
                    All trees are interpreted as rooted unless a '[&U]'
                    comment token explicitly specifies them as unrooted.
                'force-unrooted'
                    All trees are unconditionally interpreted as unrooted.
                'force-rooted'
                    All trees are unconditionally interpreted as rooted.

        edge_length_type : type, default: ``float``
            Specifies the type of the edge lengths (``int`` or ``float``). Tokens
            interpreted as branch lengths will be cast to this type.
            Defaults to ``float``.
        suppress_edge_lengths : boolean, default: `False`
            If `True`, edge length values will not be processed. If `False`,
            edge length values will be processed.
        extract_comment_metadata : boolean, default: `True`
            If `True` (default), any comments that begin with '&' or '&&' will
            be parsed and stored as part of the annotation set of the
            corresponding object (accessible through the ``annotations``
            attribute of the object). This requires that the comment
            contents conform to a particular format (NHX or BEAST: 'field =
            value'). If `False`, then the comments will not be parsed,
            but will be instead stored directly as elements of the ``comments``
            list attribute of the associated object.
        store_tree_weights : boolean, default: `False`
            If `True`, process the tree weight (e.g. "[&W 1/2]") comment
            associated with each tree, if any. Defaults to `False`.
        encode_splits : boolean, default: `False`
            If `True`, split hash bitmasks will be calculated and attached to
            the edges.
        finish_node_fn : function object, default: `None`
            If specified, this function will be applied to each node after
            it has been constructed.
        case_sensitive_taxon_labels : boolean, default: `False`
            If `True`, then taxon labels are case sensitive (e.g., "P.regius"
            and "P.REGIUS" will be treated as different operational taxonomic
            unit concepts). Otherwise, taxon label interpretation will be made
            without regard for case.
        preserve_underscores : boolean, default: `False`
            If `True`, unquoted underscores in labels will *not* be converted
            to spaces. Defaults to `False`: all underscores not protected by
            quotes will be converted to spaces.
        suppress_internal_node_taxa : boolean, default: `True`
            If `False`, internal node labels will be instantiated into
            |Taxon| objects. If `True`, internal node labels
            will *not* be instantiated as |Taxon| objects (the labels are
            left as plain strings).
        suppress_leaf_node_taxa : boolean, default: `False`
            If `False`, leaf (external) node labels will be instantiated
            into |Taxon| objects. If `True`, leaf (external) node
            labels will *not* be instantiated as |Taxon| objects (the labels
            are left as plain strings).
        terminating_semicolon_required : boolean, default: `True`
            If `True` [default], then a tree statement that does not end in a
            semi-colon is an error. If `False`, then no error will be raised.
        unconstrained_taxa_accumulation_mode : bool
            If `True`, then no error is raised even if the number of taxon
            names defined exceeds the number of declared taxa (as specified by
            'NTAX'). Defaults to `False`.
        automatically_substitute_missing_taxa_blocks : bool
            If `True` then, if a taxon namespace is linked to by title but is
            not given in the data file, then, if one and exactly one other
            taxon namespace has been given in the data file, this taxon
            namespace will be used; if there are multiple taxon namespaces,
            then if ``automatically_create_missing_taxa_blocks`` is `True` a
            new taxon namespace will be created, otherwise an error is raised.
            Default is `False`: if a taxon namespace is linked to by title but
            is not given in the data file, then an error is raised.
        automatically_create_missing_taxa_blocks : bool
            If `True` then taxon namespaces linked to by title but not given in
            the data file will be automatically created. If `False` taxon
            namespaces linked to by title but not given in the data file will
            result in error.
        exclude_chars : bool
            If `True`, then character data will not be read. Defaults to
            `False`: character data will be read.
        exclude_trees : bool
            If `True`, then tree data will not be read. Defaults to
            `False`: tree data will be read.
        attached_taxon_namespace : |TaxonNamespace|
            Unify all operational taxonomic unit definitions in this namespace.
        ignore_unrecognized_keyword_arguments : boolean, default: `False`
            If `True`, then unsupported or unrecognized keyword arguments will
            not result in an error. Default is `False`: unsupported keyword
            arguments will result in an error.
        """

        # base
        ioservice.DataReader.__init__(self)

        # Following are NEXUS-parsing specific (i.e., not used by NEWICK
        # parsers), and need to be removed so as not to cause problems with our
        # keyword validation scheme
        self.exclude_chars = kwargs.pop("exclude_chars", False)
        self.exclude_trees = kwargs.pop("exclude_trees", False)
        self._data_type = kwargs.pop("data_type", "standard")
        self.attached_taxon_namespace = kwargs.pop("attached_taxon_namespace", None)

        # Following are undocumented for a GOOD reason! They are experimental and subject to change!
        self.unconstrained_taxa_accumulation_mode = kwargs.pop("unconstrained_taxa_accumulation_mode", False)
        self.automatically_create_missing_taxa_blocks = kwargs.pop("automatically_create_missing_taxa_blocks", False)
        self.automatically_substitute_missing_taxa_blocks = kwargs.pop("automatically_substitute_missing_taxa_blocks", False)

        # The following are used by NewickReader in addition to NexusReader, So
        # they are extracted/set here and then forwarded on ...
        # (note: get(), not pop() -- the keywords are left in place so that
        # they are also seen by the NewickReader constructed below)
        self.preserve_underscores = kwargs.get('preserve_underscores', False)
        self.case_sensitive_taxon_labels = kwargs.get('case_sensitive_taxon_labels', False)
        self.extract_comment_metadata = kwargs.get('extract_comment_metadata', True)

        # As above, but the NEXUS format default is different from the NEWICK
        # default, so this rather convoluted approach
        # self.extract_comment_metadata = kwargs.pop('extract_comment_metadata', True)
        # kwargs["extract_comment_metadata"] = self.extract_comment_metadata

        # Create newick handler
        self.newick_reader = newickreader.NewickReader(**kwargs)

        # Set up parsing meta-variables
        self._interleave = False
        self._symbols = ""
        self._gap_char = '-'
        self._missing_char = '?'
        self._match_char = frozenset('.')
        self._file_specified_ntax = None        # from DIMENSIONS NTAX
        self._file_specified_nchar = None       # from DIMENSIONS NCHAR
        self._nexus_tokenizer = None
        self._taxon_namespace_factory = None
        self._tree_list_factory = None
        self._char_matrix_factory = None
        self._global_annotations_target = None
        self._taxon_namespaces = []
        self._char_matrices = []
        self._tree_lists = []
        self._product = None
+
+    ###########################################################################
+    ## Reader Implementation
+
    def _read(self,
            stream,
            taxon_namespace_factory=None,
            tree_list_factory=None,
            char_matrix_factory=None,
            state_alphabet_factory=None,
            global_annotations_target=None):
        """
        Instantiates and returns a DataSet object based on the
        NEXUS-formatted contents given in the file-like object ``stream``.

        The factory callables are stashed on the instance for use by the
        block-parsing routines; the parse itself is driven by
        ``_parse_nexus_stream()``, after which everything accumulated
        (taxon namespaces, tree lists, character matrices) is packaged
        into a ``Product``, cached on ``self._product``, and returned.
        """
        self._taxon_namespace_factory = taxon_namespace_factory
        self._tree_list_factory = tree_list_factory
        self._char_matrix_factory = char_matrix_factory
        self._state_alphabet_factory = state_alphabet_factory
        self._global_annotations_target = global_annotations_target
        self._parse_nexus_stream(stream)
        self._product = self.Product(
                taxon_namespaces=self._taxon_namespaces,
                tree_lists=self._tree_lists,
                char_matrices=self._char_matrices)
        return self._product
+
+    ###########################################################################
+    ## Tokenizer Control
+
+    def create_tokenizer(self, stream, **kwargs):
+        self._nexus_tokenizer = nexusprocessing.NexusTokenizer(
+                stream, **kwargs)
+        return self._nexus_tokenizer
+
    def set_stream(self, stream):
        # Redirect the existing tokenizer to read from a new stream.
        # NOTE(review): assumes create_tokenizer() has already been called;
        # if not, self._nexus_tokenizer is None and this raises — confirm
        # callers guarantee ordering.
        return self._nexus_tokenizer.set_stream(stream)
+
+    ###########################################################################
+    ## Book-keeping Control
+
+    def _nexus_error(self, message, error_type=None):
+        if error_type is None:
+            error_type = NexusReader.NexusReaderError
+        e = error_type(
+                message=message,
+                line_num=self._nexus_tokenizer.token_line_num,
+                col_num=self._nexus_tokenizer.token_column_num,
+                stream=self._nexus_tokenizer.src)
+        return e
+
+    def _too_many_taxa_error(self, taxon_namespace, label):
+        e = NexusReader.TooManyTaxaError(
+                taxon_namespace=taxon_namespace,
+                max_taxa=self._file_specified_ntax,
+                label=label,
+                line_num=self._nexus_tokenizer.token_line_num,
+                col_num=self._nexus_tokenizer.token_column_num,
+                stream=self._nexus_tokenizer.src)
+        return e
+
+    def _undefined_taxon_error(self, taxon_namespace, label):
+        e = NexusReader.UndefinedTaxonError(
+                taxon_namespace=taxon_namespace,
+                label=label,
+                line_num=self._nexus_tokenizer.token_line_num,
+                col_num=self._nexus_tokenizer.token_column_num,
+                stream=self._nexus_tokenizer.src)
+        return e
+
+    def _too_many_characters_error(self, character):
+        e = NexusReader.TooManyCharactersError(
+                max_characters=self._file_specified_nchar,
+                character=character,
+                line_num=self._nexus_tokenizer.token_line_num,
+                col_num=self._nexus_tokenizer.token_column_num,
+                stream=self._nexus_tokenizer.src)
+        return e
+
+    def _debug_print(self, message=None, out=None):
+        import sys
+        if out is None:
+            out = sys.stdout
+        if message is None:
+            message = ""
+        else:
+            message = " --- ({})".format(message)
+        out.write("--- Current Position: Line {}, Column {}; Current token [starting at line {} and column {}]: '{}'{}\n".format(
+            self._nexus_tokenizer.current_line_num,
+            self._nexus_tokenizer.current_column_num,
+            self._nexus_tokenizer.token_line_num,
+            self._nexus_tokenizer.token_column_num,
+            self._nexus_tokenizer.current_token,
+            message))
+
+    ###########################################################################
+    ## Data Management
+
+    def _new_taxon_namespace(self, title=None):
+        if self.attached_taxon_namespace is not None:
+            return self.attached_taxon_namespace
+        taxon_namespace = self._taxon_namespace_factory(label=title)
+        self._taxon_namespaces.append(taxon_namespace)
+        return taxon_namespace
+
    def _get_taxon_namespace(self, title=None):
        """
        Return the taxon namespace identified by ``title``.

        Resolution order:
        - an attached (user-supplied) namespace always wins;
        - with no title: the sole defined namespace (created on demand if
          none exist yet); several existing namespaces require a 'LINK';
        - with a title: case-insensitive match on namespace labels, with
          optional substitution/creation fallbacks for missing blocks.
        """
        if self.attached_taxon_namespace is not None:
            return self.attached_taxon_namespace
        if title is None:
            if len(self._taxon_namespaces) == 0:
                return self._new_taxon_namespace(title=title)
            elif len(self._taxon_namespaces) == 1:
                return self._taxon_namespaces[0]
            else:
                raise self._nexus_error("Multiple taxa blocks defined: require 'LINK' statement", NexusReader.LinkRequiredError)
        else:
            found = []
            for tns in self._taxon_namespaces:
                # Case-insensitive title match; untitled (label is None)
                # namespaces are skipped.
                if tns.label is not None and tns.label.upper() == title.upper():
                    found.append(tns)
            if len(found) == 0:
                if self.automatically_substitute_missing_taxa_blocks:
                    if len(self._taxon_namespaces) == 1:
                        # Exactly one candidate: substitute it for the
                        # missing titled block.
                        return self._taxon_namespaces[0]
                    elif not self.automatically_create_missing_taxa_blocks:
                        # Several candidates and creation not allowed:
                        # substitution is ambiguous.
                        raise self._nexus_error("Taxa block with title '{}' not found, and multiple taxa blocks are defined for this file: unable to automatically substitute".format(title), NexusReader.UndefinedBlockError)
                if self.automatically_create_missing_taxa_blocks:
                    return self._new_taxon_namespace(title=title)
                raise self._nexus_error("Taxa block with title '{}' not found".format(title), NexusReader.UndefinedBlockError)
            elif len(found) > 1:
                raise self._nexus_error("Multiple taxa blocks with title '{}' defined".format(title), NexusReader.MultipleBlockWithSameTitleError)
            return found[0]
+
+    def _get_taxon_symbol_mapper(self, taxon_namespace):
+        taxon_symbol_mapper = nexusprocessing.NexusTaxonSymbolMapper(
+                taxon_namespace=taxon_namespace,
+                enable_lookup_by_taxon_number=True,
+                case_sensitive=self.case_sensitive_taxon_labels)
+        return taxon_symbol_mapper
+
+    def _new_char_matrix(self, data_type, taxon_namespace, title=None):
+        # if data_type is None:
+        #     data_type = "standard"
+        char_matrix = self._char_matrix_factory(
+                data_type,
+                taxon_namespace=taxon_namespace,
+                label=title)
+        self._char_matrices.append(char_matrix)
+        return char_matrix
+
    def _new_state_alphabet(self, *args, **kwargs):
        # Thin pass-through to the externally-supplied state alphabet
        # factory (set in _read()).
        return self._state_alphabet_factory(*args, **kwargs)
+
+    def _get_char_matrix(self, title=None):
+        if title is None:
+            if len(self._char_matrices) == 1:
+                return self._char_matrices[0]
+            elif len(self._char_matrices) == 0:
+                raise self._nexus_error("No character matrices defined", NexusReader.NoCharacterBlocksFoundError)
+            else:
+                raise self._nexus_error("Multiple character matrices defined: require 'LINK' statement", NexusReader.LinkRequiredError)
+        else:
+            found = []
+            for cm in self._char_matrices:
+                if cm.label.upper() == title.upper():
+                    found.append(cm)
+            if len(found) == 0:
+                raise self._nexus_error("Character block with title '{}' not found".format(title), NexusReader.UndefinedBlockError)
+            elif len(found) > 1:
+                raise self._nexus_error("Multiple character blocks with title '{}' defined".format(title), NexusReader.MultipleBlockWithSameTitleError)
+            return found[0]
+
+    def _new_tree_list(self, taxon_namespace, title=None):
+        tree_list = self._tree_list_factory(
+                taxon_namespace=taxon_namespace,
+                label=title)
+        self._tree_lists.append(tree_list)
+        return tree_list
+
+    def _get_tree_list(self, title=None):
+        if title is None:
+            if len(self._tree_lists) == 1:
+                return self._tree_lists[0]
+            elif len(self._tree_lists) == 0:
+                raise self._nexus_error("No tree blocks defined", NexusReader.NoCharacterBlocksFoundError)
+            else:
+                raise self._nexus_error("Multiple tree blocks defined: require 'LINK' statement", NexusReader.LinkRequiredError)
+        else:
+            found = []
+            for tlst in self._tree_lists:
+                if tlst.label.upper() == title.upper():
+                    found.append(tlst)
+            if len(found) == 0:
+                raise self._nexus_error("Trees block with title '{}' not found".format(title), NexusReader.UndefinedBlockError)
+            elif len(found) > 1:
+                raise self._nexus_error("Multiple trees blocks with title '{}' defined".format(title), NexusReader.MultipleBlockWithSameTitleError)
+            return found[0]
+
+    ###########################################################################
+    ## Main Stream Parse Driver
+
    def _parse_nexus_stream(self, stream):
        """
        Main file parsing driver: verifies the '#NEXUS' header, then scans
        for each 'BEGIN <block>' and dispatches to the appropriate block
        parser until end-of-file.
        """
        if self._nexus_tokenizer is None:
            self.create_tokenizer(stream,
                preserve_unquoted_underscores=self.preserve_underscores)
        else:
            self._nexus_tokenizer.set_stream(stream)
        token = self._nexus_tokenizer.next_token()
        if token.upper() != "#NEXUS":
            raise self._nexus_error("Expecting '#NEXUS', but found '{}'".format(token),
                    NexusReader.NotNexusFileError)
        while not self._nexus_tokenizer.is_eof():
            # Scan forward to the next 'BEGIN' keyword.
            token = self._nexus_tokenizer.next_token_ucase()
            while token != None and token != 'BEGIN' and not self._nexus_tokenizer.is_eof():
                token = self._nexus_tokenizer.next_token_ucase()
            # Comments preceding a block are treated as document-level
            # annotations.
            self._nexus_tokenizer.process_and_clear_comments_for_item(
                    self._global_annotations_target,
                    self.extract_comment_metadata)
            token = self._nexus_tokenizer.next_token_ucase()  # the block name
            if token == 'TAXA':
                self._parse_taxa_block()
            elif token == 'CHARACTERS' or token == 'DATA':
                self._parse_characters_data_block()
            elif token == 'TREES':
                self._parse_trees_block()
            elif token in ['SETS', 'ASSUMPTIONS', 'CODONS']:
                # NOTE(review): when exclude_chars is True this block is not
                # consumed here — confirm the outer scan-to-BEGIN loop
                # tolerates the unconsumed remainder.
                if not self.exclude_chars:
                    self._nexus_tokenizer.skip_to_semicolon() # move past BEGIN command
                    link_title = None
                    block_title = None
                    while not (token == 'END' or token == 'ENDBLOCK') \
                            and not self._nexus_tokenizer.is_eof() \
                            and not token==None:
                        token = self._nexus_tokenizer.next_token_ucase()
                        if token == 'TITLE':
                            block_title = self._parse_title_statement()
                        elif token == "LINK":
                            link_title = self._parse_link_statement().get('characters')
                        elif token == 'CHARSET':
                            self._parse_charset_statement(block_title=block_title, link_title=link_title)
                        elif token == 'BEGIN':
                            raise self._nexus_error("'BEGIN' found without completion of previous block",
                                    NexusReader.IncompleteBlockError)
                    self._nexus_tokenizer.skip_to_semicolon() # move past END command
            elif token == 'BEGIN':
                raise self._nexus_error("'BEGIN' found without completion of previous block",
                        NexusReader.IncompleteBlockError)
            else:
                # Unknown block type: consume and discard through its END.
                token = self._consume_to_end_of_block(token)
+
+    ###########################################################################
+    ## TAXA BLOCK
+
    def _parse_taxa_block(self):
        """
        Parses a TAXA block: handles TITLE, DIMENSIONS, and TAXLABELS
        commands, consuming through the terminating 'END'/'ENDBLOCK'.
        Assumes the tokenizer is positioned on the block name.
        """
        token = ''
        # EOF inside a TAXA block is a hard error; disallow it for the
        # duration of this block (restored on exit below).
        self._nexus_tokenizer.allow_eof = False
        self._nexus_tokenizer.skip_to_semicolon() # move past BEGIN statement
        title = None
        taxon_namespace = None
        while not (token == 'END' or token == 'ENDBLOCK'):
            token = self._nexus_tokenizer.next_token_ucase()
            if token == "TITLE":
                # A titled block gets its own new namespace labeled with
                # the title.
                token = self._parse_title_statement()
                taxon_namespace = self._new_taxon_namespace(token)
            if token == 'DIMENSIONS':
                self._parse_dimensions_statement()
            if token == 'TAXLABELS':
                # Untitled block: create an anonymous namespace on demand.
                if taxon_namespace is None:
                    taxon_namespace = self._new_taxon_namespace()
                self._nexus_tokenizer.process_and_clear_comments_for_item(
                        self._global_annotations_target,
                        self.extract_comment_metadata)
                self._parse_taxlabels_statement(taxon_namespace)
        self._nexus_tokenizer.skip_to_semicolon() # move past END statement
        self._nexus_tokenizer.allow_eof = True
+
+    def _get_taxon(self, taxon_namespace, label):
+        if not self._file_specified_ntax or len(taxon_namespace) < self._file_specified_ntax:
+            taxon = taxon_namespace.require_taxon(label=label,
+                    is_case_sensitive=self.case_sensitive_taxon_labels)
+        else:
+            taxon = taxon_namespace.get_taxon(label=label,
+                    is_case_sensitive=self.case_sensitive_taxon_labels)
+        if taxon is None:
+            raise self._too_many_taxa_error(taxon_namespace=taxon_namespace, label=label)
+        return taxon
+
    def _parse_taxlabels_statement(self, taxon_namespace=None):
        """
        Processes a TAXLABELS command. Assumes that the file reader is
        positioned right after the "TAXLABELS" token in a TAXLABELS command.
        """
        if taxon_namespace is None:
            taxon_namespace = self._get_taxon_namespace()
        token = self._nexus_tokenizer.next_token()
        while token != ';':
            label = token
            taxon = taxon_namespace.get_taxon(label=label)
            if taxon is None:
                # Enforce the declared NTAX limit only when accumulating
                # into our own (non-attached) namespace and not in
                # unconstrained mode.
                # NOTE(review): assumes NTAX was declared before TAXLABELS;
                # if self._file_specified_ntax is still None this comparison
                # raises TypeError on Python 3 — confirm.
                if len(taxon_namespace) >= self._file_specified_ntax and not self.attached_taxon_namespace and not self.unconstrained_taxa_accumulation_mode:
                    raise self._too_many_taxa_error(taxon_namespace=taxon_namespace, label=label)
                taxon = taxon_namespace.new_taxon(label=label)
            token = self._nexus_tokenizer.next_token()
            # Comments trailing the label annotate the taxon itself.
            self._nexus_tokenizer.process_and_clear_comments_for_item(taxon,
                    self.extract_comment_metadata)
+
+    ###########################################################################
+    ## LINK/TITLE PARSERS (How Mesquite handles multiple TAXA blocks)
+
+    def _parse_title_statement(self):
+        """
+        Processes a MESQUITE 'TITLE' statement.
+        Assumes current token is 'TITLE'
+        """
+        if self._nexus_tokenizer.cast_current_token_to_ucase() != "TITLE":
+            raise self._nexus_error("Expecting 'TITLE' token, but instead found '{}'".format(token))
+        title = self._nexus_tokenizer.require_next_token()
+        sc = self._nexus_tokenizer.require_next_token()
+        if sc != ";":
+            raise self._nexus_error("Expecting ';' token, but instead found '{}'".format(sc))
+        return title
+
+    def _parse_link_statement(self):
+        """
+        Processes a MESQUITE 'LINK' statement.
+        """
+        # TODO: this is now pretty ugly
+        # need to refactor with more abstraction
+        links = {}
+        token = self._nexus_tokenizer.next_token_ucase()
+        while token != ';':
+            if token == 'TAXA':
+                token = self._nexus_tokenizer.next_token()
+                if token != "=":
+                    raise self._nexus_error("expecting '=' after link taxa")
+                token = self._nexus_tokenizer.next_token()
+                links['taxa'] = token
+                token = self._nexus_tokenizer.next_token()
+            if token == 'CHARACTERS':
+                token = self._nexus_tokenizer.next_token()
+                if token != "=":
+                    raise self._nexus_error("expecting '=' after link characters")
+                token = self._nexus_tokenizer.next_token()
+                links['characters'] = token
+                token = self._nexus_tokenizer.next_token()
+        if token != ";":
+            self._nexus_tokenizer.skip_to_semicolon()
+        return links
+
+    ###########################################################################
+    ## CHARACTER/DATA BLOCK PARSERS AND SUPPORT
+
    def _parse_characters_data_block(self):
        """
        Parses a CHARACTERS or DATA block, dispatching the TITLE, LINK,
        DIMENSIONS, FORMAT, and MATRIX commands it contains. When
        ``self.exclude_chars`` is set, the entire block is consumed
        unparsed.
        """
        token = self._nexus_tokenizer.cast_current_token_to_ucase()
        if token != "CHARACTERS" and token != "DATA":
            raise self._nexus_error("Expecting 'CHARACTERS' or 'DATA' token, but instead found '{}'".format(token))
        if self.exclude_chars:
            # Character data not wanted: skip the whole block.
            self._consume_to_end_of_block(self._nexus_tokenizer.current_token)
            return
        self._nexus_tokenizer.skip_to_semicolon() # move past BEGIN command
        block_title = None
        link_title = None
        self._data_type = "standard" # set as default
        while (token != 'END'
                and token != 'ENDBLOCK'
                and not self._nexus_tokenizer.is_eof()
                and not token==None):
            token = self._nexus_tokenizer.next_token_ucase()
            if token == 'TITLE':
                block_title = self._parse_title_statement()
            elif token == "LINK":
                link_title = self._parse_link_statement().get('taxa')
            elif token == 'DIMENSIONS':
                self._parse_dimensions_statement()
            elif token == 'FORMAT':
                self._parse_format_statement()
            elif token == 'MATRIX':
                self._parse_matrix_statement(block_title=block_title, link_title=link_title)
            elif token == 'BEGIN':
                raise self._nexus_error("'BEGIN' found without completion of previous block",
                        NexusReader.IncompleteBlockError)
        self._nexus_tokenizer.skip_to_semicolon() # move past END command
+
+    def _build_state_alphabet(self, char_block, symbols):
+        if self._gap_char and self._gap_char in symbols:
+            symbols = [s for s in symbols if s != self._gap_char]
+        sa = self._new_state_alphabet(
+                fundamental_states=symbols,
+                no_data_symbol=self._missing_char,
+                gap_symbol=self._gap_char,
+                case_sensitive=False)
+        char_block.state_alphabets = [sa]
+        char_block.default_state_alphabet = char_block.state_alphabets[0]
+
+    def _parse_format_statement(self):
+        """
+        Processes a FORMAT command. Assumes that the file reader is
+        positioned right after the "FORMAT" token in a FORMAT command.
+        """
+        token = self._nexus_tokenizer.require_next_token_ucase()
+        while token != ';':
+            if token == 'DATATYPE':
+                token = self._nexus_tokenizer.require_next_token_ucase()
+                if token == '=':
+                    token = self._nexus_tokenizer.require_next_token_ucase()
+                    if token == "DNA" or token == "NUCLEOTIDES":
+                        self._data_type = "dna"
+                    elif token == "RNA":
+                        self._data_type = "rna"
+                    elif token == "NUCLEOTIDE":
+                        self._data_type = "nucleotide"
+                    elif token == "PROTEIN":
+                        self._data_type = "protein"
+                    elif token == "CONTINUOUS":
+                        self._data_type = "continuous"
+                    else:
+                        # defaults to STANDARD elif token == "STANDARD":
+                        self._data_type = "standard"
+                        self._symbols = "01"
+                else:
+                    raise self._nexus_error("Expecting '=' after DATATYPE keyword")
+                token = self._nexus_tokenizer.require_next_token_ucase()
+            elif token == 'SYMBOLS':
+                token = self._nexus_tokenizer.require_next_token_ucase()
+                if token == '=':
+                    token = self._nexus_tokenizer.require_next_token_ucase()
+                    if token == '"':
+                        self._symbols = ""
+                        token = self._nexus_tokenizer.require_next_token_ucase()
+                        while token != '"':
+                            if token not in self._symbols:
+                                self._symbols = self._symbols + token
+                            token = self._nexus_tokenizer.require_next_token_ucase()
+                    else:
+                        raise self._nexus_error("Expecting '\"' before beginning SYMBOLS list")
+                else:
+                    raise self._nexus_error("Expecting '=' after SYMBOLS keyword")
+                token = self._nexus_tokenizer.require_next_token_ucase()
+            elif token == 'GAP':
+                token = self._nexus_tokenizer.require_next_token_ucase()
+                if token == '=':
+                    token = self._nexus_tokenizer.require_next_token_ucase()
+                    self._gap_char = token
+                else:
+                    raise self._nexus_error("Expecting '=' after GAP keyword")
+                token = self._nexus_tokenizer.require_next_token_ucase()
+            elif token == 'INTERLEAVE':
+                token = self._nexus_tokenizer.require_next_token_ucase()
+                if token == '=':
+                    token = self._nexus_tokenizer.require_next_token_ucase()
+                    if token.startswith("N"):
+                        self._interleave = False
+                    else:
+                        self._interleave = True
+                    token = self._nexus_tokenizer.require_next_token_ucase()
+                else:
+                    self._interleave = True
+            elif token == 'MISSING':
+                token = self._nexus_tokenizer.require_next_token_ucase()
+                if token == '=':
+                    token = self._nexus_tokenizer.require_next_token_ucase()
+                    self._missing_char = token
+                else:
+                    raise self._nexus_error("Expecting '=' after MISSING keyword")
+                token = self._nexus_tokenizer.require_next_token_ucase()
+            elif token == 'MATCHCHAR':
+                token = self._nexus_tokenizer.require_next_token_ucase()
+                if token == '=':
+                    token = self._nexus_tokenizer.require_next_token_ucase()
+                    self._match_char = frozenset([token, token.lower()])
+                else:
+                    raise self._nexus_error("Expecting '=' after MISSING keyword")
+                token = self._nexus_tokenizer.require_next_token_ucase()
+            elif token == 'BEGIN':
+                raise self._nexus_error("'BEGIN' found without completion of previous block",
+                        NexusReader.IncompleteBlockError)
+            else:
+                token = self._nexus_tokenizer.require_next_token_ucase()
+
+    def _parse_dimensions_statement(self):
+        """
+        Processes a DIMENSIONS command. Assumes that the file reader is
+        positioned right after the "DIMENSIONS" token in a DIMENSIONS command.
+        """
+        token = self._nexus_tokenizer.require_next_token_ucase()
+        while token != ';':
+            if token == 'NTAX':
+                token = self._nexus_tokenizer.require_next_token_ucase()
+                if token == '=':
+                    token = self._nexus_tokenizer.require_next_token_ucase()
+                    if token.isdigit():
+                        self._file_specified_ntax = int(token)
+                    else:
+                        raise self._nexus_error('Expecting numeric value for NTAX')
+                else:
+                    raise self._nexus_error("Expecting '=' after NTAX keyword")
+            elif token == 'NCHAR':
+                token = self._nexus_tokenizer.require_next_token_ucase()
+                if token == '=':
+                    token = self._nexus_tokenizer.require_next_token_ucase()
+                    if token.isdigit():
+                        self._file_specified_nchar = int(token)
+                    else:
+                        raise self._nexus_error("Expecting numeric value for NCHAR")
+                else:
+                    raise self._nexus_error("Expecting '=' after NCHAR keyword")
+            elif token == 'BEGIN':
+                raise self._nexus_error("'BEGIN' found without completion of previous block",
+                        NexusReader.IncompleteBlockError)
+            token = self._nexus_tokenizer.require_next_token_ucase()
+
+    def _parse_matrix_statement(self, block_title=None, link_title=None):
+        """
+        Processes a MATRIX command. Assumes that the file reader
+        is positioned right after the "MATRIX" token in a MATRIX command,
+        and that NTAX and NCHAR have been specified accurately.
+        """
+        if not self._file_specified_ntax:
+            raise self._nexus_error('NTAX must be defined by DIMENSIONS command to non-zero value before MATRIX command')
+        elif not self._file_specified_nchar:
+            raise self._nexus_error('NCHAR must be defined by DIMENSIONS command to non-zero value before MATRIX command')
+        taxon_namespace = self._get_taxon_namespace(link_title)
+        char_block = self._new_char_matrix(
+                self._data_type,
+                taxon_namespace=taxon_namespace,
+                title=block_title)
+        if self._data_type == "continuous":
+            self._process_continuous_matrix_data(char_block)
+        else:
+            self._process_discrete_matrix_data(char_block)
+
+    def _process_continuous_matrix_data(self, char_block):
+        taxon_namespace = char_block.taxon_namespace
+        token = self._nexus_tokenizer.next_token()
+        first_sequence_defined = None
+        if self._interleave:
+            try:
+                while token != ";" and not self._nexus_tokenizer.is_eof():
+                    taxon = self._get_taxon(taxon_namespace=taxon_namespace, label=token)
+                    self._read_continuous_character_values(char_block[taxon])
+                    # if first_sequence_defined is None:
+                    #     first_sequence_defined = char_block[taxon]
+                    token = self._nexus_tokenizer.next_token()
+            except NexusReader.BlockTerminatedException:
+                token = self._nexus_tokenizer.next_token()
+        else:
+            while token != ';' and not self._nexus_tokenizer.is_eof():
+                taxon = self._get_taxon(taxon_namespace=taxon_namespace, label=token)
+                self._read_continuous_character_values(char_block[taxon])
+                # if first_sequence_defined is None:
+                #     first_sequence_defined = char_block[taxon]
+                if len(char_block[taxon]) < self._file_specified_nchar:
+                    raise self._nexus_error("Insufficient characters given for taxon '{}': expecting {} but only found {} ('{}')".format(taxon.label, self._file_specified_nchar, len(char_block[taxon]), char_block[taxon].symbols_as_string()))
+                token = self._nexus_tokenizer.next_token()
+        # if self._interleave:
+        #     raise NotImplementedError("Continuous interleaved characters in NEXUS schema not yet supported")
+        # taxon_namespace = char_block.taxon_namespace
+        # token = self._nexus_tokenizer.next_token()
+        # while token != ';' and not self._nexus_tokenizer.is_eof():
+        #     taxon = self._get_taxon(taxon_namespace=taxon_namespace, label=token)
+        #     while len(char_block[taxon]) < self._file_specified_nchar and not self._nexus_tokenizer.is_eof():
+        #         # char_group = self._nexus_tokenizer.next_token(ignore_punctuation="-+")
+        #         char_group = self._nexus_tokenizer.next_token()
+        #         char_block[taxon].append(dataobject.CharacterDataCell(value=float(char_group)))
+        #     if len(char_block[taxon]) < self._file_specified_nchar:
+        #         raise self._nexus_error("Insufficient characters given for taxon '%s': expecting %d but only found %d ('%s')" \
+        #             % (taxon.label, self._file_specified_nchar, len(char_block[taxon]), char_block[taxon].symbols_as_string()))
+        #     token = self._nexus_tokenizer.next_token()
+
+    def _process_discrete_matrix_data(self, char_block):
+        if self._data_type == "standard":
+            self._build_state_alphabet(char_block, self._symbols)
+        taxon_namespace = char_block.taxon_namespace
+        token = self._nexus_tokenizer.next_token()
+        state_alphabet = char_block.default_state_alphabet
+        first_sequence_defined = None
+        if self._interleave:
+            try:
+                while token != ";" and not self._nexus_tokenizer.is_eof():
+                    taxon = self._get_taxon(taxon_namespace=taxon_namespace, label=token)
+                    self._read_character_states(char_block[taxon], state_alphabet, first_sequence_defined)
+                    if first_sequence_defined is None:
+                        first_sequence_defined = char_block[taxon]
+                    token = self._nexus_tokenizer.next_token()
+            except NexusReader.BlockTerminatedException:
+                token = self._nexus_tokenizer.next_token()
+        else:
+            while token != ';' and not self._nexus_tokenizer.is_eof():
+                taxon = self._get_taxon(taxon_namespace=taxon_namespace, label=token)
+                self._read_character_states(char_block[taxon], state_alphabet, first_sequence_defined)
+                if first_sequence_defined is None:
+                    first_sequence_defined = char_block[taxon]
+                if len(char_block[taxon]) < self._file_specified_nchar:
+                    raise self._nexus_error("Insufficient characters given for taxon '%s': expecting %d but only found %d ('%s')" \
+                        % (taxon.label, self._file_specified_nchar, len(char_block[taxon]), char_block[taxon].symbols_as_string()))
+                token = self._nexus_tokenizer.next_token()
+
+    def _get_state_for_multistate_tokens(self,
+            state_char_seq,
+            multistate_type,
+            state_alphabet):
+        try:
+            state = state_alphabet.match_state(state_char_seq,
+                    state_denomination=multistate_type)
+        except KeyError:
+            try:
+                if multistate_type == state_alphabet.AMBIGUOUS_STATE:
+                    sae = state_alphabet.new_ambiguous_state(
+                            symbol=None,
+                            member_state_symbols=state_char_seq)
+                else:
+                    sae = state_alphabet.new_polymorphic_state(
+                            symbol=None,
+                            member_state_symbols=state_char_seq)
+            except KeyError:
+                raise self._nexus_error("Unrecognized state symbols encountered in multistate sequence: '{}'".format(state_char_seq))
+            else:
+                return sae
+        else:
+            return state
+
+    ###########################################################################
+    ## TREE / TREE BLOCK PARSERS
+
+    def _parse_tree_statement(self, tree_factory, taxon_symbol_mapper):
+        """
+        Processes a TREE command. Assumes that the file reader is
+        positioned right after the "TREE" token in a TREE command.
+        Calls on the NewickStatementParser of the trees module.
+        """
+        token = self._nexus_tokenizer.next_token()
+        if token == '*':
+            token = self._nexus_tokenizer.next_token()
+        tree_name = token
+        token = self._nexus_tokenizer.next_token()
+        pre_tree_comments = self._nexus_tokenizer.pull_captured_comments()
+        if token != '=':
+            raise self._nexus_error("Expecting '=' in definition of Tree '%s' but found '%s'" % (tree_name, token))
+        tree_comments = self._nexus_tokenizer.pull_captured_comments()
+        # advance to '('; comments will be processed by newick reader
+        self._nexus_tokenizer.next_token()
+        tree = self._build_tree_from_newick_tree_string(tree_factory, taxon_symbol_mapper)
+        tree.label = tree_name
+        nexusprocessing.process_comments_for_item(tree, pre_tree_comments, self.extract_comment_metadata)
+        nexusprocessing.process_comments_for_item(tree, tree_comments, self.extract_comment_metadata)
+        # if self.extract_comment_metadata:
+        #     annotations = nexustokenizer.parse_comment_metadata(tree_comments)
+        #     for annote in annotations:
+        #         tree.annotations.add(annote)
+        #     if pre_tree_metadata_comments:
+        #         pre_tree_annotations = nexustokenizer.parse_comment_metadata(pre_tree_metadata_comments)
+        #         for annote in pre_annotations:
+        #             tree.annotations.add(annote)
+        # if tree_comments is not None and len(tree_comments) > 0:
+        #     tree.comments.extend(tree_comments)
+        # if self._nexus_tokenizer.current_token != ';':
+        #     self._nexus_tokenizer.skip_to_semicolon()
+        return tree
+
+    def _build_tree_from_newick_tree_string(self, tree_factory, taxon_symbol_mapper):
+        tree = self.newick_reader._parse_tree_statement(
+                nexus_tokenizer=self._nexus_tokenizer,
+                tree_factory=tree_factory,
+                taxon_symbol_map_fn=taxon_symbol_mapper.require_taxon_for_symbol)
+        return tree
+
+    def _parse_translate_statement(self, taxon_namespace, taxon_symbol_mapper=None):
+        """
+        Processes a TRANSLATE command. Assumes that the file reader is
+        positioned right after the "TRANSLATE" token in a TRANSLATE command.
+        """
+        token = self._nexus_tokenizer.current_token
+        if taxon_symbol_mapper is None:
+            taxon_symbol_mapper = self._get_taxon_symbol_mapper(taxon_namespace=taxon_namespace)
+        else:
+            assert taxon_symbol_mapper.taxon_namespace is taxon_namespace
+        if self._file_specified_ntax is None:
+            # Not yet parsed TAXA block: NEXUS file without TAXA block
+            # Badly-formed NEXUS file, yet widely-found in the wild
+            # Override namespace modification lock
+            taxon_namespace.is_mutable = True
+        while True:
+            translation_token = self._nexus_tokenizer.next_token()
+            if translation_token == ";" and not self._nexus_tokenizer.is_token_quoted:
+                raise self._nexus_error("Expecting translation token but found ';' instead")
+            translation_label = self._nexus_tokenizer.next_token()
+            try:
+                taxon = taxon_namespace.require_taxon(label=translation_label)
+            except error.ImmutableTaxonNamespaceError:
+                exc = self._undefined_taxon_error(taxon_namespace=taxon_namespace, label=translation_label)
+                exc.__context__ = None # Python 3.0, 3.1, 3.2
+                exc.__cause__ = None # Python 3.3, 3.4
+                raise exc
+            taxon_symbol_mapper.add_translate_token(translation_token, taxon)
+            token = self._nexus_tokenizer.next_token() # ","
+            if (not token) or (token == ';'):
+                break
+            if token != ',':
+                raise self._nexus_error("Expecting ',' in TRANSLATE statement after definition for %s = '%s', but found '%s' instead." % (translation_token, translation_label, token))
+        return taxon_symbol_mapper
+
    def _parse_trees_block(self):
        """
        Parse a TREES block.

        Expectations:
            - current token: "TREES" [part of "BEGIN TREES"]

        Handles optional LINK, TITLE, and TRANSLATE statements preceding
        the TREE statements themselves. The tree list, taxon namespace,
        and taxon-symbol mapper are all created lazily, on first need.
        On return, the tokenizer is positioned past the block's
        terminating END/ENDBLOCK command.
        """
        token = self._nexus_tokenizer.cast_current_token_to_ucase()
        if token != "TREES":
            raise self._nexus_error("Expecting 'TREES' token, but instead found '{}'".format(token))
        if self.exclude_trees:
            # Client asked to skip trees: fast-forward to end of block.
            self._consume_to_end_of_block(self._nexus_tokenizer.current_token)
            return
        self._nexus_tokenizer.skip_to_semicolon() # move past "BEGIN TREES" command
        link_title = None           # title of LINKed TAXA block, if any
        taxon_namespace = None      # resolved lazily from ``link_title``
        taxon_symbol_mapper = None  # translation-token/label -> taxon resolver
        trees_block = None          # tree list; created on first TREE statement
        block_title = None          # TITLE of this TREES block, if any
        while ((not self._nexus_tokenizer.is_eof())
                and token is not None
                and token != 'END'
                and token != 'ENDBLOCK'):
            token = self._nexus_tokenizer.next_token_ucase()
            if token == 'LINK':
                link_title = self._parse_link_statement().get("taxa")
            elif token == 'TITLE':
                block_title = self._parse_title_statement()
                token = "" # clear; repopulate at start of loop
            elif token == 'TRANSLATE':
                if taxon_namespace is None:
                    taxon_namespace = self._get_taxon_namespace(link_title)
                taxon_symbol_mapper = self._parse_translate_statement(taxon_namespace)
                token = "" # clear; repopulate at start of loop
            elif token == 'TREE':
                if taxon_namespace is None:
                    taxon_namespace = self._get_taxon_namespace(link_title)
                if taxon_symbol_mapper is None:
                    taxon_symbol_mapper = self._get_taxon_symbol_mapper(taxon_namespace=taxon_namespace)
                pre_tree_comments = self._nexus_tokenizer.pull_captured_comments()
                if trees_block is None:
                    trees_block = self._new_tree_list(taxon_namespace=taxon_namespace, title=block_title)
                # All comments leading up to the first 'TREE' statement assumed
                # to belong to the TreeList corresponding to the TREES block
                nexusprocessing.process_comments_for_item(
                        trees_block,
                        pre_tree_comments,
                        self.extract_comment_metadata)
                tree_factory = trees_block.new_tree
                while True:
                    ## After the following, the current token
                    ## will be the token immediately following
                    ## the terminating semi-colon of a tree
                    ## statement. Typically, this will be
                    ## 'TREE' if there is another tree, or
                    ## 'END'/'ENDBLOCK'.
                    tree = self._parse_tree_statement(
                            tree_factory=tree_factory,
                            taxon_symbol_mapper=taxon_symbol_mapper)
                    if self._nexus_tokenizer.is_eof() or not self._nexus_tokenizer.current_token:
                        break
                    if self._nexus_tokenizer.cast_current_token_to_ucase() != "TREE":
                        token = self._nexus_tokenizer.current_token
                        break
            elif token == 'BEGIN':
                raise self._nexus_error("'BEGIN' found without completion of previous block",
                        NexusReader.IncompleteBlockError)
        self._nexus_tokenizer.skip_to_semicolon() # move past END command
+
+    def _parse_charset_statement(self, block_title=None, link_title=None):
+        """
+        Parses a character set description. Assumes token stream is positioned right after 'charset' command.
+        """
+        char_matrix = self._get_char_matrix(title=link_title)
+        keyword = self._nexus_tokenizer.current_token
+        token = self._nexus_tokenizer.next_token()
+        if self._nexus_tokenizer.is_eof() or not token:
+            raise self._nexus_error('Unexpected end of file or null token')
+        else:
+            if not token:
+                raise self._nexus_error("Unexpected end of file or null token")
+            else:
+                charset_name = token
+                token = self._nexus_tokenizer.next_token()
+                if not token:
+                    raise self._nexus_error("Unexpected end of file or null token")
+                elif token != '=':
+                    raise self._nexus_error('Expecting "=" after character set name "%s", but instead found "%s"' % (charset_name, token))
+                else:
+                    positions = self._parse_positions(adjust_to_zero_based=True)
+                char_matrix.new_character_subset(charset_name, positions)
+
    def _parse_positions(self, adjust_to_zero_based=True, verify=True):
        """
        Parses a character position list. Expects next character read to be the first item in a position list.

        Supports single positions ("5"), the keyword "ALL", and ranges
        with an optional step ("1-10", "1-10\\3", "2-."), where "."
        stands for the last position (NCHAR). When expanding a range,
        positions beyond NCHAR are silently dropped.

        Parameters
        ----------
        adjust_to_zero_based : bool
            If `True`, convert from the 1-based NEXUS convention to
            0-based indices before returning.
        verify : bool
            If `True`, raise if any explicitly listed position exceeds
            NCHAR.

        Returns
        -------
        list of int
            Sorted, de-duplicated positions.
        """
        positions = []
        # hyphens_as_tokens = self._nexus_tokenizer.hyphens_as_tokens
        # self._nexus_tokenizer.hyphens_as_tokens = True
        # Hyphens must be surfaced as their own tokens so that ranges
        # ("1-10") can be recognized; restored to normal before returning.
        self._nexus_tokenizer.set_hyphens_as_captured_delimiters(True)
        token = self._nexus_tokenizer.next_token()
        max_positions = self._file_specified_nchar

        if self._nexus_tokenizer.is_eof() or not token:
            raise self._nexus_error('Unexpected end of file or null token')

        # NOTE: the loop deliberately carries the current token between
        # iterations: after a single position, the token that ended it
        # (possibly the next position's digits) is re-examined at the top.
        while token != ';' and token != ',' and not self._nexus_tokenizer.is_eof():
            if not token:
                break
            if token.upper() == 'ALL':
                positions = range(1, max_positions + 1)
                break
            elif token.isdigit():
                start = int(token)
                token = self._nexus_tokenizer.next_token()
                if token:
                    if token == ',' or token.isdigit() or token == ';':
                        positions.append(start)
                    elif token == '-':
                        token = self._nexus_tokenizer.next_token()
                        if token:
                            if token.isdigit() or token == '.':
                                if token == '.':
                                    # "." is shorthand for the last position.
                                    end = max_positions
                                    #token = self._nexus_tokenizer.next_token()
                                else:
                                    end = int(token)
                                    #token = self._nexus_tokenizer.next_token()
                                token = self._nexus_tokenizer.next_token()
                                if token:
                                    if token == '\\' or token == '/': # (NEXUS standard only accepts '\')
                                        token = self._nexus_tokenizer.next_token()
                                        if token:
                                            if token.isdigit():
                                                step = int(token)
                                                #token = self._nexus_tokenizer.next_token()
                                            else:
                                                raise self._nexus_error('Expecting digit but found "%s".' % (token))
                                        else:
                                            raise self._nexus_error('Expecting other tokens after "\\", but no more found.')
                                        token = self._nexus_tokenizer.next_token()
                                    else:
                                        step = 1
                                else:
                                    step = 1
                                # Expand the (start, end, step) range, dropping
                                # positions past NCHAR.
                                for q in range(start, end+1, step):
                                    if q <= max_positions:
                                        positions.append(q)
                            else:
                                raise self._nexus_error('Expecting digit or ".", but found "%s".' % (token))
                        else:
                            raise self._nexus_error('Expecting other tokens after "-", but no more found.')
                    else:
                        raise self._nexus_error('Expecting digit or "all", but found "%s".' % (token))
                else:
                    positions.append(start)
        self._nexus_tokenizer.set_hyphens_as_captured_delimiters(False)
        positions = list(set(positions))
        positions.sort()
        if verify:
            for position in positions:
                if position > max_positions:
                    raise self._nexus_error("Specified position %d, but maximum position is %d" % (position, max_positions))
        if adjust_to_zero_based:
            positions = [position - 1 for position in positions]
        return positions # make unique and return
+
+    def _consume_to_end_of_block(self, token=None):
+        if token:
+            token = token.upper()
+        else:
+            token = "DUMMY"
+        while not (token == 'END' or token == 'ENDBLOCK') \
+            and not self._nexus_tokenizer.is_eof() \
+            and not token==None:
+            self._nexus_tokenizer.skip_to_semicolon()
+            token = self._nexus_tokenizer.next_token_ucase()
+        return token
+
+    def _read_character_states(self,
+            character_data_vector,
+            state_alphabet,
+            first_sequence_defined,
+            ):
+        """
+        Reads character sequence data substatement until the number of
+        character states read is equal to ``self._file_specified_nchar`` (with
+        multi-state characters, such as '(AG)' counting as a single
+        state) or, if ``self._interleave`` is `True`, until an EOL is
+        reached.
+
+        Given a sequence of characters, with ambiguities denoted by
+        `{<STATES>}`, this returns a list of state alphabet elements.
+
+        For example, the following sequence:
+
+            "ACTG(AC)GGT(CGG)(CG)GG"
+
+        will result in a list such as:
+
+            [<A>, <C>, <T>, <G>, <AC>, <G>, <G>, <T>, <CGG>, <CG>, <G>, <G>]
+
+        where `<.>` is a StateIdentity object with the characters within the
+        brackets as symbol(s).
+
+        """
+        if self._interleave:
+            self._nexus_tokenizer.set_capture_eol(True)
+        states_to_add = []
+        while len(character_data_vector) + len(states_to_add) < self._file_specified_nchar:
+            token = self._nexus_tokenizer.require_next_token()
+            if token == "{" or token == "(":
+                if token == "{":
+                    # multistate_type = dataobject.StateIdentity.AMBIGUOUS_STATE
+                    multistate_type = state_alphabet.AMBIGUOUS_STATE
+                    closing_token = "}"
+                else:
+                    # multistate_type = dataobject.StateIdentity.POLYMORPHIC_STATE
+                    multistate_type = state_alphabet.POLYMORPHIC_STATE
+                    closing_token = ")"
+                multistate_tokens = []
+                while True:
+                    token = self._nexus_tokenizer.require_next_token()
+                    if token == closing_token:
+                        break
+                    multistate_tokens.append(token)
+                c = "".join(multistate_tokens)
+                state = self._get_state_for_multistate_tokens(c, multistate_type, state_alphabet)
+                if len(character_data_vector) + len(states_to_add) == self._file_specified_nchar:
+                    raise self._too_many_characters_error(c)
+                states_to_add.append(state)
+            elif token == "\r" or token == "\n":
+                if self._interleave:
+                    break
+            elif token == ";":
+                raise NexusReader.BlockTerminatedException
+            else:
+                for c in token:
+                    if c in self._match_char:
+                        try:
+                            state = first_sequence_defined[len(character_data_vector) + len(states_to_add)]
+                        except TypeError:
+                            exc = self._nexus_error("Cannot dereference MATCHCHAR '{}' on first sequence".format(c), NexusReader.NexusReaderError)
+                            exc.__context__ = None # Python 3.0, 3.1, 3.2
+                            exc.__cause__ = None # Python 3.3, 3.4
+                            raise exc
+                        except IndexError:
+                            exc = self._nexus_error("Cannot dereference MATCHCHAR '{}': current position ({}) exceeds length of first sequence ({})".format(c,
+                                    len(character_data_vector) + len(states_to_add) + 1,
+                                    len(first_sequence_defined),
+                                    NexusReader.NexusReaderError))
+                            exc.__context__ = None # Python 3.0, 3.1, 3.2
+                            exc.__cause__ = None # Python 3.3, 3.4
+                            raise exc
+                    else:
+                        try:
+                            state = state_alphabet.full_symbol_state_map[c]
+                        except KeyError:
+                            exc = self._nexus_error("Unrecognized character state symbol for state alphabet '{}' ({}) : '{}'".format(
+                                        state_alphabet.label,
+                                        state_alphabet.__class__.__name__,
+                                        c),
+                                        NexusReader.InvalidCharacterStateSymbolError)
+                            exc.__context__ = None # Python 3.0, 3.1, 3.2
+                            exc.__cause__ = None # Python 3.3, 3.4
+                            raise exc
+                    if len(character_data_vector) + len(states_to_add) == self._file_specified_nchar:
+                        raise self._too_many_characters_error(c)
+                    states_to_add.append(state)
+        if self._interleave:
+            self._nexus_tokenizer.set_capture_eol(False)
+        character_data_vector.extend(states_to_add)
+        return character_data_vector
+
+    def _read_continuous_character_values(self,
+            character_data_vector,
+            datatype=float,
+            ):
+        """
+        Reads character sequence data substatement until the number of
+        character states read is equal to ``self._file_specified_nchar`` (with
+        multi-state characters, such as '(AG)' counting as a single
+        state) or, if ``self._interleave`` is `True`, until an EOL is
+        reached.
+        """
+        if self._interleave:
+            self._nexus_tokenizer.set_capture_eol(True)
+        while len(character_data_vector) < self._file_specified_nchar:
+            token = self._nexus_tokenizer.require_next_token()
+            if token == "\r" or token == "\n":
+                if self._interleave:
+                    break
+            elif token == ";":
+                raise NexusReader.BlockTerminatedException
+            else:
+                try:
+                    state = float(token)
+                except ValueError:
+                    exc = self._nexus_error("Invalid value for continuous character type: '{invalid_value}'".format(datatype=datatype, invalid_value=token),
+                                NexusReader.InvalidContinuousCharacterValueError)
+                    exc.__context__ = None # Python 3.0, 3.1, 3.2
+                    exc.__cause__ = None # Python 3.3, 3.4
+                    raise exc
+                    # if c in self._match_char:
+                    #     try:
+                    #         state = first_sequence_defined[len(character_data_vector)]
+                    #     except TypeError:
+                    #         exc = self._nexus_error("Cannot dereference MATCHCHAR '{}' on first sequence".format(c), NexusReader.NexusReaderError)
+                    #         exc.__context__ = None # Python 3.0, 3.1, 3.2
+                    #         exc.__cause__ = None # Python 3.3, 3.4
+                    #         raise exc
+                    #     except IndexError:
+                    #         exc = self._nexus_error("Cannot dereference MATCHCHAR '{}': current position ({}) exceeds length of first sequence ({})".format(c,
+                    #                 len(character_data_vector)+1,
+                    #                 len(first_sequence_defined),
+                    #                 NexusReader.NexusReaderError))
+                    #         exc.__context__ = None # Python 3.0, 3.1, 3.2
+                    #         exc.__cause__ = None # Python 3.3, 3.4
+                    #         raise exc
+                    # else:
+                    #     try:
+                    #         state = state_alphabet.full_symbol_state_map[c]
+                    #     except KeyError:
+                    #         exc = self._nexus_error("Unrecognized character state symbol for state alphabet '{}' ({}) : '{}'".format(
+                    #                     state_alphabet.label,
+                    #                     state_alphabet.__class__.__name__,
+                    #                     c),
+                    #                     NexusReader.InvalidCharacterStateSymbolError)
+                    #         exc.__context__ = None # Python 3.0, 3.1, 3.2
+                    #         exc.__cause__ = None # Python 3.3, 3.4
+                    #         raise exc
+                if len(character_data_vector) == self._file_specified_nchar:
+                    raise self._too_many_characters_error(c)
+                character_data_vector.append(state)
+        if self._interleave:
+            self._nexus_tokenizer.set_capture_eol(False)
+        return character_data_vector
+
diff --git a/dendropy/dataio/nexuswriter.py b/dendropy/dataio/nexuswriter.py
new file mode 100644
index 0000000..618bc28
--- /dev/null
+++ b/dendropy/dataio/nexuswriter.py
@@ -0,0 +1,536 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Writing data in NEXUS format to an output stream.
+"""
+
+import re
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+import warnings
+import collections
+from dendropy.dataio import ioservice
+from dendropy.dataio import newick
+from dendropy.dataio import nexusprocessing
+from dendropy.dataio import newickwriter
+
+###############################################################################
+## NexusWriter
+
+class NexusWriter(ioservice.DataWriter):
+    """
+    Formatter for NEXUS data.
+    """
+
+    def __init__(self, **kwargs):
+        """
+
+        Keyword Arguments
+        -----------------
+        simple : boolean, default: `False`
+            If `True`, write in simple NEXUS format, i.e. in a single "DATA"
+            block, instead of separate "TAXA" and "CHARACTER" blocks.
+        suppress_taxa_blocks: boolean, default: `False`
+            If `True`, do not write a "TAXA" block. Note that this may make the
+            file impossible to parse if there are multiple taxon namespaces in
+            the data.
+        suppress_unreferenced_taxon_namespaces: boolean, default: `False`
+            If `True`, then when writing |DataSet| objects, any
+            |TaxonNamespace| object in the DataSet's ``taxon_namespaces``
+            collection will *not* be written as a "TAXA" block if it is not
+            referenced by any character matrix (``char_matrices``) or tree list
+            (``tree_lists``).
+        suppress_block_titles : bool or `None`
+            If `True` then 'TITLE' element to blocks will not be written. Note
+            that this may make the file impossible to parse if there are
+            multiple taxon namespaces in the data. If `False`, then the
+            'TITLE' element will always be written. Default is `None`: the
+            'TITLE' element will only be written if needed because there is
+            more than on taxon namespace in the data.
+        file_comments: iterable [``str``]
+            List of lines of text to be added as comments to the file.
+        preamble_blocks: iterable [``str``]
+            List of strings to be written before data (e.g., PAUP blocks
+            suppressing warnings etc.).
+        supplemental_blocks: iterable [``str``]
+            List of strings to be written after data (e.g., PAUP blocks,
+            MrBayes blocks etc.).
+        allow_multiline_comments : bool
+            If `False` then comments will be merged into a single string before
+            being written. Default is `True`: each comment element will be
+            written on its own line.
+        continuous_character_state_value_format_fn : function object
+            When writing |ContinuousCharacterMatrix| data: a function that
+            takes a continuous character value and returns the string
+            representation of it.
+        discrete_character_state_value_format_fn : function object
+            When writing discrete character data (e.g., a
+            |StandardCharacterMatrix|): a function that takes a
+            standard character state value (i.e., a |StateIdentity| instance)
+            and returns the string representation of it.
+        suppress_leaf_taxon_labels : boolean, default: `False`
+            If `True`, then taxon labels will not be rendered for leaves.
+            Default is `False`: render leaf taxon labels. See notes below for
+            details.
+        suppress_leaf_node_labels : boolean, default: `True`
+            If `False`, then node labels (if available) will be printed for
+            leaves. Defaults to `True`: do not render leaf node labels. See
+            notes below for details.
+        suppress_internal_taxon_labels : boolean, default: `False`
+            If `True`, then taxon labels will not be printed for internal
+            nodes. Default is `False`: print taxon labels for internal nodes.
+            See notes below for details.
+        suppress_internal_node_labels : boolean, default: `False`
+            If `True`, then node labels will not be printed for internal nodes.
+            Default is `False`: print node labels for internal nodes. See notes
+            below for details.
+        suppress_rooting : boolean, default: `False`
+            If `True`, will not write rooting token ('[&R]' or '[&U]').
+            Default is `False`: rooting token will be written.
+        suppress_edge_lengths : boolean, default: `False`
+            If `True`, will not write edge lengths. Default is `False`: edge
+            lengths will be written.
+        unquoted_underscores : boolean, default: `False`
+            If `True`, labels with underscores will not be quoted, which will
+            mean that they will be interpreted as spaces if read again ("soft"
+            underscores).  If `False`, then labels with underscores
+            will be quoted, resulting in "hard" underscores.  Default is
+            `False`.
+        preserve_spaces : boolean, default: `False`
+            If `True`, spaces will not be replaced with underscores in labels
+            (which means any labels containing spaces will have to be quoted).
+            Default is `False`: spaces will be converted to underscores.
+            False.
+        store_tree_weights : boolean, default: `False`
+            If `True`, tree weights are written. Default is `False`: tree
+            weights will not be written.
+        translate_tree_taxa : boolean or dict or `None`, default: `None`.
+            If `False` or `None`, then a "TRANSLATE" statement will not be
+            used, and tree statements will contain the full taxon labels. If
+            not `False` or `None`, a "TRANSLATE" statement will be written and
+            referenced in tree statements (instead of using the taxon labels).
+            If `True`, then a default translate statement will be used, with
+            tokens given by the taxon indexes. If a dictionary is given, then
+            the keys should be |Taxon| objects and the values should be the
+            token (strings).
+        suppress_annotations : boolean, default: `False`
+            If `True`, metadata annotations will be ignored.
+            Defaults to `False`: metadata annotations will be written.
+        annotations_as_nhx : boolean, default: `False`
+            If `True`, and if ``suppress_annotations`` is `False`, will write
+            annotations as NHX statements. Default is `False`: annotations
+            will not be written as NHX statements.
+        suppress_item_comments : boolean, default: `False`
+            If `True`: comments will be ignored. Default is `False`: any
+            additional comments associated with trees, nodes, edges, etc. will
+            be written.
+        node_label_element_separator : string, default: ' '
+            If both ``suppress_leaf_taxon_labels`` and
+            ``suppress_leaf_node_labels`` are `False`, then this will be the
+            string used to join them. Defaults to ' ' (space).
+        node_label_compose_fn : function object or `None`, default: `None`
+            If not `None`, should be a function that takes a |Node|
+            object as an argument and returns the string to be used to
+            represent the node in the tree statement. The return value from
+            this function is used unconditionally to print a node
+            representation in a tree statement, by-passing the default
+            labelling function, ignoring ``suppress_leaf_taxon_labels``,
+            ``suppress_leaf_node_labels=True``, ``suppress_internal_taxon_labels``,
+            ``suppress_internal_node_labels``, etc. Defaults to `None`.
+        edge_label_compose_fn : function object or `None`, default: `None`
+            If not `None`, should be a function that takes an Edge object as
+            an argument, and returns the string to be used to represent the
+            edge length in the tree statement.
+        real_value_format_specifier : string, default: ''
+            Format specification for real/float values. Will be applied to edge
+            lengths (if ``edge_label_compose_fn`` is not given) as well as
+            annotations. The format specifier should be given in Python's
+            string format specification mini-language. E.g. ".8f", ".4E",
+            "8.4f".
+        ignore_unrecognized_keyword_arguments : boolean, default: `False`
+            If `True`, then unsupported or unrecognized keyword arguments will
+            not result in an error. Default is `False`: unsupported keyword
+            arguments will result in an error.
+
+        """
+        # base
+        ioservice.DataWriter.__init__(self)
+
+        # Following are NEXUS specific (i.e., not used by NEWICK formatters),
+        # and need to be removed so as not to cause problems with our keyword
+        # validation scheme
+        self.simple = kwargs.pop("simple", False)
+        self.suppress_taxa_blocks = kwargs.pop("suppress_taxa_block", None)
+        self.suppress_block_titles = kwargs.pop("suppress_block_titles", None)
+        self.file_comments = kwargs.pop("file_comments", [])
+        if self.file_comments is None:
+            self.file_comments = []
+        self.preamble_blocks = kwargs.pop("preamble_blocks", [])
+        if self.preamble_blocks is None:
+            self.preamble_blocks = []
+        self.supplemental_blocks = kwargs.pop("supplemental_blocks", [])
+        if self.supplemental_blocks is None:
+            self.supplemental_blocks = []
+        self.allow_multiline_comments = kwargs.pop("allow_multiline_comments", True)
+        self.suppress_unreferenced_taxon_namespaces = kwargs.pop("suppress_unreferenced_taxon_namespaces", False)
+        self.continuous_character_state_value_format_fn = kwargs.pop("continuous_character_state_value_format_fn", self._format_continuous_character_value)
+        if self.continuous_character_state_value_format_fn is None:
+            self.continuous_character_state_value_format_fn = self._format_continuous_character_value
+        self.discrete_character_state_value_format_fn = kwargs.pop("discrete_character_state_value_format_fn", self._format_discrete_character_value)
+        if self.discrete_character_state_value_format_fn is None:
+            self.discrete_character_state_value_format_fn = self._format_discrete_character_value
+        self.translate_tree_taxa = kwargs.pop("translate_tree_taxa", None)
+
+        # The following are used by NewickWriter in addition to NexusWriter, so
+        # they are extracted/set here and then forwarded on ...
+        self.unquoted_underscores = kwargs.get('unquoted_underscores', False)
+        self.preserve_spaces = kwargs.get("preserve_spaces", False)
+        self.annotations_as_nhx = kwargs.get("annotations_as_nhx", False)
+        self.real_value_format_specifier = kwargs.pop("real_value_format_specifier", "")
+
+        # As above, but the NEXUS format default is different from the NEWICK
+        # default, so this rather convoluted approach
+        self.suppress_annotations = kwargs.pop("suppress_annotations", False)
+        kwargs["suppress_annotations"] = self.suppress_annotations
+        self.suppress_item_comments = kwargs.pop("suppress_item_comments", False)
+        kwargs["suppress_item_comments"] = self.suppress_item_comments
+
+        # The newick writer to which tree-writing will be delegated
+        self._newick_writer = newickwriter.NewickWriter(**kwargs)
+
+        # Book-keeping
+        self.taxon_namespaces_to_write = []
+        self._block_title_map = {}
+        self._title_block_map = {}
+
+    def _write(self,
+            stream,
+            taxon_namespaces=None,
+            tree_lists=None,
+            char_matrices=None,
+            global_annotations_target=None):
+
+        # Header
+        stream.write('#NEXUS\n\n')
+
+        # File/Document-level annotations and comments
+        if self.file_comments:
+            self._write_comments(stream, self.file_comments)
+        if global_annotations_target is not None:
+            self._write_item_annotations(stream, global_annotations_target)
+            self._write_item_comments(stream, global_annotations_target)
+
+        # Other blocks
+        if self.preamble_blocks:
+            for block in self.preamble_blocks:
+                stream.write(block)
+                stream.write("\n")
+            stream.write("\n")
+
+        # Taxon namespace discovery
+        candidate_taxon_namespaces = collections.OrderedDict()
+        if self.attached_taxon_namespace is not None:
+            # should this be False?
+            candidate_taxon_namespaces[self.attached_taxon_namespace] = True
+        elif taxon_namespaces is not None:
+            if self.suppress_unreferenced_taxon_namespaces:
+                # preload to preserve order
+                for tns in taxon_namespaces:
+                    candidate_taxon_namespaces[tns] = False
+            else:
+                for tns in taxon_namespaces:
+                    candidate_taxon_namespaces[tns] = True
+        for data_collection in (tree_lists, char_matrices):
+            if data_collection is not None:
+                for i in data_collection:
+                    if self.attached_taxon_namespace is None or i.taxon_namespace is self.attached_taxon_namespace:
+                        candidate_taxon_namespaces[i.taxon_namespace] = True
+        self.taxon_namespaces_to_write = [tns for tns in candidate_taxon_namespaces if candidate_taxon_namespaces[tns]]
+
+        #  Write out taxon namespaces
+        if not self.simple and not self.suppress_taxa_blocks:
+            if self.suppress_block_titles and len(taxon_namespace_to_write) > 1:
+                warnings.warn("Multiple taxon namespaces will be written, but block titles are suppressed: data file may not be interpretable")
+            for tns in self.taxon_namespaces_to_write:
+                self._write_taxa_block(stream, tns)
+
+        # Write out character matrices
+        if char_matrices is not None:
+            for char_matrix in char_matrices:
+                if (self.attached_taxon_namespace is None
+                        or char_matrix.taxon_namespace is self.attached_taxon_namespace):
+                    self._write_char_block(stream=stream, char_matrix=char_matrix)
+
+        # Write out tree lists
+        if tree_lists is not None:
+            for tree_list in tree_lists:
+                if (self.attached_taxon_namespace is None
+                        or tree_list.taxon_namespace is self.attached_taxon_namespace):
+                    self._write_trees_block(stream=stream,
+                            tree_list=tree_list)
+
+        # Write out remaining
+        if self.supplemental_blocks:
+            for block in self.supplemental_blocks:
+                stream.write(block)
+                stream.write("\n")
+
+    def _write_taxa_block(self, stream, taxon_namespace):
+        stream.write("BEGIN TAXA;\n")
+        self._write_block_title(stream, taxon_namespace)
+        self._write_item_annotations(stream, taxon_namespace)
+        self._write_item_comments(stream, taxon_namespace)
+        stream.write("    DIMENSIONS NTAX={};\n".format(len(taxon_namespace)))
+        stream.write("    TAXLABELS\n")
+        for taxon in taxon_namespace:
+            stream.write("        {}\n".format(
+                nexusprocessing.escape_nexus_token(taxon.label, preserve_spaces=self.preserve_spaces, quote_underscores=not self.unquoted_underscores),
+                ))
+            self._write_item_annotations(stream, taxon)
+            self._write_item_comments(stream, taxon)
+        stream.write("  ;\n")
+        stream.write("END;\n\n")
+
+    def _set_and_write_translate_block(self, stream, taxon_namespace):
+        if not self.translate_tree_taxa:
+            self._newick_writer.taxon_token_map = None
+            return
+        if self.translate_tree_taxa is True:
+            m = {}
+            for taxon_idx, t in enumerate(taxon_namespace):
+                m[t] = str(taxon_namespace.accession_index(t)+1)
+                # m[t] = str(taxon_idx)
+            self._newick_writer.taxon_token_map = m
+        else:
+            self._newick_writer.taxon_token_map = dict(self.translate_tree_taxa)
+        stream.write("        Translate\n")
+        statement = []
+        for taxon in taxon_namespace:
+            label = nexusprocessing.escape_nexus_token(str(taxon.label),
+                    preserve_spaces=self.preserve_spaces,
+                    quote_underscores=not self.unquoted_underscores)
+            statement.append("             {} {}".format(self._newick_writer.taxon_token_map[taxon], label))
+        statement = ",\n".join(statement)
+        stream.write("{}\n             ;\n".format(statement))
+
+    def _write_trees_block(self, stream, tree_list):
+        stream.write("BEGIN TREES;\n")
+        self._write_block_title(stream, tree_list)
+        self._write_item_annotations(stream, tree_list)
+        self._write_item_comments(stream, tree_list)
+        self._write_link_to_taxa_block(stream, tree_list.taxon_namespace)
+        self._set_and_write_translate_block(stream, tree_list.taxon_namespace)
+        for tree_idx, tree in enumerate(tree_list):
+            if tree.label:
+                tree_name = tree.label
+            else:
+                tree_name = str(tree_idx+1)
+            tree_name = nexusprocessing.escape_nexus_token(
+                    tree_name,
+                    preserve_spaces=self.preserve_spaces,
+                    quote_underscores=not self.unquoted_underscores)
+            stream.write("    TREE {} = ".format(tree_name))
+            self._newick_writer._write_tree(stream, tree)
+            stream.write("\n")
+        stream.write("END;\n\n")
+
+    def _write_char_block(self, stream, char_matrix):
+        taxon_label_map = collections.OrderedDict()
+        for taxon in char_matrix:
+            taxon_label_map[taxon] = nexusprocessing.escape_nexus_token(taxon.label, preserve_spaces=self.preserve_spaces, quote_underscores=not self.unquoted_underscores)
+        nchar = max([len(seq) for seq in char_matrix.values()])
+        if self.simple:
+            stream.write("BEGIN DATA;\n")
+            # note that this will only list the number of taxa for
+            # which sequences are available
+            ntaxstr = "NTAX={}".format(len(taxon_label_map))
+        else:
+            stream.write("BEGIN CHARACTERS;\n")
+            ntaxstr = ""
+        self._write_block_title(stream, char_matrix)
+        self._write_item_annotations(stream, char_matrix)
+        self._write_item_comments(stream, char_matrix)
+        self._write_link_to_taxa_block(stream, char_matrix.taxon_namespace)
+        stream.write("    DIMENSIONS{} NCHAR={};\n".format(ntaxstr, nchar))
+        stream.write("    FORMAT {};\n".format(self._compose_format_terms(char_matrix)))
+        stream.write("    MATRIX\n")
+        if char_matrix.data_type == "continuous":
+            state_value_writer = lambda x : stream.write("{} ".format(self.continuous_character_state_value_format_fn(x)))
+        else:
+            state_value_writer = lambda x : stream.write("{}".format(self.discrete_character_state_value_format_fn(x)))
+        max_label_len = max(len(v) for v in taxon_label_map.values())
+        for taxon in char_matrix:
+            stream.write("        {taxon_label:{field_len}}    ".format(taxon_label=taxon_label_map[taxon],
+                field_len=max_label_len))
+            for state in char_matrix[taxon]:
+                state_value_writer(state)
+            stream.write("\n")
+        stream.write("    ;\n")
+        stream.write("END;\n\n\n")
+        self._write_character_subsets(stream, char_matrix)
+
+    def _compose_format_terms(self, char_matrix):
+        format = []
+        if char_matrix.data_type == "dna":
+            format.append("DATATYPE=DNA")
+            format.append("GAP=- MISSING=? MATCHCHAR=.")
+        elif char_matrix.data_type == "rna":
+            format.append("DATATYPE=RNA")
+            format.append("GAP=- MISSING=? MATCHCHAR=.")
+        elif char_matrix.data_type == "nucleotide":
+            format.append("DATATYPE=NUCLEOTIDE")
+            format.append("GAP=- MISSING=? MATCHCHAR=.")
+        elif char_matrix.data_type == "protein":
+            format.append("DATATYPE=PROTEIN")
+            format.append("GAP=- MISSING=? MATCHCHAR=.")
+        elif char_matrix.data_type == "continuous":
+            format.append("DATATYPE=CONTINUOUS ITEMS=(STATES)")
+        else:
+            format.append("DATATYPE=STANDARD")
+            fundamental_symbols = set()
+            for state_alphabet in char_matrix.state_alphabets:
+                for s in state_alphabet.fundamental_state_iter():
+                    if s.symbol is not None:
+                        fundamental_symbols.add(s.symbol)
+                    else:
+                        raise Exception("Could not match character state to symbol: '%s'." % s)
+            format.append('SYMBOLS="%s"' % "".join(fundamental_symbols))
+            equates = set()
+            for state_alphabet in char_matrix.state_alphabets:
+                for a in state_alphabet.ambiguous_state_iter():
+                    if a.symbol == "?":
+                        format.append("MISSING=?")
+                    elif a.symbol == "-":
+                        format.append("GAP=-")
+                    else:
+                        if a.symbol is not None:
+                            equates.add("%s={%s}" % (a.symbol, "".join(a.fundamental_symbols)))
+            for state_alphabet in char_matrix.state_alphabets:
+                for p in state_alphabet.polymorphic_state_iter():
+                    if p.symbol is not None:
+                        equates.add("%s=(%s)" % (p.symbol, "".join(p.fundamental_symbols)))
+            if equates:
+                format.append('EQUATE="%s"' % equates)
+        return ' '.join(format)
+
+    def _write_comments(self, stream, comments):
+        if self.allow_multiline_comments:
+            if isinstance(comments, str):
+                stream.write("[{}]\n".format(comments))
+            else:
+                comments = "\n".join([str(c) for c in comments])
+                stream.write("[\n{}\n]\n".format(comments))
+        else:
+            if isinstance(comments, str):
+                comments = comments.replace("\r\n", "\n").replace("\n\r","\n").replace("\r","\n")
+                # comments = re.split(r'[\r\n]+', comments)
+                comments = [c for c in re.split(r'[\r\n]+', comments) if c]
+            for c in comments:
+                stream.write("[{}]\n".format(c))
+
+    def _write_item_comments(self, stream, item):
+        if not self.suppress_item_comments and item.comments:
+            self._write_comments(stream, item.comments)
+
+    def _write_item_annotations(self, stream, item):
+        if not self.suppress_annotations and item.annotations:
+            a = nexusprocessing.format_item_annotations_as_comments(item,
+                    nhx=self.annotations_as_nhx,
+                    real_value_format_specifier=self.real_value_format_specifier)
+            stream.write("{}\n".format(a))
+
+    def _write_block_title(self, stream, block):
+        if not self._link_blocks():
+            return
+        title = self._get_block_title(block)
+        if not title:
+            return
+        stream.write("    TITLE {};\n".format(title))
+
+    def _write_link_to_taxa_block(self, stream, taxon_namespace):
+        if not self._link_blocks():
+            return
+        link_title = self._get_block_title(taxon_namespace)
+        if not link_title:
+            return
+        stream.write("    LINK TAXA = {};\n".format(link_title))
+
+    def _get_block_title(self, block):
+        # if self.is_write_block_titles is False then no block titles;
+        # if only one taxon set, or attached taxon set mode, unless self.is_write_block_titles
+        # is explicitly True, then again, we do not write block titles
+        if not self._link_blocks():
+            return None
+        if block in self._block_title_map:
+            return self._block_title_map[block]
+        if not block.label:
+            title = str(id(block))
+        else:
+            title = block.label
+        idx = 1
+        original_title = title
+        title = nexusprocessing.escape_nexus_token(
+                original_title,
+                preserve_spaces=self.preserve_spaces,
+                quote_underscores=not self.unquoted_underscores)
+        while title in self._title_block_map:
+            raw_title = "{}.{}".format(original_title, idx)
+            title = nexusprocessing.escape_nexus_token(
+                    raw_title,
+                    preserve_spaces=self.preserve_spaces,
+                    quote_underscores=not self.unquoted_underscores)
+            idx += 1
+        self._title_block_map[title] = block
+        self._block_title_map[block] = title
+        return title
+
+    def _link_blocks(self):
+        """
+        If only one taxon set in dataset, or in attached taxon set mode, then
+        unless the 'block_titles' directive has been explicitly set to True
+        by the user, block titles and links will not be written.
+        """
+        if self.suppress_block_titles is None:
+            if len(self.taxon_namespaces_to_write) > 1:
+                return True
+            else:
+                return False
+        else:
+            return self.suppress_block_titles
+
+    def _format_continuous_character_value(self, v):
+        return "{}".format(v)
+
+    def _format_discrete_character_value(self, v):
+        return str(v)
+
+    def _write_character_subsets(self, stream, char_matrix):
+        if not hasattr(char_matrix, "character_subsets") or not char_matrix.character_subsets:
+            return
+        stream.write("BEGIN SETS;\n")
+        for label, char_set in char_matrix.character_subsets.items():
+            label = textutils.escape_nexus_token(char_set.label,
+                    preserve_spaces=self.preserve_spaces,
+                    quote_underscores=not self.unquoted_underscores)
+            ranges = textutils.group_ranges(char_set.character_indices)
+            pos = " ".join("-".join(str(c+1) for c in r) for r in ranges)
+            stream.write("    charset {} = {};\n".format((label, pos)))
+        stream.write("END;\n\n\n")
diff --git a/dendropy/dataio/nexusyielder.py b/dendropy/dataio/nexusyielder.py
new file mode 100644
index 0000000..cc08993
--- /dev/null
+++ b/dendropy/dataio/nexusyielder.py
@@ -0,0 +1,185 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Implementation of NEXUS-schema tree iterator.
+"""
+
+import sys
+if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
+    from dendropy.utility.filesys import pre_py34_open as open
+from dendropy.dataio import ioservice
+from dendropy.dataio import nexusreader
+from dendropy.dataio import nexusprocessing
+
+class NexusTreeDataYielder(
+        ioservice.TreeDataYielder,
+        nexusreader.NexusReader):
+
    def __init__(self,
            files=None,
            taxon_namespace=None,
            tree_type=None,
            **kwargs):
        r"""
        Parameters
        ----------
        files : iterable of sources
            Iterable of sources, which can either be strings specifying file
            paths or file-like objects open for reading. If a source element is
            a string (``isinstance(i,str) == True``), then it is assumed to be
            a path to a file. Otherwise, the source is assumed to be a file-like
            object.
        taxon_namespace : |TaxonNamespace| instance
            The operational taxonomic unit concept namespace to use to manage
            taxon definitions.
        tree_type : type
            The tree type to yield (passed through to the base
            `ioservice.TreeDataYielder`).
        \*\*kwargs : keyword arguments
            These will be passed directly to the base `nexusreader.NexusReader`
            class. See `nexusreader.NexusReader` for details. The
            ``assume_newick_if_not_nexus`` keyword is consumed here instead:
            if `True`, sources lacking a '#NEXUS' header are parsed as NEWICK.
        """
        # Initialize the yielder base first: this establishes
        # ``self.attached_taxon_namespace``, which is needed below.
        ioservice.TreeDataYielder.__init__(self,
                files=files,
                taxon_namespace=taxon_namespace,
                tree_type=tree_type)
        # Pop yielder-only keywords *before* forwarding kwargs to
        # NexusReader, which validates the keywords it receives.
        self.assume_newick_if_not_nexus = kwargs.pop("assume_newick_if_not_nexus", False)
        kwargs["attached_taxon_namespace"] = self.attached_taxon_namespace
        nexusreader.NexusReader.__init__(self, **kwargs)
        # This yielder produces trees only: skip character data.
        self.exclude_chars = True
        self.exclude_trees = False
+
+    ###########################################################################
+    ## Implementation of DataYielder interface
+
    def _yield_items_from_stream(self, stream):
        """
        Parse the NEXUS-format data in ``stream``, yielding each tree in
        every TREES block as it is built.
        """
        # (Re)bind the tokenizer to this stream, creating it on first use.
        if self._nexus_tokenizer is None:
            self.create_tokenizer(stream,
                preserve_unquoted_underscores=self.preserve_underscores)
        else:
            self._nexus_tokenizer.set_stream(stream)
        token = self._nexus_tokenizer.next_token()
        if token.upper() != "#NEXUS":
            if self.assume_newick_if_not_nexus:
                # Fall back to treating the whole source as a series of
                # NEWICK tree statements.
                taxon_symbol_mapper = self._get_taxon_symbol_mapper(taxon_namespace=self.attached_taxon_namespace)
                while True:
                    tree = self._build_tree_from_newick_tree_string(
                            tree_factory=self.tree_factory,
                            taxon_symbol_mapper=taxon_symbol_mapper)
                    if tree is None:
                        break
                    yield tree
                # NOTE(review): after the NEWICK fallback, control falls
                # through to the NEXUS block loop below; presumably the
                # tokenizer is at EOF by then so the loop is a no-op --
                # confirm.
            else:
                raise self._nexus_error("Expecting '#NEXUS', but found '{}'".format(token),
                        nexusreader.NexusReader.NotNexusFileError)
        # Main NEXUS loop: advance to each "BEGIN <block>" and dispatch on
        # the block type, yielding trees from TREES blocks and skipping
        # unknown blocks.
        while not self._nexus_tokenizer.is_eof():
            token = self._nexus_tokenizer.next_token_ucase()
            while token != None and token != 'BEGIN' and not self._nexus_tokenizer.is_eof():
                token = self._nexus_tokenizer.next_token_ucase()
            self._nexus_tokenizer.process_and_clear_comments_for_item(
                    self._global_annotations_target,
                    self.extract_comment_metadata)
            token = self._nexus_tokenizer.next_token_ucase()
            if token == 'TAXA':
                self._parse_taxa_block()
            elif token == 'TREES':
                for tree in self._yield_from_trees_block():
                    yield tree
            elif token == 'BEGIN':
                raise self._nexus_error("'BEGIN' found without completion of previous block",
                        nexusreader.NexusReader.IncompleteBlockError)
            else:
                # unknown block
                token = self._consume_to_end_of_block(token)
+
+    ###########################################################################
+    ## Supporting Functions
+
+    def _yield_from_trees_block(self):
+        """
+        Expectations:
+            - current token: "TREES" [part of "BEGIN TREES"]
+        """
+        token = self._nexus_tokenizer.cast_current_token_to_ucase()
+        if token != "TREES":
+            raise self._nexus_error("Expecting 'TREES' token, but instead found '{}'".format(token))
+        if self.exclude_trees:
+            self._consume_to_end_of_block(self._nexus_tokenizer.current_token)
+            return
+        self._nexus_tokenizer.skip_to_semicolon() # move past "BEGIN TREES" command
+        link_title = None
+        taxon_namespace = None
+        taxon_symbol_mapper = None
+        trees_block = None
+        block_title = None
+        while ((not self._nexus_tokenizer.is_eof())
+                and token is not None
+                and token != 'END'
+                and token != 'ENDBLOCK'):
+            token = self._nexus_tokenizer.next_token_ucase()
+            if token == 'LINK':
+                link_title = self._parse_link_statement().get("taxa")
+            elif token == 'TITLE':
+                block_title = self._parse_title_statement()
+                token = "" # clear; repopulate at start of loop
+            elif token == 'TRANSLATE':
+                if taxon_namespace is None:
+                    taxon_namespace = self._get_taxon_namespace(link_title)
+                taxon_symbol_mapper = self._parse_translate_statement(taxon_namespace)
+                token = "" # clear; repopulate at start of loop
+            elif token == 'TREE':
+                if taxon_namespace is None:
+                    taxon_namespace = self._get_taxon_namespace(link_title)
+                if taxon_symbol_mapper is None:
+                    taxon_symbol_mapper = self._get_taxon_symbol_mapper(taxon_namespace=taxon_namespace)
+                pre_tree_comments = self._nexus_tokenizer.pull_captured_comments()
+                tree_factory = self.tree_factory
+                while True:
+                    ## After the following, the current token
+                    ## will be the token immediately following
+                    ## the terminating semi-colon of a tree
+                    ## statement. Typically, this will be
+                    ## 'TREE' if there is another tree, or
+                    ## 'END'/'ENDBLOCK'.
+                    tree = self._parse_tree_statement(
+                            tree_factory=tree_factory,
+                            taxon_symbol_mapper=taxon_symbol_mapper)
+                    yield tree
+                    if self._nexus_tokenizer.is_eof() or not self._nexus_tokenizer.current_token:
+                        break
+                    if self._nexus_tokenizer.cast_current_token_to_ucase() != "TREE":
+                        token = self._nexus_tokenizer.current_token
+                        break
+            elif token == 'BEGIN':
+                raise self._nexus_error("'BEGIN' found without completion of previous block",
+                        nexusreader.NexusReader.IncompleteBlockError)
+        self._nexus_tokenizer.skip_to_semicolon() # move past END command
+        raise StopIteration
+
class NexusNewickTreeDataYielder(NexusTreeDataYielder):
    """
    Tree data yielder that reads NEXUS sources, falling back to Newick
    parsing for sources that do not begin with "#NEXUS".
    """

    def __init__(self,
            files=None,
            taxon_namespace=None,
            tree_type=None,
            **kwargs):
        # Default to the Newick fallback unless the caller overrides it.
        kwargs.setdefault("assume_newick_if_not_nexus", True)
        NexusTreeDataYielder.__init__(self,
                files=files,
                taxon_namespace=taxon_namespace,
                tree_type=tree_type,
                **kwargs)
diff --git a/dendropy/dataio/phylipreader.py b/dendropy/dataio/phylipreader.py
new file mode 100644
index 0000000..ac85399
--- /dev/null
+++ b/dendropy/dataio/phylipreader.py
@@ -0,0 +1,308 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Implementation of PHYLIP-format data reader.
+"""
+
+
+import re
+from dendropy.dataio import ioservice
+from dendropy.utility import filesys
+from dendropy.utility import error
+
class PhylipReader(ioservice.DataReader):
    "Implements the DataReader interface for parsing PHYLIP files."

    class PhylipStrictSequentialError(error.DataParseError):
        """Error raised while parsing strict, sequential PHYLIP data."""
        def __init__(self, *args, **kwargs):
            error.DataParseError.__init__(self, *args, **kwargs)

    class PhylipStrictInterleavedError(error.DataParseError):
        """Error raised while parsing strict, interleaved PHYLIP data."""
        def __init__(self, *args, **kwargs):
            error.DataParseError.__init__(self, *args, **kwargs)

    class PhylipRelaxedSequentialError(error.DataParseError):
        """Error raised while parsing relaxed, sequential PHYLIP data."""
        def __init__(self, *args, **kwargs):
            error.DataParseError.__init__(self, *args, **kwargs)

    class PhylipRelaxedInterleavedError(error.DataParseError):
        """Error raised while parsing relaxed, interleaved PHYLIP data."""
        def __init__(self, *args, **kwargs):
            error.DataParseError.__init__(self, *args, **kwargs)

    def __init__(self, **kwargs):
        """
        Keyword Arguments
        -----------------
        data_type: str
            When reading into a |DataSet| object, the type of data must be
            specified: "dna", "rna", "protein", "restriction", "infinite",
            "standard", or "continuous".
        default_state_alphabet: |StateAlphabet| instance
            A |StateAlphabet| object to be used to manage the alphabet of the
            characters (|StandardCharacterMatrix| **only**).
        strict : bool
            If `True`, then data is given in 'strict' format, where first 10
            characters are the taxon label and remaining characters are the sequence.
            Default is `False`: relaxed format, where taxon labels are of
            arbitrary length and are separated from sequences by one or more (if
            ``multispace_delimiter`` is `False`) or two or more (if
            ``multispace_delimiter`` is `True`) spaces.
        interleaved : bool
            If `True`, then data is in interleaved format.
            Default is `False`: data is non-interleaved.
        multispace_delimiter: bool
            If `True` (and ``strict`` is `False`), then at least two spaces are
            required to delimit taxon label and associated sequence. Default is
            `False`: one or more spaces delimit taxon label and associated
            sequence.
        underscore_to_spaces: bool
            If `True`, then underscores in taxon labels are converted to
            spaces. Default is `False`: underscores are not converted.
        ignore_invalid_chars : bool
            If `True` then any invalid characters in sequences will be ignored.
            Default is `False`: invalid characters result in errors.
        ignore_unrecognized_keyword_arguments : boolean, default: `False`
            If `True`, then unsupported or unrecognized keyword arguments will
            not result in an error. Default is `False`: unsupported keyword
            arguments will result in an error.
        """
        ioservice.DataReader.__init__(self)
        self.data_type = kwargs.pop("data_type", None)
        self.strict = kwargs.pop("strict", False)
        self.interleaved = kwargs.pop("interleaved", False)
        self.multispace_delimiter = kwargs.pop("multispace_delimiter", False)
        self.underscores_to_spaces = kwargs.pop("underscores_to_spaces", False)
        self.ignore_invalid_chars = kwargs.pop("ignore_invalid_chars", False)
        self.default_state_alphabet = kwargs.pop("default_state_alphabet", None)
        if self.default_state_alphabet is not None:
            if self.data_type is None:
                # A custom alphabet implies "standard" data.
                self.data_type = "standard"
            elif self.data_type != "standard":
                raise ValueError("Cannot specify 'default_state_alphabet' with data type of '{}'".format(self.data_type))
        self.check_for_unused_keyword_arguments(kwargs)
        # Parse state; populated by ``_read()``.
        self.ntax = None            # declared number of taxa (from header line)
        self.nchar = None           # declared number of characters (from header line)
        self.char_matrix = None     # character matrix being populated
        self.taxon_namespace = None # taxon namespace being populated

    def describe_mode(self):
        """Return a human-readable description of the configured parsing mode."""
        parts = []
        if self.strict:
            parts.append("strict")
        else:
            parts.append("relaxed")
        if self.interleaved:
            parts.append("interleaved")
        else:
            parts.append("sequential")
        return ", ".join(parts)

    def reset(self):
        """Clear all per-read parse state."""
        self.ntax = None
        self.nchar = None
        self.char_matrix = None
        self.taxon_namespace = None
        self.stream = None

    def _read(self,
            stream,
            taxon_namespace_factory=None,
            tree_list_factory=None,
            char_matrix_factory=None,
            state_alphabet_factory=None,
            global_annotations_target=None):
        """
        Parse PHYLIP data from ``stream`` into a single character matrix,
        returning it wrapped in a ``Product``.

        Raises ``error.DataSourceError`` on an empty source and parse errors
        (see ``_data_parse_error``) on malformed content.
        """
        self.reset()
        self.stream = stream
        self.taxon_namespace = taxon_namespace_factory(label=None)
        if self.data_type is None:
            raise TypeError("Data type must be specified for this schema")
        if self.data_type == "standard" and self.default_state_alphabet is not None:
            self.char_matrix = char_matrix_factory(
                    self.data_type,
                    label=None,
                    taxon_namespace=self.taxon_namespace,
                    default_state_alphabet=self.default_state_alphabet,
                    )
        else:
            self.char_matrix = char_matrix_factory(
                    self.data_type,
                    label=None,
                    taxon_namespace=self.taxon_namespace)
            if self.data_type == "standard":
                # Default "0"-"9" alphabet when none was supplied.
                state_alphabet = state_alphabet_factory(
                    fundamental_states="0123456789",
                    no_data_symbol="?",
                    gap_symbol="-",
                    case_sensitive=False)
                self.char_matrix.state_alphabets.append(state_alphabet)
        lines = filesys.get_lines(stream)
        if len(lines) == 0:
            raise error.DataSourceError("No data in source", stream=self.stream)
        elif len(lines) < 2:
            # BUGFIX: was "<= 2", which (contradicting the message below)
            # rejected a valid minimal file of header + one sequence line.
            raise error.DataParseError("Expecting at least 2 lines in PHYLIP format data source", stream=self.stream)
        desc_line = lines[0]
        lines = lines[1:]
        m = re.match(r'\s*(\d+)\s+(\d+)\s*$', desc_line)
        if m is None:
            raise self._data_parse_error("Invalid data description line: '%s'" % desc_line)
        self.ntax = int(m.groups()[0])
        self.nchar = int(m.groups()[1])
        if self.ntax == 0 or self.nchar == 0:
            raise error.DataSourceError("No data in source", stream=self.stream)
        if self.interleaved:
            self._parse_interleaved(lines)
        else:
            self._parse_sequential(lines)
        product = self.Product(
                taxon_namespaces=None,
                tree_lists=None,
                char_matrices=[self.char_matrix])
        return product

    def _parse_taxon_from_line(self, line, line_index):
        """
        Extract the taxon label at the start of ``line``, returning the
        corresponding |Taxon| object (created in the namespace if needed)
        and the remainder of the line (the sequence data).
        """
        if self.strict:
            # Strict format: label is exactly the first 10 columns.
            seq_label = line[:10].strip()
            line = line[10:]
        else:
            if self.multispace_delimiter:
                parts = re.split(r'[ \t]{2,}', line, maxsplit=1)
            else:
                parts = re.split(r'[ \t]{1,}', line, maxsplit=1)
            seq_label = parts[0]
            if len(parts) < 2:
                line = ''
            else:
                line = parts[1]
        seq_label = seq_label.strip()
        if not seq_label:
            raise self._data_parse_error("Expecting taxon label", line_index=line_index)
        if self.underscores_to_spaces:
            seq_label = seq_label.replace('_', ' ')
        current_taxon = self.char_matrix.taxon_namespace.require_taxon(label=seq_label)
        if current_taxon not in self.char_matrix:
            self.char_matrix[current_taxon] = self.char_matrix.new_sequence(taxon=current_taxon)
        else:
            if len(self.char_matrix[current_taxon]) >= self.nchar:
                # BUGFIX: format the sequence *length* (was formatting the
                # sequence object itself with "%d", a TypeError).
                raise self._data_parse_error("Cannot add characters to sequence for taxon '%s': already has declared number of characters (%d)" \
                        % (current_taxon.label, len(self.char_matrix[current_taxon])), line_index=line_index)
        return current_taxon, line

    def _parse_sequence_from_line(self, current_taxon, line, line_index):
        """
        Append the character states encoded on ``line`` to the sequence
        for ``current_taxon``. Invalid symbols raise a parse error unless
        ``ignore_invalid_chars`` is set.
        """
        if self.data_type == "continuous":
            # Continuous data: whitespace-separated floats.
            for c in line.split():
                if not c:
                    continue
                try:
                    state = float(c)
                except ValueError:
                    if not self.ignore_invalid_chars:
                        raise self._data_parse_error("Invalid state for taxon '%s': '%s'" % (current_taxon.label, c),
                                line_index=line_index)
                else:
                    self.char_matrix[current_taxon].append(state)
        else:
            # Discrete data: one state symbol per character.
            for c in line:
                if c in [' ', '\t']:
                    continue
                try:
                    state = self.char_matrix.default_state_alphabet[c]
                except KeyError:
                    if not self.ignore_invalid_chars:
                        raise self._data_parse_error("Invalid state symbol for taxon '%s': '%s'" % (current_taxon.label, c),
                                line_index=line_index)
                else:
                    self.char_matrix[current_taxon].append(state)

    def _parse_sequential(self, lines, line_num_start=1):
        """
        Parse non-interleaved data: each taxon's label is followed by its
        full sequence, possibly wrapped over multiple lines.
        """
        current_taxon = None
        for line_index, line in enumerate(lines):
            line = line.rstrip()
            if line == '':
                continue
            if current_taxon is None:
                current_taxon, line = self._parse_taxon_from_line(line, line_index)
            self._parse_sequence_from_line(current_taxon, line, line_index)
            if len(self.char_matrix[current_taxon]) >= self.nchar:
                # Declared length reached: next non-blank line starts a new taxon.
                current_taxon = None

    def _parse_interleaved(self, lines, line_num_start=1):
        """
        Parse interleaved data: the first "page" of ``ntax`` lines defines
        the taxa; subsequent pages continue the sequences in the same order
        (without repeating labels).
        """
        current_taxon = None
        paged = False       # True once all ntax taxa have been defined
        paged_row = -1      # row position within the current page
        for line_index, line in enumerate(lines):
            current_taxon = None
            line = line.rstrip()
            if line == '':
                continue
            paged_row += 1
            if paged_row >= self.ntax:
                paged_row = 0
            if paged:
                current_taxon = self.char_matrix.taxon_namespace[paged_row]
            else:
                current_taxon, line = self._parse_taxon_from_line(line, line_index)
                if len(self.char_matrix.taxon_namespace) == self.ntax:
                    paged = True
                    paged_row = -1
            self._parse_sequence_from_line(current_taxon, line, line_index)

    def _data_parse_error(self, message, line_index=None):
        """
        Build (but do not raise) the parse-error appropriate for the
        configured mode. ``line_index`` is 0-based into the post-header
        lines; +2 converts to a 1-based file line number (accounting for
        the header line).
        """
        if line_index is None:
            row = None
        else:
            row = line_index + 2
        if self.strict and self.interleaved:
            error_type = PhylipReader.PhylipStrictInterleavedError
        elif self.strict:
            error_type = PhylipReader.PhylipStrictSequentialError
        elif self.interleaved:
            error_type = PhylipReader.PhylipRelaxedInterleavedError
        else:
            # BUGFIX: relaxed + sequential previously returned the *strict*
            # sequential error class.
            error_type = PhylipReader.PhylipRelaxedSequentialError
        return error_type(message, line_num=row, stream=self.stream)
+
diff --git a/dendropy/dataio/phylipwriter.py b/dendropy/dataio/phylipwriter.py
new file mode 100644
index 0000000..190dcde
--- /dev/null
+++ b/dendropy/dataio/phylipwriter.py
@@ -0,0 +1,124 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Implementation of PHYLIP-format data writer.
+"""
+
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+from dendropy.dataio import ioservice
+from dendropy.utility import textprocessing
+
+STRICT_MODE_MAX_LABEL_LENGTH = 10
+
class PhylipWriter(ioservice.DataWriter):
    "Implements the DataWriter interface for writing PHYLIP files."

    def __init__(self, **kwargs):
        """
        Keyword Arguments
        -----------------
        strict : bool
            If `True`, use 'strict' format, i.e., taxon labels given in
            first 10 characters, followed by sequence starting at character 11.
            Default is `False`: use 'relaxed' format, with arbitrary-length
            taxon labels separated from sequences by two or more spaces.
        spaces_to_underscores : bool
            If `True`, all spaces will be converted to underscores. Default is
            `False`: spaces will be preserved.
        force_unique_taxon_labels : bool
            If `True`, then taxon labels will be modified to avoid duplicate
            labels. Default is `False`: taxon labels will not be modified.
        ignore_unrecognized_keyword_arguments : boolean, default: `False`
            If `True`, then unsupported or unrecognized keyword arguments will
            not result in an error. Default is `False`: unsupported keyword
            arguments will result in an error.
        """
        ioservice.DataWriter.__init__(self, **kwargs)
        self.strict = kwargs.pop("strict", False)
        self.spaces_to_underscores = kwargs.pop("spaces_to_underscores", False)
        self.force_unique_taxon_labels = kwargs.pop("force_unique_taxon_labels", False)
        self.check_for_unused_keyword_arguments(kwargs)

    def _write(self,
            stream,
            taxon_namespaces=None,
            tree_lists=None,
            char_matrices=None,
            global_annotations_target=None):
        """Write each eligible character matrix in ``char_matrices`` to ``stream``."""
        for char_matrix in char_matrices:
            if (self.attached_taxon_namespace is not None
                    and char_matrix.taxon_namespace is not self.attached_taxon_namespace):
                # Skip matrices not bound to the attached taxon namespace.
                continue
            self._write_char_matrix(stream, char_matrix)

    def _write_char_matrix(self, stream, char_matrix):
        "Writes dataset to a full PHYLIP document."
        if self.strict or self.force_unique_taxon_labels:
            taxon_label_map = self.get_taxon_label_map(char_matrix.taxon_namespace)
            if not self.strict:
                spacer = "  "
            else:
                # Strict labels come back padded to exactly 10 characters,
                # so no additional spacer is needed.
                spacer = ""
        else:
            taxon_label_map = {}
            for taxon in char_matrix.taxon_namespace:
                label = taxon.label
                if self.spaces_to_underscores:
                    label = label.replace(' ', '_')
                taxon_label_map[taxon] = label
            spacer = "  "
        label_lengths = [len(str(label)) for label in taxon_label_map.values()]
        # BUGFIX: guard against an empty taxon namespace -- max() of an
        # empty sequence raises ValueError.
        maxlen = max(label_lengths) if label_lengths else 0
        n_seqs = len(char_matrix)
        n_sites = char_matrix.max_sequence_size
        stream.write("%d %d\n" % (n_seqs, n_sites))
        for taxon in char_matrix.taxon_namespace:
            label = taxon_label_map[taxon]
            try:
                seq_vec = char_matrix[taxon]
            except KeyError:
                # Taxon is in the namespace but has no sequence in this matrix.
                continue
            stream.write("%s%s%s\n" % ( label.ljust(maxlen), spacer, str(seq_vec.symbols_as_string())))

    def get_taxon_label_map(self, taxon_namespace):
        """
        Return a dict mapping each taxon in ``taxon_namespace`` to a unique
        label suitable for the configured (strict or relaxed) format. In
        strict mode, labels are truncated and then right-padded to exactly
        10 characters.
        """
        taxon_label_map = {}
        if self.strict:
            max_label_len = STRICT_MODE_MAX_LABEL_LENGTH
        else:
            max_label_len = 0
        for taxon in taxon_namespace:
            label = taxon.label
            if self.spaces_to_underscores:
                label = label.replace(' ', '_')
            if self.strict:
                label = label[:max_label_len]
            taxon_label_map[taxon] = label
        # Disambiguate labels made identical by truncation/conversion.
        taxon_label_map = textprocessing.unique_taxon_label_map(taxon_namespace, taxon_label_map, max_label_len)
        if self.strict:
            for t in taxon_label_map:
                label = taxon_label_map[t]
                if len(label) < STRICT_MODE_MAX_LABEL_LENGTH:
                    taxon_label_map[t] = label.ljust(STRICT_MODE_MAX_LABEL_LENGTH)
        return taxon_label_map
+
diff --git a/dendropy/dataio/tokenizer.py b/dendropy/dataio/tokenizer.py
new file mode 100644
index 0000000..0f27d08
--- /dev/null
+++ b/dendropy/dataio/tokenizer.py
@@ -0,0 +1,293 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+import sys
+from dendropy.utility import error
+
+##############################################################################
+## Tokenizer
+
class Tokenizer(object):
    """
    Stream tokenizer.

    Splits a character stream into tokens, handling quoted literals,
    nestable comments, and two classes of delimiter characters:
    "uncaptured" delimiters separate tokens and are discarded, while
    "captured" delimiters are returned as single-character tokens in their
    own right. Instances are iterable, yielding successive tokens.
    """

    class TokenizerError(error.DataParseError):
        """Base class for errors raised during tokenization."""

        def __init__(self,
                message=None,
                line_num=None,
                col_num=None,
                stream=None):
            error.DataParseError.__init__(self,
                    message=message,
                    line_num=line_num,
                    col_num=col_num,
                    stream=stream)

    class UnterminatedQuoteError(TokenizerError):
        """Raised when the stream ends before a quoted token is closed."""

        def __init__(self,
                quote_char=None,
                line_num=None,
                col_num=None,
                stream=None):
            Tokenizer.TokenizerError.__init__(self,
                    message="Unterminated quote: {}".format(quote_char),
                    line_num=line_num,
                    col_num=col_num,
                    stream=stream)

    class UnexpectedEndOfStreamError(TokenizerError):
        """Raised when a token is required but the stream is exhausted."""

        def __init__(self,
                message=None,
                line_num=None,
                col_num=None,
                stream=None):
            Tokenizer.TokenizerError.__init__(self,
                    message=message,
                    line_num=line_num,
                    col_num=col_num,
                    stream=stream)

    def __init__(self,
            src,                        # source stream
            uncaptured_delimiters,      # delimiters between tokens (not returned)
            captured_delimiters,        # delimiters between tokens (returned as tokens)
            quote_chars,                # characters enclosing literals
            escape_quote_by_doubling,   # should two consecutive quote characters indicate a literal character (rather than a quote)?
            escape_chars,               # characters indicating beginning of escaped character
            comment_begin,              # string indicating beginning of comment
            comment_end,                # string indicating end of comment
            capture_comments,           # are comments to be stored?
            preserve_unquoted_underscores,       # are unquoted underscores to be preserved
            ):
        # Tokenizer behavior customization
        self.uncaptured_delimiters = uncaptured_delimiters
        self.captured_delimiters = captured_delimiters
        self.quote_chars = quote_chars
        self.escape_quote_by_doubling = escape_quote_by_doubling
        # NOTE(review): ``escape_chars`` is stored but not referenced by any
        # method in this class.
        self.escape_chars = escape_chars
        self.comment_begin = comment_begin
        self.comment_end = comment_end
        self.capture_comments = capture_comments
        self.preserve_unquoted_underscores = preserve_unquoted_underscores

        # State (internals)
        self.src = src
        self._cur_char = None           # last character read; "" at EOF; None before first read
        self.current_token = None
        self.is_token_quoted = False

        # Meta-information
        self.captured_comments = []
        self.current_line_num = 1
        self.current_column_num = 0
        self.token_line_num = 0
        self.token_column_num = 0

    def reset(self):
        """Detach from the current stream and clear all state."""
        self.set_stream(src=None)

    def set_stream(self, src=None):
        """Point the tokenizer at a new source stream, clearing all state."""
        self.src = src
        self._cur_char = None
        self.current_token = None
        self.is_token_quoted = False
        self.captured_comments = []
        self.current_line_num = 1
        self.current_column_num = 0
        self.token_line_num = 0
        self.token_column_num = 0

    def is_eof(self):
        """Return True if the end of the stream has been reached."""
        return self._cur_char == ""

    def has_captured_comments(self):
        """Return True if any comments have been captured and not yet pulled."""
        return len(self.captured_comments) > 0

    def next_token(self):
        """Return the next token, or None (instead of raising) at end of stream."""
        try:
            t = self.__next__()
            return t
        except StopIteration:
            self.current_token = None
            return None

    def require_next_token(self):
        """
        Return the next token, raising ``UnexpectedEndOfStreamError`` (rather
        than StopIteration) if the stream is exhausted.
        """
        try:
            t = self.__next__()
            return t
        except StopIteration:
            # In Python 3, if you catch an exception and then raise an
            # exception that is not a subclass of the original exception,
            # the original exception is not considered to have been
            # handled.
            # In Python > 3.3, this can be solved by:
            #
            #   raise Tokenizer.UnexpectedEndOfStreamError(...) from None
            #
            # To accommodate other versions, the following is required:
            exc = Tokenizer.UnexpectedEndOfStreamError(
                            message="Unexpected end of stream",
                            line_num=self.current_line_num,
                            col_num=self.current_column_num,
                            stream=self.src)
            exc.__context__ = None # Python 3.0, 3.1, 3.2
            exc.__cause__ = None # Python 3.3, 3.4
            raise exc

    def clear_captured_comments(self):
        """Discard all captured comments."""
        del self.captured_comments[:]

    def pull_captured_comments(self):
        """Return (a copy of) all captured comments and clear the store, or None if empty."""
        if not self.captured_comments:
            return None
        c = self.captured_comments[:]
        del self.captured_comments[:]
        return c

    def __iter__(self):
        return self

    def __next__(self):
        """Return the next token, raising StopIteration at end of stream."""
        self.is_token_quoted = False
        if self._cur_char is None:
            self._get_next_char()
        self._skip_to_significant_char()
        if self._cur_char == "":
            raise StopIteration
        if self._cur_char in self.captured_delimiters:
            # A captured delimiter is itself a (single-character) token.
            self.current_token = self._cur_char
            self.token_line_num = self.current_line_num
            self.token_column_num = self.current_column_num
            self._get_next_char()
            return self.current_token
        elif self._cur_char in self.quote_chars:
            # Quoted token: read verbatim until the closing quote.
            self.token_line_num = self.current_line_num
            self.token_column_num = self.current_column_num
            dest = []
            self.is_token_quoted = True
            cur_quote_char = self._cur_char
            self._get_next_char()
            while True:
                if self._cur_char == "":
                    # BUGFIX: was ``stream=src`` (NameError); the stream is
                    # held on the instance.
                    raise Tokenizer.UnterminatedQuoteError(
                            quote_char=cur_quote_char,
                            line_num=self.current_line_num,
                            col_num=self.current_column_num,
                            stream=self.src)
                if self._cur_char == cur_quote_char:
                    self._get_next_char()
                    if self.escape_quote_by_doubling:
                        if self._cur_char == cur_quote_char:
                            # Doubled quote: literal quote character.
                            dest.append(cur_quote_char)
                            self._get_next_char()
                        else:
                            break
                    else:
                        # BUGFIX: a second _get_next_char() here silently
                        # dropped the character following the closing quote;
                        # we have already advanced past the quote above.
                        break
                else:
                    dest.append(self._cur_char)
                    self._get_next_char()
            self.current_token = "".join(dest)
            return self.current_token
        else:
            # Unquoted token: read until a delimiter or end of stream,
            # handling embedded comments.
            self.token_line_num = self.current_line_num
            self.token_column_num = self.current_column_num
            dest = []
            self.is_token_quoted = False
            while self._cur_char != "":
                if self._cur_char in self.uncaptured_delimiters:
                    self._get_next_char()
                    break
                elif self._cur_char in self.captured_delimiters:
                    break
                elif self._cur_char in self.comment_begin:
                    self._handle_comment()
                    if self._cur_char == "":
                        break
                else:
                    if self._cur_char == "_" and not self.preserve_unquoted_underscores:
                        # Unquoted underscores are read as spaces unless
                        # preservation was requested.
                        self._cur_char = " "
                    dest.append(self._cur_char)
                    self._get_next_char()
            self.current_token = "".join(dest)
            if self.current_token == "":
                # E.g., a comment with no adjacent token text: recurse to
                # fetch the next real token (or signal end of stream).
                if self._cur_char != "":
                    return self.__next__()
                else:
                    raise StopIteration
            return self.current_token
    next = __next__ # Python 2 legacy support

    def _skip_to_significant_char(self):
        """Advance past any run of uncaptured delimiters."""
        if self._cur_char == "":
            return
        if self._cur_char is None:
            self._get_next_char()
        if self._cur_char not in self.uncaptured_delimiters:
            return
        while self._cur_char != "" and self._cur_char in self.uncaptured_delimiters:
            self._get_next_char()
        return

    def _get_next_char(self):
        """Read one character, maintaining line/column counters; "" at EOF."""
        self._cur_char = self.src.read(1)
        if self._cur_char != "":
            if self._cur_char == "\n":
                self.current_line_num += 1
                self.current_column_num = 1
            else:
                self.current_column_num += 1
        return self._cur_char

    def _handle_comment(self):
        """
        Consume a (possibly nested) comment starting at the current
        comment-begin character, capturing its text if
        ``capture_comments`` is set.
        """
        dest = []
        nesting = 0
        comment_complete = False
        while self._cur_char != "":
            if self._cur_char in self.comment_end:
                nesting -= 1
                if nesting <= 0:
                    comment_complete = True
                    self._get_next_char()
                    break
            elif self._cur_char in self.comment_begin:
                nesting += 1
            elif self.capture_comments:
                dest.append(self._cur_char)
            self._get_next_char()
        if self.capture_comments:
            self.captured_comments.append("".join(dest))
+
diff --git a/dendropy/dataio/xmlprocessing.py b/dendropy/dataio/xmlprocessing.py
new file mode 100644
index 0000000..8038a1b
--- /dev/null
+++ b/dendropy/dataio/xmlprocessing.py
@@ -0,0 +1,177 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+XML-parsing abstraction layer.
+"""
+
+from xml.etree import ElementTree
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+
class XmlNamespaces(object):
    """
    Bidirectional registry of XML namespace/prefix associations.

    A namespace URI may be bound to several prefixes, so
    ``namespace_prefix_map`` maps each namespace to a *list* of prefixes,
    while ``prefix_namespace_map`` maps each prefix back to its single
    namespace URI.
    """

    def __init__(self):
        self.namespace_prefix_map = {}
        self.prefix_namespace_map = {}

    def add_namespace(self, prefix, namespace):
        """
        Register ``prefix`` as an alias for ``namespace``.
        """
        self.namespace_prefix_map.setdefault(namespace, []).append(prefix)
        self.prefix_namespace_map[prefix] = namespace
+
class XmlObject(object):
    """
    Base wrapper around an ``ElementTree`` element (held in
    ``self._element``), exposing element queries whose results can be
    "recast" into wrapper objects via a factory callable.
    """

    def __init__(self):
        pass

    def recast_element(self, element, subelement_factory=None):
        """
        Wrap ``element`` using ``subelement_factory`` if given, otherwise
        by calling this object's own class constructor on it. Returns
        ``None`` if ``element`` is ``None``.
        """
        if element is None:
            return None
        if subelement_factory is not None:
            return subelement_factory(element)
        else:
            return self.__class__(element)

    def getiterator(self, tag, subelement_factory=None):
        """
        Yield every element in the subtree (including this element)
        matching ``tag``, recast through ``subelement_factory``.
        """
        # ``Element.getiterator()`` was deprecated since Python 2.7/3.2 and
        # removed outright in Python 3.9; ``Element.iter()`` is the
        # drop-in equivalent.
        for element in self._element.iter(tag):
            yield self.recast_element(element=element, subelement_factory=subelement_factory)

    def findall(self, tag, subelement_factory=None):
        """
        Yield all direct matches for ``tag`` (per ``Element.findall``),
        recast through ``subelement_factory``.
        """
        for element in self._element.findall(tag):
            yield self.recast_element(element, subelement_factory=subelement_factory)

    def find(self, tag, subelement_factory=None):
        """
        Return the first match for ``tag`` recast through
        ``subelement_factory``, or ``None`` if there is no match.
        """
        return self.recast_element(self._element.find(tag), subelement_factory=subelement_factory)

    def get(self, attrib_name, default=None):
        """
        Return the value of attribute ``attrib_name``, or ``default``.
        """
        return self._element.get(attrib_name, default)

    def _get_attrib(self):
        # Raw attribute dictionary of the wrapped element.
        return self._element.attrib
    attrib = property(_get_attrib)

    def _get_text(self):
        # Text content of the wrapped element (may be None).
        return self._element.text
    text = property(_get_text)
+
class XmlElement(XmlObject):
    """
    Abstraction layer around an item.

    Adds namespace-aware queries on top of ``XmlObject``: tags are
    qualified with an explicitly-passed ``namespace`` or, failing that,
    with ``default_namespace``.
    """

    def __init__(self, element, default_namespace=None):
        XmlObject.__init__(self)
        self._element = element
        self.default_namespace = default_namespace

    def subelement_factory(self, element):
        # Child wrappers inherit this element's default namespace.
        return self.__class__(element, default_namespace=self.default_namespace)

    def format_namespace(self, namespace=None):
        """
        Return the "{uri}" qualification prefix for ``namespace`` (falling
        back to ``default_namespace``), or "" if neither is set.
        """
        if namespace:
            return "{%s}" % namespace
        if self.default_namespace:
            return "{%s}" % self.default_namespace
        return ""

    def compose_tag(self, tag, namespace=None):
        """
        Qualify ``tag`` with the effective namespace. If ``tag`` is a
        list, return a "/"-joined path with every step qualified.
        """
        # Reuse format_namespace() rather than duplicating the
        # namespace-selection logic here.
        ns = self.format_namespace(namespace)
        if isinstance(tag, list):
            return "/".join( ("%s%s" % (ns, i)) for i in tag )
        else:
            return "%s%s" % (ns, tag)

    def namespaced_getiterator(self, tag, namespace=None, subelement_factory=None):
        """
        Namespace-qualified version of ``getiterator``.
        """
        if subelement_factory is None:
            subelement_factory = self.subelement_factory
        # ``Element.getiterator()`` was removed in Python 3.9;
        # ``Element.iter()`` is the equivalent replacement.
        for element in self._element.iter(self.compose_tag(tag, namespace)):
            yield self.recast_element(element=element, subelement_factory=subelement_factory)

    def namespaced_findall(self, tag, namespace=None, subelement_factory=None):
        """
        Namespace-qualified version of ``findall``.
        """
        if subelement_factory is None:
            subelement_factory = self.subelement_factory
        for element in self._element.findall(self.compose_tag(tag, namespace)):
            yield self.recast_element(element=element, subelement_factory=subelement_factory)

    def namespaced_find(self, tag, namespace=None, subelement_factory=None):
        """
        Namespace-qualified version of ``find``.
        """
        if subelement_factory is None:
            subelement_factory = self.subelement_factory
        e = self._element.find(self.compose_tag(tag, namespace))
        return self.recast_element(e, subelement_factory=subelement_factory)

    def namespaced_findtext(self, tag, namespace=None):
        """
        Namespace-qualified ``Element.findtext``: text of first match.
        """
        return self._element.findtext(self.compose_tag(tag, namespace))
+
class XmlDocument(XmlObject):
    """
    Abstraction layer around an XML document.
    """

    def __init__(self,
            file_obj=None,
            subelement_factory=None):
        """
        __init__ initializes a reference to the ElementTree parser, passing it
        a file descriptor object to be read and parsed or the
        ElementTree.Element object to be used as the root element.
        """
        XmlObject.__init__(self)
        if subelement_factory is None:
            self.subelement_factory = XmlElement
        else:
            self.subelement_factory = subelement_factory
        self.namespace_registry = XmlNamespaces()
        self.root = None
        if file_obj:
            self.parse_file(file_obj)

    def parse_string(self, source):
        "Loads an XML document from an XML string, source."
        s = StringIO(source)
        # Bug fix: parse the wrapped string buffer, not the raw string --
        # ``parse_file`` (via ``ElementTree.iterparse``) would otherwise
        # interpret the XML text itself as a filesystem path.
        return self.parse_file(s)

    def parse_file(self, source):
        """
        Loads an XML document from source, which can either be a
        filepath string or a file object.
        Custom parsing to make sure namespaces are saved.
        """
        events = "start", "start-ns", "end-ns"
        root = None
        ns_map = []
        for event, elem in ElementTree.iterparse(source, events):
            if event == "start-ns":
                # For "start-ns" events, ``elem`` is a (prefix, uri) pair.
                ns_map.append(elem)
            elif event == "start":
                # First "start" event carries the document root element.
                if root is None:
                    root = elem
        self.root = self.subelement_factory(root)
        for prefix, namespace in ns_map:
            self.namespace_registry.add_namespace(prefix=prefix, namespace=namespace)
+
diff --git a/dendropy/datamodel/__init__.py b/dendropy/datamodel/__init__.py
new file mode 100644
index 0000000..442f116
--- /dev/null
+++ b/dendropy/datamodel/__init__.py
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
diff --git a/dendropy/datamodel/basemodel.py b/dendropy/datamodel/basemodel.py
new file mode 100644
index 0000000..1681ab3
--- /dev/null
+++ b/dendropy/datamodel/basemodel.py
@@ -0,0 +1,1563 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Infrastructure for phylogenetic data objects.
+"""
+
+import os
+import copy
+import sys
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
+    from dendropy.utility.filesys import pre_py34_open as open
+from dendropy.utility import container
+from dendropy.utility import bibtex
+from dendropy.utility import textprocessing
+from dendropy.utility import urlio
+from dendropy.utility import error
+from dendropy.utility import deprecate
+
+##############################################################################
+## Keyword Processor
+
def _extract_serialization_target_keyword(kwargs, target_type):
    """
    Pop and return the serialization target and schema from ``kwargs``.

    Exactly one of the recognized target keywords ("file", "path", "url",
    "data", "stream", "string") must be present, along with a mandatory
    "schema" keyword; both are removed from ``kwargs`` in place.

    Returns a tuple ``(keyword, target_value, schema)``.

    Raises TypeError if zero or multiple target keywords are given, or if
    "schema" is missing.
    """
    target_type_keywords = ["file", "path", "url", "data", "stream", "string"]
    found_kw = [kw for kw in target_type_keywords if kw in kwargs]
    if not found_kw:
        raise TypeError("{} not specified; exactly one of the following keyword arguments required to be specified: {}".format(target_type, target_type_keywords))
    if len(found_kw) > 1:
        raise TypeError("{} specified multiple times: {}".format(target_type, found_kw))
    target = kwargs.pop(found_kw[0])
    if "schema" not in kwargs:
        raise TypeError("Mandatory keyword argument 'schema' not specified")
    return found_kw[0], target, kwargs.pop("schema")
+
+##############################################################################
+## DataObject
+
class DataObject(object):

    """
    Base class for all phylogenetic data objects.
    """

    def __init__(self, label=None):
        self._label = None
        if label is not None:
            self._set_label(label)

    def _get_label(self):
        # Accessor backing the ``label`` property.
        return self._label
    def _set_label(self, v):
        # Stored as-is; no string coercion is performed.
        self._label = v
    label = property(_get_label, _set_label)

    def clone(self, depth=1):
        """
        Creates and returns a copy of ``self``.

        Parameters
        ----------
        depth : integer
            The depth of the copy:

                - 0: shallow-copy: All member objects are references,
                  except for :attr:``annotation_set`` of top-level object and
                  member |Annotation| objects: these are full,
                  independent instances (though any complex objects in the
                  ``value`` field of |Annotation| objects are also
                  just references).
                - 1: taxon-namespace-scoped copy: All member objects are full
                  independent instances, *except* for |TaxonNamespace|
                  and |Taxon| instances: these are references.
                - 2: Exhaustive deep-copy: all objects are cloned.
        """
        if depth == 0:
            return copy.copy(self)
        if depth == 1:
            return self.taxon_namespace_scoped_copy(memo=None)
        if depth == 2:
            return copy.deepcopy(self)
        raise TypeError("Unsupported cloning depth: {}".format(depth))

    def taxon_namespace_scoped_copy(self, memo=None):
        """
        Cloning level: 1.
        Taxon-namespace-scoped copy: All member objects are full independent
        instances, *except* for |TaxonNamespace| and |Taxon|
        objects: these are preserved as references.

        Subclasses must override this; the base implementation is abstract.
        """
        raise NotImplementedError
+
+##############################################################################
+## Deserializable
+
class Deserializable(object):
    """
    Mixin class which all classes that require deserialization should subclass.
    """

    @classmethod
    def _parse_and_create_from_stream(cls, stream, schema, **kwargs):
        """
        Subclasses need to implement this method to create
        and return an instance of themselves read from the
        stream.
        """
        raise NotImplementedError

    @classmethod
    def _get_from(cls, **kwargs):
        """
        Factory method to return new object of this class from an external
        source by dispatching calls to more specialized ``get_from_*`` methods.
        Implementing classes will have a publically-exposed method, ``get()``,
        that wraps a call to this method. This allows for class-specific
        documentation of keyword arguments. E.g.::

            @classmethod
            def get(cls, **kwargs):
                '''
                ... (documentation) ...
                '''
                return cls._get_from(**kwargs)

        """
        # Note: extraction errors (TypeError for missing/duplicate target
        # keywords) are allowed to propagate directly; wrapping them in
        # ``except Exception as e: raise e`` added nothing.
        src_type, src, schema = _extract_serialization_target_keyword(kwargs, "Source")
        if src_type == "file" or src_type == "stream":
            return cls.get_from_stream(src=src, schema=schema, **kwargs)
        elif src_type == "path":
            return cls.get_from_path(src=src, schema=schema, **kwargs)
        elif src_type == "data" or src_type == "string":
            return cls.get_from_string(src=src, schema=schema, **kwargs)
        elif src_type == "url":
            return cls.get_from_url(src=src, schema=schema, **kwargs)
        else:
            raise ValueError("Unsupported source type: {}".format(src_type))

    @classmethod
    def get_from_stream(cls, src, schema, **kwargs):
        r"""
        Factory method to return new object of this class from file-like object
        ``src``.

        Parameters
        ----------
        src : file or file-like
            Source of data.
        schema : string
            Specification of data format (e.g., "nexus").
        \*\*kwargs : keyword arguments, optional
            Arguments to customize parsing, instantiation, processing, and
            accession of objects read from the data source, including schema-
            or format-specific handling. These will be passed to the underlying
            schema-specific reader for handling.

        Returns
        -------
        pdo : phylogenetic data object
            New instance of object, constructed and populated from data given
            in source.
        """
        return cls._parse_and_create_from_stream(stream=src,
                schema=schema,
                **kwargs)

    @classmethod
    def get_from_path(cls, src, schema, **kwargs):
        r"""
        Factory method to return new object of this class from file
        specified by string ``src``.

        Parameters
        ----------
        src : string
            Full file path to source of data.
        schema : string
            Specification of data format (e.g., "nexus").
        \*\*kwargs : keyword arguments, optional
            Arguments to customize parsing, instantiation, processing, and
            accession of objects read from the data source, including schema-
            or format-specific handling. These will be passed to the underlying
            schema-specific reader for handling.

        Returns
        -------
        pdo : phylogenetic data object
            New instance of object, constructed and populated from data given
            in source.
        """
        with open(src, "r", newline=None) as fsrc:
            return cls._parse_and_create_from_stream(stream=fsrc,
                    schema=schema,
                    **kwargs)

    @classmethod
    def get_from_string(cls, src, schema, **kwargs):
        r"""
        Factory method to return new object of this class from string ``src``.

        Parameters
        ----------
        src : string
            Data as a string.
        schema : string
            Specification of data format (e.g., "nexus").
        \*\*kwargs : keyword arguments, optional
            Arguments to customize parsing, instantiation, processing, and
            accession of objects read from the data source, including schema-
            or format-specific handling. These will be passed to the underlying
            schema-specific reader for handling.

        Returns
        -------
        pdo : phylogenetic data object
            New instance of object, constructed and populated from data given
            in source.
        """
        ssrc = StringIO(src)
        return cls._parse_and_create_from_stream(stream=ssrc,
                schema=schema,
                **kwargs)

    @classmethod
    def get_from_url(cls, src, schema, strip_markup=False, **kwargs):
        r"""
        Factory method to return a new object of this class from
        URL given by ``src``.

        Parameters
        ----------
        src : string
            URL of location providing source of data.
        schema : string
            Specification of data format (e.g., "nexus").
        \*\*kwargs : keyword arguments, optional
            Arguments to customize parsing, instantiation, processing, and
            accession of objects read from the data source, including schema-
            or format-specific handling. These will be passed to the underlying
            schema-specific reader for handling.

        Returns
        -------
        pdo : phylogenetic data object
            New instance of object, constructed and populated from data given
            in source.
        """
        text = urlio.read_url(src, strip_markup=strip_markup)
        ssrc = StringIO(text)
        try:
            return cls._parse_and_create_from_stream(
                    stream=ssrc,
                    schema=schema,
                    **kwargs)
        except error.DataParseError:
            # Echo the retrieved document so the user can see what the
            # server actually returned (e.g., an HTML error page) before
            # the parse error propagates.
            sys.stderr.write(text)
            raise
+
+
+##############################################################################
+## MultiReadable
+
class MultiReadable(object):
    """
    Mixin class which all classes that support multiple (e.g., aggregative) deserialization should subclass.
    """

    def _parse_and_add_from_stream(self, stream, schema, **kwargs):
        r"""
        Populates/constructs objects of this type from ``schema``-formatted
        data in the file-like object source ``stream``.

        Parameters
        ----------
        stream : file or file-like
            Source of data.
        schema : string
            Specification of data format (e.g., "nexus").
        \*\*kwargs : keyword arguments, optional
            Arguments to customize parsing, instantiation, processing, and
            accession of objects read from the data source, including schema-
            or format-specific handling. These will be passed to the underlying
            schema-specific reader for handling.

        Returns
        -------
        n : ``int`` or ``tuple`` [``int``]
            A value indicating size of data read, where "size" depends on
            the object:

                - |Tree|: **undefined**
                - |TreeList|: number of trees
                - |CharacterMatrix|: number of sequences
                - |DataSet|: ``tuple`` (number of taxon namespaces, number of tree lists, number of matrices)

        """
        raise NotImplementedError

    def _read_from(self, **kwargs):
        """
        Add data to objects of this class from an external
        source by dispatching calls to more specialized ``read_from_*`` methods.
        Implementing classes will have a publically-exposed method, ``read()``,
        that wraps a call to this method. This allows for class-specific
        documentation of keyword arguments. E.g.::

            def read(self, **kwargs):
                '''
                ... (documentation) ...
                '''
                return MultiReadable._read_from(self, **kwargs)

        """
        # Extraction errors propagate as-is; the previous
        # ``except Exception as e: raise e`` wrapper was a no-op.
        src_type, src, schema = _extract_serialization_target_keyword(kwargs, "Source")
        if src_type == "file" or src_type == "stream":
            return self.read_from_stream(src=src, schema=schema, **kwargs)
        elif src_type == "path":
            return self.read_from_path(src=src, schema=schema, **kwargs)
        elif src_type == "data" or src_type == "string":
            return self.read_from_string(src=src, schema=schema, **kwargs)
        elif src_type == "url":
            return self.read_from_url(src=src, schema=schema, **kwargs)
        else:
            raise ValueError("Unsupported source type: {}".format(src_type))

    def read_from_stream(self, src, schema, **kwargs):
        r"""
        Reads from file (exactly equivalent to just ``read()``, provided
        here as a separate method for completeness.

        Parameters
        ----------
        fileobj : file or file-like
            Source of data.
        schema : string
            Specification of data format (e.g., "nexus").
        \*\*kwargs : keyword arguments, optional
            Arguments to customize parsing, instantiation, processing, and
            accession of objects read from the data source, including schema-
            or format-specific handling. These will be passed to the underlying
            schema-specific reader for handling.

        Returns
        -------
        n : ``tuple`` [integer]
            A value indicating size of data read, where "size" depends on
            the object:

                - |Tree|: **undefined**
                - |TreeList|: number of trees
                - |CharacterMatrix|: number of sequences
                - |DataSet|: ``tuple`` (number of taxon namespaces, number of tree lists, number of matrices)

        """
        return self._parse_and_add_from_stream(stream=src, schema=schema, **kwargs)

    def read_from_path(self, src, schema, **kwargs):
        r"""
        Reads data from file specified by ``filepath``.

        Parameters
        ----------
        filepath : file or file-like
            Full file path to source of data.
        schema : string
            Specification of data format (e.g., "nexus").
        \*\*kwargs : keyword arguments, optional
            Arguments to customize parsing, instantiation, processing, and
            accession of objects read from the data source, including schema-
            or format-specific handling. These will be passed to the underlying
            schema-specific reader for handling.

        Returns
        -------
        n : ``tuple`` [integer]
            A value indicating size of data read, where "size" depends on
            the object:

                - |Tree|: **undefined**
                - |TreeList|: number of trees
                - |CharacterMatrix|: number of sequences
                - |DataSet|: ``tuple`` (number of taxon namespaces, number of tree lists, number of matrices)
        """
        with open(src, "r", newline=None) as fsrc:
            return self._parse_and_add_from_stream(stream=fsrc, schema=schema, **kwargs)

    def read_from_string(self, src, schema, **kwargs):
        r"""
        Reads a string.

        Parameters
        ----------
        src_str : string
            Data as a string.
        schema : string
            Specification of data format (e.g., "nexus").
        \*\*kwargs : keyword arguments, optional
            Arguments to customize parsing, instantiation, processing, and
            accession of objects read from the data source, including schema-
            or format-specific handling. These will be passed to the underlying
            schema-specific reader for handling.

        Returns
        -------
        n : ``tuple`` [integer]
            A value indicating size of data read, where "size" depends on
            the object:

                - |Tree|: **undefined**
                - |TreeList|: number of trees
                - |CharacterMatrix|: number of sequences
                - |DataSet|: ``tuple`` (number of taxon namespaces, number of tree lists, number of matrices)
        """
        s = StringIO(src)
        return self._parse_and_add_from_stream(stream=s, schema=schema, **kwargs)

    def read_from_url(self, src, schema, **kwargs):
        r"""
        Reads a URL source.

        Parameters
        ----------
        src : string
            URL of location providing source of data.
        schema : string
            Specification of data format (e.g., "nexus").
        \*\*kwargs : keyword arguments, optional
            Arguments to customize parsing, instantiation, processing, and
            accession of objects read from the data source, including schema-
            or format-specific handling. These will be passed to the underlying
            schema-specific reader for handling.

        Returns
        -------
        n : ``tuple`` [integer]
            A value indicating size of data read, where "size" depends on
            the object:

                - |Tree|: **undefined**
                - |TreeList|: number of trees
                - |CharacterMatrix|: number of sequences
                - |DataSet|: ``tuple`` (number of taxon namespaces, number of tree lists, number of matrices)
        """
        src_str = urlio.read_url(src)
        s = StringIO(src_str)
        return self._parse_and_add_from_stream(stream=s, schema=schema, **kwargs)
+
+##############################################################################
+## NonMultiReadable
+
class NonMultiReadable(object):
    """
    Mixin to enforce transition from DendroPy 3 to DendroPy 4 API
    """

    def error(self, funcname):
        # Raise a migration message pointing the caller at the replacement
        # class-level factory (e.g. ``read_from_path`` -> ``get_from_path``).
        read_from_func = funcname
        get_from_func = funcname.replace("read", "get")
        message_lines = (
                "The '{classname}' class no longer supports           ",
                "(re-)population by re-reading data from an external  ",
                "source. Instantiate a new object using, for example, ",
                "'{classname}.{get_from_func}()' and bind it to",
                "the variable name instead. That is, instead of:",
                "",
                "    x.{read_from_func}(...)",
                "",
                "use:",
                "",
                "    x = {classname}.{get_from_func}(...)",
                "",
                "",
                )
        template = "\n".join(message_lines)
        raise TypeError(template.format(
                classname=self.__class__.__name__,
                get_from_func=get_from_func,
                read_from_func=read_from_func))

    def read(self, stream, schema, **kwargs):
        raise NotImplementedError()

    def read_from_stream(self, fileobj, schema, **kwargs):
        self.error("read_from_stream")

    def read_from_path(self, filepath, schema, **kwargs):
        self.error("read_from_path")

    def read_from_string(self, src_str, schema, **kwargs):
        self.error("read_from_string")

    def read_from_url(self, url, schema, **kwargs):
        self.error("read_from_url")
+
+##############################################################################
+## Serializable
+
class Serializable(object):
    """
    Mixin class which all classes that require serialization should subclass.
    """

    def _format_and_write_to_stream(self, stream, schema, **kwargs):
        """
        Writes the object to the file-like object ``stream`` in ``schema``
        schema. Subclasses must implement this; the base is abstract.
        """
        raise NotImplementedError

    def _write_to(self, **kwargs):
        """
        Write this object to an external resource by dispatching calls to more
        specialized ``write_to_*`` methods. Implementing classes will have a
        publically-exposed method, ``write()``, that wraps a call to this
        method. This allows for class-specific documentation of keyword
        arguments. E.g.::

            def write(self, **kwargs):
                '''
                ... (documentation) ...
                '''
                return Serializable._write_to(self, **kwargs)

        """
        # Extraction errors propagate directly; the previous
        # ``except Exception as e: raise e`` wrapper was a no-op.
        dest_type, dest, schema = _extract_serialization_target_keyword(kwargs, "Destination")
        if dest_type == "file":
            return self.write_to_stream(dest=dest, schema=schema, **kwargs)
        elif dest_type == "path":
            return self.write_to_path(dest=dest, schema=schema, **kwargs)
        else:
            # Bug fix: this reports the *destination* keyword, not a source.
            raise ValueError("Unsupported destination type: {}".format(dest_type))

    def write(self, **kwargs):
        """
        Writes out ``self`` in ``schema`` format.

        **Mandatory Destination-Specification Keyword Argument (Exactly One of the Following Required):**

            - **file** (*file*) -- File or file-like object opened for writing.
            - **path** (*str*) -- Path to file to which to write.

        **Mandatory Schema-Specification Keyword Argument:**

            - **schema** (*str*) -- Identifier of format of data. See
              "|Schemas|" for more details.

        **Optional Schema-Specific Keyword Arguments:**

            These provide control over how the data is formatted, and supported
            argument names and values depend on the schema as specified by the
            value passed as the "``schema``" argument. See "|Schemas|" for more
            details.

        Examples
        --------

        ::

                d.write(path="path/to/file.dat",
                        schema="nexus",
                        preserve_underscores=True)
                f = open("path/to/file.dat")
                d.write(file=f,
                        schema="nexus",
                        preserve_underscores=True)

        """
        return Serializable._write_to(self, **kwargs)

    def write_to_stream(self, dest, schema, **kwargs):
        """
        Writes to file-like object ``dest``.
        """
        return self._format_and_write_to_stream(stream=dest, schema=schema, **kwargs)

    def write_to_path(self, dest, schema, **kwargs):
        """
        Writes to file specified by ``dest``. User ("~") and environment
        variables in the path are expanded before opening.
        """
        with open(os.path.expandvars(os.path.expanduser(dest)), "w") as f:
            return self._format_and_write_to_stream(stream=f, schema=schema, **kwargs)

    def as_string(self, schema, **kwargs):
        """
        Composes and returns string representation of the data.

        **Mandatory Schema-Specification Keyword Argument:**

            - **schema** (*str*) -- Identifier of format of data. See
              "|Schemas|" for more details.

        **Optional Schema-Specific Keyword Arguments:**

            These provide control over how the data is formatted, and supported
            argument names and values depend on the schema as specified by the
            value passed as the "``schema``" argument. See "|Schemas|" for more
            details.

        """
        s = StringIO()
        self._format_and_write_to_stream(stream=s, schema=schema, **kwargs)
        return s.getvalue()
+
+##############################################################################
+## Annotable
+
+class Annotable(object):
+    """
+    Mixin class which all classes that need to persist object attributes
+    or other information as metadata should subclass.
+    """
+
    def _get_annotations(self):
        # Lazily create the AnnotationSet on first access, so unannotated
        # objects pay no storage cost.
        if not hasattr(self, "_annotations"):
            self._annotations = AnnotationSet(self)
        return self._annotations
    def _set_annotations(self, annotations):
        # No-op if this exact AnnotationSet is already bound to self.
        if hasattr(self, "_annotations") \
                and annotations is self._annotations \
                and self._annotations.target is self:
            return
        if not isinstance(annotations, AnnotationSet):
            raise ValueError("Cannot set 'annotations' to object of type '{}'".format(type(annotations)))
        old_target = annotations.target
        self._annotations = annotations
        self._annotations.target = self
        # Retarget dynamic attribute-bound annotations that pointed at the
        # set's previous owner so they now read attributes from ``self``.
        for a in self._annotations:
            if a.is_attribute and a._value[0] is old_target:
                a.target = self
    # ``annotations``: the (lazily-created) AnnotationSet attached to this object.
    annotations = property(_get_annotations, _set_annotations)

    def _has_annotations(self):
        # True only if the set exists *and* is non-empty.
        return hasattr(self, "_annotations") and len(self._annotations) > 0
    has_annotations = property(_has_annotations)
+
+    def copy_annotations_from(self,
+            other,
+            attribute_object_mapper=None):
+        """
+        Copies annotations from ``other``, which must be of |Annotable|
+        type.
+
+        Copies are deep-copies, in that the |Annotation| objects added
+        to the ``annotation_set`` |AnnotationSet| collection of ``self`` are
+        independent copies of those in the ``annotate_set`` collection of
+        ``other``. However, dynamic bound-attribute annotations retain references
+        to the original objects as given in ``other``, which may or may not be
+        desirable. This is handled by updated the objects to which attributes
+        are bound via mappings found in ``attribute_object_mapper``.
+        In dynamic bound-attribute annotations, the ``_value`` attribute of the
+        annotations object (:attr:`Annotation._value`) is a tuple consisting of
+        "``(obj, attr_name)``", which instructs the |Annotation| object to
+        return "``getattr(obj, attr_name)``" (via: "``getattr(*self._value)``")
+        when returning the value of the Annotation. "``obj``" is typically the object
+        to which the |AnnotationSet| belongs (i.e., ``self``). When a copy
+        of |Annotation| is created, the object reference given in the
+        first element of the ``_value`` tuple of dynamic bound-attribute
+        annotations are unchanged, unless the id of the object reference is fo
+
+        Parameters
+        ----------
+
+        ``other`` : |Annotable|
+            Source of annotations to copy.
+
+        ``attribute_object_mapper`` : dict
+            Like the ``memo`` of ``__deepcopy__``, maps object id's to objects. The
+            purpose of this is to update the parent or owner objects of dynamic
+            attribute annotations.
+            If a dynamic attribute |Annotation|
+            gives object ``x`` as the parent or owner of the attribute (that is,
+            the first element of the :attr:`Annotation._value` tuple is
+            ``other``) and ``id(x)`` is found in ``attribute_object_mapper``,
+            then in the copy the owner of the attribute is changed to
+            ``attribute_object_mapper[id(x)]``.
+            If ``attribute_object_mapper`` is `None` (default), then the
+            following mapping is automatically inserted: ``id(other): self``.
+            That is, any references to ``other`` in any |Annotation|
+            object will be remapped to ``self``.  If really no reattribution
+            mappings are desired, then an empty dictionary should be passed
+            instead.
+
+        """
+        if hasattr(other, "_annotations"):
+            if attribute_object_mapper is None:
+                attribute_object_mapper = {id(object):self}
+            for a1 in other._annotations:
+                a2 = a1.clone(attribute_object_mapper=attribute_object_mapper)
+                if a2.is_attribute and a2._value[0] is other:
+                    a2._value = (attribute_object_mapper.get(id(other), other), a2._value[1])
+                self.annotations.add(a2)
+
+    def deep_copy_annotations_from(self, other, memo=None):
+        """
+        Note that all references to ``other`` in any annotation value (and
+        sub-annotation, and sub-sub-sub-annotation, etc.) will be
+        replaced with references to ``self``. This may not always make sense
+        (i.e., a reference to a particular entity may be absolute regardless of
+        context).
+        """
+        if hasattr(other, "_annotations"):
+            # if not isinstance(self, other.__class__) or not isinstance(other, self.__class__):
+            if type(self) is not type(other):
+                raise TypeError("Cannot deep-copy annotations from different type (unable to assume object equivalence in dynamic or nested annotations)")
+            if memo is None:
+                memo = {}
+            for a1 in other._annotations:
+                a2 = copy.deepcopy(a1, memo=memo)
+                memo[id(a1)] = a2
+                if a2.is_attribute and a1._value[0] is other:
+                    a2._value = (self, a1._value[1])
+                self.annotations.add(a2)
+            memo[id(other._annotations)] = self._annotations
+
+    # def __copy__(self):
+    #     o = self.__class__.__new__(self.__class__)
+    #     for k in self.__dict__:
+    #         if k == "_annotations":
+    #             continue
+    #         o.__dict__[k] = self.__dict__[k]
+    #     o.copy_annotations_from(self)
+
+    def __copy__(self, memo=None):
+        """
+        Cloning level: 0.
+        :attr:``annotation_set`` of top-level object and member |Annotation|
+        objects are full, independent instances. All other member objects (include
+        objects referenced by dynamically-bound attribute values of
+        |Annotation| objects) are references.
+        All member objects are references, except for
+        """
+        if memo is None:
+            memo = {}
+        other = self.__class__()
+        memo[id(self)] = other
+        for k in self.__dict__:
+            if k == "_annotations":
+                continue
+            other.__dict__[k] = copy.copy(self.__dict__[k])
+            memo[id(self.__dict__[k])] = other.__dict__[k]
+        self.deep_copy_annotations_from(other, memo=memo)
+
+    def __deepcopy__(self, memo=None):
+        # ensure clone map
+        if memo is None:
+            memo = {}
+        # get or create clone of self
+        try:
+            other = memo[id(self)]
+        except KeyError:
+            # create object without initialization
+            # other = type(self).__new__(self.__class__)
+            other = self.__class__.__new__(self.__class__)
+            # store
+            memo[id(self)] = other
+        # copy other attributes first, skipping annotations
+        for k in self.__dict__:
+            if k == "_annotations":
+                continue
+            if k in other.__dict__:
+                continue
+            other.__dict__[k] = copy.deepcopy(self.__dict__[k], memo)
+            memo[id(self.__dict__[k])] = other.__dict__[k]
+            # assert id(self.__dict__[k]) in memo
+        # create annotations
+        other.deep_copy_annotations_from(self, memo)
+        # return
+        return other
+
+##############################################################################
+## Annotation
+
class Annotation(Annotable):
    """
    Metadata storage, composition and persistance, with the following attributes:

        * ``name``
        * ``value``
        * ``datatype_hint``
        * ``name_prefix``
        * ``namespace``
        * ``annotate_as_reference``
        * ``is_hidden``
        * ``real_value_format_specifier`` - format specifier for printing or rendering
          values as string, given in Python's format specification
          mini-language. E.g., '.8f', '4E', '>04d'.

    If ``is_attribute`` is True, ``_value`` is a ``(obj, attr_name)`` tuple
    and the annotation's value is looked up dynamically via
    ``getattr(obj, attr_name)``.
    """

    def __init__(self,
            name,
            value,
            datatype_hint=None,
            name_prefix=None,
            namespace=None,
            name_is_prefixed=False,
            is_attribute=False,
            annotate_as_reference=False,
            is_hidden=False,
            label=None,
            real_value_format_specifier=None,
            ):
        # NOTE(review): ``label`` is accepted but never stored; presumably
        # retained for signature compatibility -- confirm against callers.
        self._value = value
        self.is_attribute = is_attribute
        if name_is_prefixed:
            # ``name`` is a CURIE-style qualified name (e.g. "dc:title"):
            # assigning to ``prefixed_name`` parses it into ``_name_prefix``
            # and ``name``; an explicit ``name_prefix`` argument then
            # overrides the parsed prefix.
            self.prefixed_name = name
            if name_prefix is not None:
                self._name_prefix = name_prefix
        else:
            self.name = name
            self._name_prefix = name_prefix
        self.datatype_hint = datatype_hint
        self._namespace = None
        self.namespace = namespace
        self.annotate_as_reference = annotate_as_reference
        self.is_hidden = is_hidden
        self.real_value_format_specifier = real_value_format_specifier

    def __eq__(self, o):
        # Identity semantics: two distinct Annotation objects are never
        # considered equal, even with identical contents (the value-based
        # comparison below was deliberately disabled).
        return self is o
        # if not isinstance(o, self.__class__):
        #     return False
        # if self._value != o._value:
        #     return False
        # if self.is_attribute != o.is_attribute:
        #     return False
        # if self.is_attribute and o.is_attribute:
        #     if getattr(*self._value) != getattr(*o._value):
        #         return False
        # # at this point, we have established that the values
        # # are equal
        # return (self.name == o.name
        #         and self._name_prefix == o._name_prefix
        #         and self.datatype_hint == o.datatype_hint
        #         and self._namespace == o._namespace
        #         and self.annotate_as_reference == o.annotate_as_reference
        #         and self.is_hidden == o.is_hidden
        #         and ( ((not hasattr(self, "_annotations")) and (not hasattr(o, "_annotations")))
        #             or (hasattr(self, "_annotations") and hasattr(o, "_annotations") and self._annotations == o._annotations)))

    def __hash__(self):
        # Consistent with the identity-based __eq__ above.
        return id(self)

    def __str__(self):
        return "{}='{}'".format(self.name, self.value)

    def __copy__(self):
        # Shallow copies are delegated to clone().
        return self.clone()

    # def __deepcopy__(self, memo=None):
    #     if memo is None:
    #         memo = {}
    #     o = self.__class__.__new__(self.__class__)
    #     memo[id(self)] = o
    #     for k in self.__dict__:
    #         # if k not in o.__dict__: # do not add attributes already added by base class
    #         print("--->{}: {}".format(id(o), k))
    #         o.__dict__[k] = copy.deepcopy(self.__dict__[k], memo)
    #         memo[id(self.__dict__[k])] = o.__dict__[k]
    #     return o

    def clone(self, attribute_object_mapper=None):
        """
        Essentially a shallow-copy, except that any objects in the ``_value``
        field with an ``id`` found in ``attribute_object_mapper`` will be replaced
        with ``attribute_object_mapper[id]``.

        NOTE(review): as written, ``attribute_object_mapper`` is not actually
        consulted here beyond supplying a default; the ``_value`` owner
        remapping described above is performed by the caller (see
        ``Annotable.copy_annotations_from``) -- confirm before relying on
        this method alone to remap values.
        """
        o = self.__class__.__new__(self.__class__)
        if attribute_object_mapper is None:
            attribute_object_mapper = {id(self):o}
        if hasattr(self, "_annotations"):
            # Copy (sub-)annotations attached to this annotation itself.
            o.copy_annotations_from(self)
        for k in self.__dict__:
            if k == "_annotations":
                continue
            # Shallow copy: attribute values are shared, not duplicated.
            o.__dict__[k] = self.__dict__[k]
        return o

    def is_match(self, **kwargs):
        """
        Returns True if this annotation matches *all* criteria given as
        keyword arguments (e.g. ``name="color"``, ``prefixed_name="dc:title"``,
        ``namespace=...``, ``value=...``, or any other attribute of the
        annotation). Keywords naming non-attributes are ignored.
        """
        match = True
        for k, v in kwargs.items():
            if k == "name_prefix":
                if self.name_prefix != v:
                    return False
            elif k == "prefixed_name":
                if self.prefixed_name != v:
                    return False
            elif k == "namespace":
                if self.namespace != v:
                    return False
            elif k == "value":
                if self.value != v:
                    return False
            elif hasattr(self, k):
                if getattr(self, k) != v:
                    return False
        return True

    def _get_value(self):
        # Dynamic annotations resolve their value at access time.
        if self.is_attribute:
            return getattr(*self._value)
        else:
            return self._value
    def _set_value(self, value):
        self._value = value
    value = property(_get_value, _set_value)

    def _get_name_prefix(self):
        # Defaults lazily to the DendroPy prefix.
        if self._name_prefix is None:
            self._name_prefix = "dendropy"
        return self._name_prefix
    def _set_name_prefix(self, prefix):
        self._name_prefix = prefix
    name_prefix = property(_get_name_prefix, _set_name_prefix)

    def _get_namespace(self):
        # Defaults lazily to the DendroPy namespace URI.
        if self._namespace is None:
            self._namespace = "http://packages.python.org/DendroPy/"
        return self._namespace
    def _set_namespace(self, prefix):
        self._namespace = prefix
    namespace = property(_get_namespace, _set_namespace)

    def _get_prefixed_name(self):
        return "{}:{}".format(self.name_prefix, self.name)
    def _set_prefixed_name(self, prefixed_name):
        # Splits a CURIE-style name ("dc:title") into prefix and local name.
        self._name_prefix, self.name = textprocessing.parse_curie_standard_qualified_name(prefixed_name)
    prefixed_name = property(_get_prefixed_name, _set_prefixed_name)
+
+##############################################################################
+## AnnotationSet
+
class AnnotationSet(container.OrderedSet):
    """
    An ordered set of |Annotation| objects bound to a ``target``: the
    object being annotated.
    """

    def __init__(self, target, *args):
        self.target = target
        container.OrderedSet.__init__(self, *args)
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return (container.OrderedSet.__eq__(self, other))
+                #and self.target is other.target) # we consider two
+                # AnnotationSet objects equal even if their targets are
+                # different; this is because (a) the target is convenience
+                # artifact, so client code calls to ``add_bound_attribute`` do
+                # not need to specify an owner, and (b) the target is not part
+                # of the contents of the AnnotationSet
+
+    def __str__(self):
+        return "AnnotationSet([{}])".format(( ", ".join(str(a) for a in self)))
+
+    def __deepcopy__(self, memo):
+        try:
+            o = self.__class__(target=memo[id(self.target)])
+        except KeyError:
+            raise KeyError("deepcopy error: object id {} not found: {}".format((id(self.target), repr(self.target))))
+        memo[id(self)] = o
+        for a in self:
+            x = copy.deepcopy(a, memo)
+            memo[id(a)] = x
+            o.add(x)
+        return o
+
+    def __getitem__(self, name):
+        """
+        Experimental! Inefficient! Volatile! Subject to change!
+        """
+        if isinstance(name, int):
+            return container.OrderedSet.__getitem__(self, name)
+        for a in self:
+            if a.name == name:
+                return a.value
+        return self.add_new(name, "")
+
+    def __setitem__(self, name, value):
+        """
+        Experimental! Inefficient! Volatile! Subject to change!
+        """
+        if isinstance(name, int):
+            container.OrderedSet.__setitem__(self, name, value)
+        for a in self:
+            if a.name == name:
+                a.value = value
+                return
+        self.add_new(name=name, value=value)
+
+    def add_new(self,
+            name,
+            value,
+            datatype_hint=None,
+            name_prefix=None,
+            namespace=None,
+            name_is_prefixed=False,
+            is_attribute=False,
+            annotate_as_reference=False,
+            is_hidden=False,
+            real_value_format_specifier=None,
+            ):
+        """
+        Add an annotation.
+
+        Parameters
+        ----------
+        name : string
+            The property/subject/field of the annotation (e.g. "color",
+            "locality", "dc:citation")
+        value: string
+            The content of the annotation.
+        datatype_hint : string, optional
+            Mainly for NeXML output (e.g. "xsd:string").
+        namespace_prefix : string, optional
+            Mainly for NeXML output (e.g. "dc:").
+        namespace : string, optional
+            Mainly for NeXML output (e.g. "http://www.w3.org/XML/1998/namespace").
+        name_is_prefixed : string, optional
+            Mainly for NeXML *input*: name will be split into prefix and local part
+            before storage (e.g., "dc:citations" will result in prefix = "dc" and
+            name="citations")
+        is_attribute : boolean, optional
+            If value is passed as a tuple of (object, "attribute_name") and this
+            is True, then actual content will be the result of calling
+            ``getattr(object, "attribute_name")``.
+        annotate_as_reference : boolean, optional
+            The value should be interpreted as a URI that points to content.
+        is_hidden : boolean, optional
+            Do not write or print this annotation when writing data.
+        real_value_format_specifier : str
+          Format specifier for printing or rendering values as string, given
+          in Python's format specification mini-language. E.g., '.8f', '4E',
+          '>04d'.
+
+        Returns
+        -------
+        annotation : |Annotation|
+            The new |Annotation| created.
+        """
+        if not name_is_prefixed:
+            if name_prefix is None and namespace is None:
+                name_prefix = "dendropy"
+                namespace = "http://packages.python.org/DendroPy/"
+            elif name_prefix is None:
+                raise TypeError("Cannot specify 'name_prefix' for unqualified name without specifying 'namespace'")
+            elif namespace is None:
+                raise TypeError("Cannot specify 'namespace' for unqualified name without specifying 'name_prefix'")
+        else:
+            if namespace is None:
+                raise TypeError("Cannot specify qualified name without specifying 'namespace'")
+        annote = Annotation(
+                name=name,
+                value=value,
+                datatype_hint=datatype_hint,
+                name_prefix=name_prefix,
+                namespace=namespace,
+                name_is_prefixed=name_is_prefixed,
+                is_attribute=is_attribute,
+                annotate_as_reference=annotate_as_reference,
+                is_hidden=is_hidden,
+                real_value_format_specifier=real_value_format_specifier,
+                )
+        return self.add(annote)
+
+    def add_bound_attribute(self,
+            attr_name,
+            annotation_name=None,
+            datatype_hint=None,
+            name_prefix=None,
+            namespace=None,
+            name_is_prefixed=False,
+            annotate_as_reference=False,
+            is_hidden=False,
+            real_value_format_specifier=None,
+            owner_instance=None,
+            ):
+        """
+        Add an attribute of an object as a dynamic annotation. The value of the
+        annotation will be dynamically bound to the value of the attribute.
+
+        Parameters
+        ----------
+        attr_name : string
+            The (string) name of the attribute to be used as the source of the
+            content or value of the annotation.
+        annotation_name : string, optional
+            Use this string as the annotation field/name rather than the attribute
+            name.
+        datatype_hint : string, optional
+            Mainly for NeXML output (e.g. "xsd:string").
+        namespace_prefix : string, optional
+            Mainly for NeXML output (e.g. "dc:").
+        namespace : string, optional
+            Mainly for NeXML output (e.g. "http://www.w3.org/XML/1998/namespace").
+        name_is_prefixed : string, optional
+            Mainly for NeXML *input*: name will be split into prefix and local part
+            before storage (e.g., "dc:citations" will result in prefix = "dc" and
+            name="citations")
+        annotate_as_reference : bool, optional
+            The value should be interpreted as a URI that points to content.
+        is_hidden : bool, optional
+            Do not write or print this annotation when writing data.
+        owner_instance : object, optional
+            The object whose attribute is to be used as the value of the
+            annotation. Defaults to ``self.target``.
+
+        Returns
+        -------
+        annotation : |Annotation|
+            The new |Annotation| created.
+        """
+        if annotation_name is None:
+            annotation_name = attr_name
+        if owner_instance is None:
+            owner_instance = self.target
+        if not hasattr(owner_instance, attr_name):
+            raise AttributeError(attr_name)
+        if not name_is_prefixed:
+            if name_prefix is None and namespace is None:
+                name_prefix = "dendropy"
+                namespace = "http://packages.python.org/DendroPy/"
+            elif name_prefix is None:
+                raise TypeError("Cannot specify 'name_prefix' for unqualified name without specifying 'namespace'")
+            elif namespace is None:
+                raise TypeError("Cannot specify 'namespace' for unqualified name without specifying 'name_prefix'")
+        else:
+            if namespace is None:
+                raise TypeError("Cannot specify qualified name without specifying 'namespace'")
+        annote = Annotation(
+                name=annotation_name,
+                value=(owner_instance, attr_name),
+                datatype_hint=datatype_hint,
+                name_prefix=name_prefix,
+                namespace=namespace,
+                name_is_prefixed=name_is_prefixed,
+                is_attribute=True,
+                annotate_as_reference=annotate_as_reference,
+                is_hidden=is_hidden,
+                real_value_format_specifier=real_value_format_specifier,
+                )
+        return self.add(annote)
+
+    def add_citation(self,
+            citation,
+            read_as="bibtex",
+            store_as="bibtex",
+            name_prefix=None,
+            namespace=None,
+            is_hidden=False):
+        """
+        Add a citation as an annotation.
+
+        Parameters
+        ----------
+        citation : string or dict or `BibTexEntry`
+            The citation to be added. If a string, then it must be a
+            BibTex-formatted entry. If a dictionary, then it must have
+            BibTex fields as keys and contents as values.
+        read_as : string, optional
+            Specifies the format/schema/structure of the citation. Currently
+            only supports 'bibtex'.
+        store_as : string, optional
+            Specifies how to record the citation, with one of the
+            following strings as values: "bibtex" (a set of annotations, where
+            each BibTex field becomes a separate annotation); "prism"
+            (a set of PRISM [Publishing Requirements for Industry Standard
+            Metadata] annotations); "dublin" (A set of of Dublic Core
+            annotations). Defaults to "bibtex".
+        name_prefix : string, optional
+            Mainly for NeXML output (e.g. "dc:").
+        namespace : string, optional
+            Mainly for NeXML output (e.g. "http://www.w3.org/XML/1998/namespace").
+        is_hidden : boolean, optional
+            Do not write or print this annotation when writing data.
+
+        Returns
+        -------
+        annotation : |Annotation|
+            The new |Annotation| created.
+        """
+        if read_as == "bibtex":
+            return self.add_bibtex(citation=citation,
+                    store_as=store_as,
+                    name_prefix=name_prefix,
+                    namespace=namespace,
+                    is_hidden=is_hidden)
+        else:
+            raise ValueError("Source format '{}' is not supported".format(read_as))
+
+    def add_bibtex(self,
+            citation,
+            store_as="bibtex",
+            name_prefix=None,
+            namespace=None,
+            is_hidden=False):
+        """
+        Add a citation as an annotation.
+
+        Parameters
+        ----------
+        citation : string or dict or `BibTexEntry`
+            The citation to be added. If a string, then it must be a
+            BibTex-formatted entry. If a dictionary, then it must have
+            BibTex fields as keys and contents as values.
+        store_as : string, optional
+            Specifies how to record the citation, with one of the
+            following strings as values: "bibtex" (a set of annotations, where
+            each BibTex field becomes a separate annotation); "prism"
+            (a set of PRISM [Publishing Requirements for Industry Standard
+            Metadata] annotations); "dublin" (A set of of Dublic Core
+            annotations). Defaults to "bibtex".
+        name_prefix : string, optional
+            Mainly for NeXML output (e.g. "dc:").
+        namespace : string, optional
+            Mainly for NeXML output (e.g. "http://www.w3.org/XML/1998/namespace").
+        is_hidden : boolean, optional
+            Do not write or print this annotation when writing data.
+
+        Returns
+        -------
+        annotation : |Annotation|
+            The new |Annotation| created.
+        """
+        bt = bibtex.BibTexEntry(citation)
+        bt_dict = bt.fields_as_dict()
+
+        if name_prefix is None and namespace is not None:
+            raise TypeError("Cannot specify 'name_prefix' for unqualified name without specifying 'namespace'")
+        elif namespace is None and name_prefix is not None:
+            raise TypeError("Cannot specify 'namespace' for unqualified name without specifying 'name_prefix'")
+
+        if store_as.lower().startswith("bibtex"):
+            if name_prefix is None and namespace is None:
+                name_prefix = "bibtex"
+                namespace = "http://www.edutella.org/bibtex#"
+            self.add_new(
+                    name="bibtype",
+                    value=bt.bibtype,
+                    datatype_hint="xsd:string",
+                    name_prefix=name_prefix,
+                    namespace=namespace,
+                    name_is_prefixed=False,
+                    is_attribute=False,
+                    annotate_as_reference=False,
+                    is_hidden=is_hidden)
+            self.add_new(
+                    name="citekey",
+                    value=bt.citekey,
+                    datatype_hint="xsd:string",
+                    name_prefix=name_prefix,
+                    namespace=namespace,
+                    name_is_prefixed=False,
+                    is_attribute=False,
+                    annotate_as_reference=False,
+                    is_hidden=is_hidden)
+            for entry_key, entry_value in bt_dict.items():
+                self.add_new(
+                        name=entry_key,
+                        value=entry_value,
+                        datatype_hint="xsd:string",
+                        name_prefix=name_prefix,
+                        namespace=namespace,
+                        name_is_prefixed=False,
+                        is_attribute=False,
+                        annotate_as_reference=False,
+                        is_hidden=is_hidden)
+        # elif store_as.lower().startswith("bibtex-record"):
+        #     if name_prefix is None and namespace is None:
+        #         name_prefix = "dendropy"
+        #         namespace = "http://packages.python.org/DendroPy/"
+        #     self.add_new(
+        #             name="bibtex",
+        #             value=bt.as_compact_bibtex(),
+        #             datatype_hint="xsd:string",
+        #             name_is_prefixed=False,
+        #             name_prefix=name_prefix,
+        #             namespace=namespace,
+        #             is_attribute=False,
+        #             annotate_as_reference=False,
+        #             is_hidden=is_hidden)
+        elif store_as.lower().startswith("prism"):
+            prism_map = {
+                    'volume': bt_dict.get('volume', None),
+                    'publicationName':  bt_dict.get('journal', None),
+                    'pageRange': bt_dict.get('pages', None),
+                    'publicationDate': bt_dict.get('year', None),
+                    }
+            if name_prefix is None and namespace is None:
+                name_prefix = "prism"
+                namespace = "http://prismstandard.org/namespaces/1.2/basic/"
+            for field, value in prism_map.items():
+                if value is None:
+                    continue
+                self.add_new(
+                        name=field,
+                        value=value,
+                        datatype_hint="xsd:string",
+                        name_prefix=name_prefix,
+                        namespace=namespace,
+                        name_is_prefixed=False,
+                        is_attribute=False,
+                        annotate_as_reference=False,
+                        is_hidden=is_hidden)
+        elif store_as.lower().startswith("dublin"):
+            dc_map = {
+                    'title': bt_dict.get('title', None),
+                    'creator':  bt_dict.get('author', None),
+                    'publisher': bt_dict.get('journal', None),
+                    'date': bt_dict.get('year', None),
+                    }
+            if name_prefix is None and namespace is None:
+                name_prefix = "dc"
+                namespace = "http://purl.org/dc/elements/1.1/"
+            for field, value in dc_map.items():
+                if value is None:
+                    continue
+                self.add_new(
+                        name=field,
+                        value=value,
+                        datatype_hint="xsd:string",
+                        name_is_prefixed=False,
+                        name_prefix=name_prefix,
+                        namespace=namespace,
+                        is_attribute=False,
+                        annotate_as_reference=False,
+                        is_hidden=is_hidden)
+        else:
+            raise ValueError("Unrecognized composition specification: '{}'".format(store_as))
+
+    def findall(self, **kwargs):
+        """
+        Returns AnnotationSet of Annotation objects associated with self.target
+        that match based on *all* criteria specified in keyword arguments::
+
+            >>> notes = tree.annotations.findall(name="color")
+            >>> notes = tree.annotations.findall(namespace="http://packages.python.org/DendroPy/")
+            >>> notes = tree.annotations.findall(namespace="http://packages.python.org/DendroPy/",
+                                          name="color")
+            >>> notes = tree.annotations.findall(name_prefix="dc")
+            >>> notes = tree.annotations.findall(prefixed_name="dc:color")
+
+        If no matches are found, the return AnnotationSet is empty.
+
+        If no keyword arguments are given, *all* annotations are returned::
+
+            >>> notes = tree.annotations.findall()
+
+        Returns
+        -------
+        results : |AnnotationSet| or `None`
+            |AnnotationSet| containing |Annotation| objects that
+            match criteria, or `None` if no matching annotations found.
+        """
+        results = []
+        for a in self:
+            if a.is_match(**kwargs):
+                results.append(a)
+        results = AnnotationSet(self.target, results)
+        return results
+
+    def find(self, **kwargs):
+        """
+        Returns the *first* Annotation associated with self.target
+        which matches based on *all* criteria specified in keyword arguments::
+
+            >>> note = tree.annotations.find(name="color")
+            >>> note = tree.annotations.find(name_prefix="dc", name="color")
+            >>> note = tree.annotations.find(prefixed_name="dc:color")
+
+        If no match is found, None is returned.
+
+        If no keyword arguments are given, a TypeError is raised.
+
+        Returns
+        -------
+        results : |Annotation| or `None`
+            First |Annotation| object found that matches criteria, or
+            `None` if no matching annotations found.
+        """
+        if "default" in kwargs:
+            default = kwargs["default"]
+            del kwargs["default"]
+        else:
+            default = None
+        if not kwargs:
+            raise TypeError("Search criteria not specified")
+        for a in self:
+            if a.is_match(**kwargs):
+                return a
+        return default
+
+    def get_value(self, name, default=None):
+        """
+        Returns the *value* of the *first* Annotation associated with
+        self.target which has ``name`` in the name field.
+
+        If no match is found, then ``default`` is returned.
+
+        Parameters
+        ----------
+        name : string
+            Name of |Annotation| object whose value is to be returned.
+
+        default : any, optional
+            Value to return if no matching |Annotation| object found.
+
+        Returns
+        -------
+        results : |Annotation| or `None`
+            ``value`` of first |Annotation| object found that matches
+            criteria, or `None` if no matching annotations found.
+        """
+        for a in self:
+            if a.is_match(name=name):
+                return a.value
+        return default
+
+    def require_value(self, name):
+        """
+        Returns the *value* of the *first* Annotation associated with
+        self.target which has ``name`` in the name field.
+
+        If no match is found, then KeyError is raised.
+
+        Parameters
+        ----------
+        name : string
+            Name of |Annotation| object whose value is to be returned.
+
+        Returns
+        -------
+        results : |Annotation| or `None`
+            ``value`` of first |Annotation| object found that matches
+            criteria.
+        """
+        v = self.get_value(name, default=None)
+        if v is None:
+            raise KeyError(name)
+        return v
+
+    def drop(self, **kwargs):
+        """
+        Removes Annotation objects that match based on *all* criteria specified
+        in keyword arguments.
+
+        Remove all annotation objects with ``name`` ==
+        "color"::
+
+            >>> tree.annotations.drop(name="color")
+
+        Remove all annotation objects with ``namespace`` ==
+        "http://packages.python.org/DendroPy/"::
+
+            >>> tree.annotations.drop(namespace="http://packages.python.org/DendroPy/")
+
+        Remove all annotation objects with ``namespace`` ==
+        "http://packages.python.org/DendroPy/" *and* ``name`` == "color"::
+
+            >>> tree.annotations.drop(namespace="http://packages.python.org/DendroPy/",
+                    name="color")
+
+        Remove all annotation objects with ``name_prefix`` == "dc"::
+
+            >>> tree.annotations.drop(name_prefix="dc")
+
+        Remove all annotation objects with ``prefixed_name`` == "dc:color"::
+
+            >>> tree.annotations.drop(prefixed_name="dc:color")
+
+        If no keyword argument filter criteria are given, *all* annotations are
+        removed::
+
+            >>> tree.annotations.drop()
+
+        Returns
+        -------
+        results : |AnnotationSet|
+            |AnnotationSet| containing |Annotation| objects that
+            were removed.
+        """
+        to_remove = []
+        for a in self:
+            if a.is_match(**kwargs):
+                to_remove.append(a)
+        for a in to_remove:
+            self.remove(a)
+        return AnnotationSet(self.target, to_remove)
+
+    def values_as_dict(self, **kwargs):
+        """
+        Returns annotation set as a dictionary. The keys and values for the dictionary will
+        be generated based on the following keyword arguments:
+
+        Keyword Arguments
+        -----------------
+        key_attr : string
+            String specifying an Annotation object attribute name to be used
+            as keys for the dictionary.
+        key_fn : string
+            Function that takes an Annotation object as an argument and returns
+            the value to be used as a key for the dictionary.
+        value_attr : string
+            String specifying an Annotation object attribute name to be used
+            as values for the dictionary.
+        value_fn : string
+            Function that takes an Annotation object as an argument and returns
+            the value to be used as a value for the dictionary.
+
+        At most one of ``key_attr`` or ``key_fn`` can be specified. If neither
+        is specified, then by default the keys are generated from Annotation.name.
+        At most one of ``value_attr`` or ``value_fn`` can be specified. If neither
+        is specified, then by default the values are generated from Annotation.value.
+        Key collisions will result in the dictionary entry for that key being
+        overwritten.
+
+        Returns
+        -------
+        values : dict
+        """
+        if "key_attr" in kwargs and "key_fn" in kwargs:
+            raise TypeError("Cannot specify both 'key_attr' and 'key_fn'")
+        elif "key_attr" in kwargs:
+            key_attr = kwargs["key_attr"]
+            key_fn = lambda a: getattr(a, key_attr)
+        elif "key_fn" in kwargs:
+            key_fn = kwargs["key_fn"]
+        else:
+            key_fn = lambda a: a.name
+        if "value_attr" in kwargs and "value_fn" in kwargs:
+            raise TypeError("Cannot specify both 'value_attr' and 'value_fn'")
+        elif "value_attr" in kwargs:
+            value_attr = kwargs["value_attr"]
+            value_fn = lambda a: getattr(a, value_attr)
+        elif "value_fn" in kwargs:
+            value_fn = kwargs["value_fn"]
+        else:
+            value_fn = lambda a: a.value
+        d = {}
+        for a in self:
+            d[key_fn(a)] = value_fn(a)
+        return d
+
diff --git a/dendropy/datamodel/charmatrixmodel.py b/dendropy/datamodel/charmatrixmodel.py
new file mode 100644
index 0000000..ebdc668
--- /dev/null
+++ b/dendropy/datamodel/charmatrixmodel.py
@@ -0,0 +1,1978 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Character and character-sequence data structures.
+"""
+
+import warnings
+import copy
+import collections
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+from dendropy.utility import error
+from dendropy.utility import deprecate
+from dendropy.utility import container
+from dendropy.datamodel import charstatemodel
+from dendropy.datamodel.charstatemodel import DNA_STATE_ALPHABET
+from dendropy.datamodel.charstatemodel import RNA_STATE_ALPHABET
+from dendropy.datamodel.charstatemodel import NUCLEOTIDE_STATE_ALPHABET
+from dendropy.datamodel.charstatemodel import PROTEIN_STATE_ALPHABET
+from dendropy.datamodel.charstatemodel import RESTRICTION_SITES_STATE_ALPHABET
+from dendropy.datamodel.charstatemodel import INFINITE_SITES_STATE_ALPHABET
+from dendropy.datamodel import basemodel
+from dendropy.datamodel import taxonmodel
+from dendropy import dataio
+
+###############################################################################
+## ContinuousCharElement
+
class ContinuousCharElement(
        basemodel.DataObject,
        basemodel.Annotable):
    """
    A single continuous-valued character cell: a value together with an
    optional reference to the column definition that describes it.
    """
    def __init__(self, value, column_def=None, label=None):
        """
        Parameters
        ----------
        value : object
            The continuous character value stored by this element.
        column_def : object, optional
            Column (character) definition associated with this value.
        label : str, optional
            Label for this element; passed through to
            ``basemodel.DataObject``.
        """
        basemodel.DataObject.__init__(self,
                label=label)
        self.value = value
        self.column_def = column_def
+
+###############################################################################
+## CharacterType
+
class CharacterType(
        basemodel.DataObject,
        basemodel.Annotable):
    """
    A character format or type of a particular column: i.e., maps a particular
    set of character state definitions to a column in a character matrix.
    """

    def __init__(self,
            label=None,
            state_alphabet=None):
        """
        Parameters
        ----------
        label : str, optional
            Label for this character type.
        state_alphabet : |StateAlphabet|, optional
            State alphabet to associate with this column.
        """
        basemodel.DataObject.__init__(self, label=label)
        self._state_alphabet = None
        self.state_alphabet = state_alphabet

    def _get_state_alphabet(self):
        """
        The |StateAlphabet| representing the state alphabet for this
        column: i.e., the collection of symbols and the state identities to
        which they map.
        """
        return self._state_alphabet
    def _set_state_alphabet(self, value):
        self._state_alphabet = value
    state_alphabet = property(_get_state_alphabet, _set_state_alphabet)

    def __copy__(self, memo=None):
        # Copying is deliberately disallowed; NOTE(review): presumably
        # because character types are meant to be shared by reference
        # across columns -- confirm against callers.
        raise TypeError("Cannot directly copy {}".format(self.__class__.__name__))

    def taxon_namespace_scoped_copy(self, memo=None):
        # Namespace-scoped copying is likewise disallowed for this class.
        raise TypeError("Cannot directly copy {}".format(self.__class__.__name__))

    def __deepcopy__(self, memo=None):
        # Deep copy is delegated to Annotable so that metadata annotations
        # are cloned along with the object.
        return basemodel.Annotable.__deepcopy__(self, memo=memo)
+
+###############################################################################
+## CharacterDataSequence
+
class CharacterDataSequence(
        basemodel.Annotable,
        ):
    """
    A sequence of character values or values for a particular taxon or entry in
    a data matrix.

    Objects of this class can be (almost) treated as simple lists, where the
    elements are the values of characters (typically, real values in the case
    of continuous data, and special instances of |StateIdentity| objects in the
    case of discrete data).

    Character type data (represented by |CharacterType| instances) and metadata
    annotations (represented by |AnnotationSet| instances), if any, are
    maintained in a parallel list that need to be accessed separately using the
    index of the value to which the data correspond. So, for example, the
    |AnnotationSet| object containing the metadata annotations for the first
    value in a sequence, ``s[0]``, is available through
    ``s.annotations_at(0)``, while the character type information for that
    first element is available through ``s.character_type_at(0)`` and can be
    set through ``s.set_character_type_at(0, c)``.

    In most cases where metadata annotations and character type information are
    not needed, treating objects of this class as a simple list provides all
    the functionality needed. Where metadata annotations or character type
    information are required, all the standard list mutation methods (e.g.,
    ``CharacterDataSequence.insert``, ``CharacterDataSequence.append``,
    ``CharacterDataSequence.extend``) also take optional ``character_type``
    and ``character_annotations`` argument in addition to the primary
    ``character_value`` argument, thus allowing for setting of the value,
    character type, and annotation set simultaneously. While iteration over
    character values are available through the standard list iteration
    interface, the method ``CharacterDataSequence.iter_cells()`` provides for
    iterating over ``<character-value, character-type,
    character-annotation-set>`` triplets.

    """

    ###############################################################################
    ## Life-cycle

    def __init__(self,
            character_values=None,
            character_types=None,
            character_annotations=None):
        """
        Parameters
        ----------
        character_values : iterable of values
            A set of values for this sequence.
        character_types : iterable of |CharacterType| objects, optional
            Parallel iterable of character-type descriptions.
        character_annotations : iterable of |AnnotationSet| objects, optional
            Parallel iterable of per-character metadata annotations.
        """
        # Three parallel lists kept at the same length at all times: the
        # values themselves, their (optional) character types, and their
        # (optional) metadata annotation sets.
        self._character_values = []
        self._character_types = []
        self._character_annotations = []
        if character_values:
            self.extend(
                    character_values=character_values,
                    character_types=character_types,
                    character_annotations=character_annotations)

    ###############################################################################
    ## Sequence Access and Mutation

    def values(self):
        """
        Returns list of values of this vector.

        Note that this is the *live* internal list, not a copy; mutating it
        directly will desynchronize it from the parallel character-type and
        annotation lists.

        Returns
        -------
        v : list
            List of values making up this vector.
        """
        return self._character_values

    def symbols_as_list(self):
        """
        Returns list of string representation of values of this vector.

        Returns
        -------
        v : list
            List of string representation of values making up this vector.
        """
        return list(str(cs) for cs in self._character_values)

    def symbols_as_string(self, sep=""):
        """
        Returns values of this vector as a single string, with individual value
        elements separated by ``sep``.

        Returns
        -------
        s : string
            String representation of values making up this vector.
        """
        return sep.join(str(cs) for cs in self._character_values)

    def __str__(self):
        return self.symbols_as_string()

    def append(self, character_value, character_type=None, character_annotations=None):
        """
        Adds a value to ``self``.

        Parameters
        ----------
        character_value : object
            Value to be stored.
        character_type : |CharacterType|
            Description of character value.
        character_annotations : |AnnotationSet|
            Metadata annotations associated with this character.
        """
        self._character_values.append(character_value)
        self._character_types.append(character_type)
        self._character_annotations.append(character_annotations)

    def extend(self, character_values, character_types=None, character_annotations=None):
        """
        Extends ``self`` with values.

        Parameters
        ----------
        character_values : iterable of objects
            Values to be stored.
        character_types : iterable of |CharacterType| objects
            Descriptions of character values. If given, must be the same
            length as ``character_values``.
        character_annotations : iterable |AnnotationSet| objects
            Metadata annotations associated with characters. If given, must
            be the same length as ``character_values``.
        """
        self._character_values.extend(character_values)
        # Pad the parallel lists with None when no types/annotations are
        # supplied so the three lists stay the same length.
        if character_types is None:
            self._character_types.extend( [None] * len(character_values) )
        else:
            assert len(character_types) == len(character_values)
            self._character_types.extend(character_types)
        if character_annotations is None:
            self._character_annotations.extend( [None] * len(character_values) )
        else:
            assert len(character_annotations) == len(character_values)
            self._character_annotations.extend(character_annotations)

    def __len__(self):
        return len(self._character_values)

    def __getitem__(self, idx):
        return self._character_values[idx]

    def __setitem__(self, idx, value):
        self._character_values[idx] = value

    def __iter__(self):
        # Fixed: the original returned ``self.__next__()`` (a generator),
        # which worked for iteration but meant ``next(seq)`` returned a
        # brand-new generator object instead of a value. Returning a real
        # iterator over the value list is equivalent for iteration and
        # conforms to the iterator protocol.
        return iter(self._character_values)

    def __next__(self):
        # Legacy generator retained for backward compatibility with callers
        # that invoked it directly; prefer iterating over the sequence.
        for v in self._character_values:
            yield v

    next = __next__ # Python 2 legacy support

    def iter_cells(self):
        """
        Iterate over triplets of character values and associated
        |CharacterType| and |AnnotationSet| instances.
        """
        for v, t, a in zip(self._character_values, self._character_types, self._character_annotations):
            yield v, t, a

    def __delitem__(self, idx):
        # Delete from all three parallel lists to keep them synchronized.
        del self._character_values[idx]
        del self._character_types[idx]
        del self._character_annotations[idx]

    def set_at(self, idx, character_value, character_type=None, character_annotations=None):
        """
        Set value and associated character type and metadata annotations for
        element at ``idx``, growing the sequence with `None` entries if
        ``idx`` is beyond the current end.

        Parameters
        ----------
        idx : integer
            Index of element to set.
        character_value : object
            Value to be stored.
        character_type : |CharacterType|
            Description of character value.
        character_annotations : |AnnotationSet|
            Metadata annotations associated with this character.
        """
        to_add = (idx+1) - len(self._character_values)
        while to_add > 0:
            self.append(None)
            to_add -= 1
        self._character_values[idx] = character_value
        self._character_types[idx] = character_type
        self._character_annotations[idx] = character_annotations

    def insert(self, idx, character_value, character_type=None, character_annotations=None):
        """
        Insert value and associated character type and metadata annotations for
        element at ``idx``.

        Parameters
        ----------
        idx : integer
            Index of element to set.
        character_value : object
            Value to be stored.
        character_type : |CharacterType|
            Description of character value.
        character_annotations : |AnnotationSet|
            Metadata annotations associated with this character.
        """
        self._character_values.insert(idx, character_value)
        self._character_types.insert(idx, character_type)
        self._character_annotations.insert(idx, character_annotations)

    def value_at(self, idx):
        """
        Return value of character at ``idx``.

        Parameters
        ----------
        idx : integer
            Index of element value to return.

        Returns
        -------
        c : object
            Value of character at index ``idx``.
        """
        return self._character_values[idx]

    def character_type_at(self, idx):
        """
        Return type of character at ``idx``.

        Parameters
        ----------
        idx : integer
            Index of element character type to return.

        Returns
        -------
        c : |CharacterType|
            |CharacterType| associated with character index ``idx``.
        """
        return self._character_types[idx]

    def annotations_at(self, idx):
        """
        Return metadata annotations of character at ``idx``, lazily creating
        an empty |AnnotationSet| if none exists yet.

        Parameters
        ----------
        idx : integer
            Index of element annotations to return.

        Returns
        -------
        c : |AnnotationSet|
            |AnnotationSet| representing metadata annotations of character at index ``idx``.
        """
        if self._character_annotations[idx] is None:
            self._character_annotations[idx] = basemodel.AnnotationSet()
        return self._character_annotations[idx]

    def has_annotations_at(self, idx):
        """
        Return `True` if character at ``idx`` has metadata annotations.

        Parameters
        ----------
        idx : integer
            Index of element annotations to check.

        Returns
        -------
        b : bool
            `True` if character at ``idx`` has metadata annotations, `False`
            otherwise.
        """
        return self._character_annotations[idx] is not None

    def set_character_type_at(self, idx, character_type):
        """
        Set type of character at ``idx``.

        Parameters
        ----------
        idx : integer
            Index of element character type to set.
        """
        self._character_types[idx] = character_type

    def set_annotations_at(self, idx, annotations):
        """
        Set metadata annotations of character at ``idx``.

        Parameters
        ----------
        idx : integer
            Index of element annotations to set.
        """
        self._character_annotations[idx] = annotations
+
+###############################################################################
+## Subset of Character (Columns)
+
class CharacterSubset(
        basemodel.DataObject,
        basemodel.Annotable,
        ):
    """
    A named subset of the columns of a character matrix, recorded as a set
    of 0-based column indices.
    """

    def __init__(self, label=None, character_indices=None):
        """
        Parameters
        ----------
        label: str
            Name of this subset.
        character_indices: iterable of ``int``
            Iterable of 0-based (integer) indices of column positions that
            constitute this subset.

        """
        basemodel.DataObject.__init__(self, label=label)
        if character_indices is None:
            character_indices = ()
        self.character_indices = set(character_indices)

    def __len__(self):
        # Number of columns in this subset.
        return len(self.character_indices)

    def __iter__(self):
        # Iterate over the (unordered) column indices.
        return iter(self.character_indices)

    def __deepcopy__(self, memo):
        # Delegate to Annotable so metadata annotations are cloned too.
        return basemodel.Annotable.__deepcopy__(self, memo=memo)
+
+###############################################################################
+## CharacterMatrix
+
+class CharacterMatrix(
+        taxonmodel.TaxonNamespaceAssociated,
+        basemodel.Annotable,
+        basemodel.Deserializable,
+        basemodel.NonMultiReadable,
+        basemodel.Serializable,
+        basemodel.DataObject):
+    """
+    A data structure that manages assocation of operational taxononomic unit
+    concepts to sequences of character state identities or values.
+
+    This is a base class that provides general functionality; derived classes
+    specialize for particular data types. You will not be using the class
+    directly, but rather one of the derived classes below, specialized for data
+    types such as DNA, RNA, continuous, etc.
+
+    This class and derived classes behave like a dictionary where the keys are
+    |Taxon| objects and the values are `CharacterDataSequence` objects. Access
+    to sequences based on taxon labels as well as indexes are also provided.
+    Numerous methods are provided to manipulate and iterate over sequences.
+    Character partitions can be managed through `CharacterSubset` objects,
+    while management of detailed metadata on character types are available
+    through |CharacterType| objects.
+
+    Objects can be instantiated by reading data from external sources through
+    the usual ``get_from_stream()``, ``get_from_path()``, or
+    ``get_from_string()`` functions. In addition, a single matrix object can be
+    instantiated from multiple matrices (``concatenate()``) or data sources
+    (``concatenate_from_paths``).
+
+    A range of methods also exist for importing data from another matrix object.
+    These vary depending on how "new" and "existing" are treated.  A "new"
+    sequence is a sequence in the other matrix associated with a |Taxon|
+    object for which there is no sequence defined in the current matrix.  An
+    "existing" sequence is a sequence in the other matrix associated with a
+    |Taxon| object for which there *is* a sequence defined in the
+    current matrix.
+
+    +---------------------------------+---------------------------------------------+--------------------------------------------+
+    |                                 | New Sequences: IGNORED                      | New Sequences: ADDED                       |
+    +=================================+=============================================+============================================+
+    | Existing Sequences: IGNORED     | [NO-OP]                                     | :meth:`CharacterMatrix.add_sequences()`    |
+    +---------------------------------+---------------------------------------------+--------------------------------------------+
+    | Existing Sequences: OVERWRITTEN | :meth:`CharacterMatrix.replace_sequences()` | :meth:`CharacterMatrix.update_sequences()` |
+    +---------------------------------+---------------------------------------------+--------------------------------------------+
+    | Existing Sequences: EXTENDED    | :meth:`CharacterMatrix.extend_sequences()`  | :meth:`CharacterMatrix.extend_matrix()`    |
+    +---------------------------------+---------------------------------------------+--------------------------------------------+
+
+    If character subsets have been defined, these subsets can be exported to independent matrices.
+
+    """
+
+    ###########################################################################
+    ### Class Variables
+
+    data_type = None
+    character_sequence_type = CharacterDataSequence
+
+    ###########################################################################
+    ### Factory (Class) Methods
+
    def _parse_and_create_from_stream(cls,
            stream,
            schema,
            matrix_offset=0,
            **kwargs):
        # Factory behind the Deserializable ``get_from_*`` interface: parses
        # ``stream`` using the reader registered for ``schema`` and returns
        # the character matrix at ``matrix_offset``. Raises ValueError if
        # the source contains no character data or if the selected matrix's
        # data type does not match ``cls``.
        taxon_namespace = taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs, None)
        if taxon_namespace is None:
            taxon_namespace = taxonmodel.TaxonNamespace()
        def tns_factory(label):
            # Adopt the data source's label for the taxon namespace if the
            # namespace does not already have one.
            if label is not None and taxon_namespace.label is None:
                taxon_namespace.label = label
            return taxon_namespace
        # NOTE(review): ``label`` is popped from kwargs but never applied to
        # the resulting matrix -- confirm whether this is intentional.
        label = kwargs.pop("label", None)
        kwargs["data_type"] = cls.data_type
        reader = dataio.get_reader(schema, **kwargs)
        char_matrices = reader.read_char_matrices(
                stream=stream,
                taxon_namespace_factory=tns_factory,
                char_matrix_factory=new_char_matrix,
                state_alphabet_factory=charstatemodel.StateAlphabet,
                global_annotations_target=None)
        if len(char_matrices) == 0:
            raise ValueError("No character data in data source")
        char_matrix = char_matrices[matrix_offset]
        if char_matrix.data_type != cls.data_type:
            raise ValueError(
                "Data source (at offset {}) is of type '{}', "
                "but current CharacterMatrix is of type '{}'.".format(
                    matrix_offset,
                    char_matrix.data_type,
                    cls.data_type))
        return char_matrix
    # Pre-decorator-syntax idiom for declaring a classmethod.
    _parse_and_create_from_stream = classmethod(_parse_and_create_from_stream)
+
    @classmethod
    def get(cls, **kwargs):
        """
        Instantiate and return a *new* character matrix object from a data source.

        **Mandatory Source-Specification Keyword Argument (Exactly One of the Following Required):**

            - **file** (*file*) -- File or file-like object of data opened for reading.
            - **path** (*str*) -- Path to file of data.
            - **url** (*str*) -- URL of data.
            - **data** (*str*) -- Data given directly.

        **Mandatory Schema-Specification Keyword Argument:**

            - **schema** (*str*) -- Identifier of format of data given by the
              "``file``", "``path``", "``data``", or "``url``" argument
              specified above: ":doc:`fasta </schemas/fasta>`", ":doc:`nexus
              </schemas/nexus>`", or ":doc:`nexml </schemas/nexml>`",
              ":doc:`phylip </schemas/phylip>`", etc.
              See "|Schemas|" for more details.

        **Optional General Keyword Arguments:**

            - **label** (*str*) -- Name or identifier to be assigned to the new
              object; if not given, will be assigned the one specified in the
              data source, or `None` otherwise.
            - **taxon_namespace** (|TaxonNamespace|) -- The |TaxonNamespace|
              instance to use to :doc:`manage the taxon names </primer/taxa>`.
              If not specified, a new one will be created.
            - **matrix_offset** (*int*) -- 0-based index of character block or
              matrix in source to be parsed. If not specified then the
              first matrix (offset = 0) is assumed.
            - **ignore_unrecognized_keyword_arguments** (*bool*) -- If `True`,
              then unsupported or unrecognized keyword arguments will not
              result in an error. Default is `False`: unsupported keyword
              arguments will result in an error.

        **Optional Schema-Specific Keyword Arguments:**

        These provide control over how the data is interpreted and
        processed, and supported argument names and values depend on
        the schema as specified by the value passed as the "``schema``"
        argument. See "|Schemas|" for more details.

        **Examples:**

        ::

            dna1 = dendropy.DnaCharacterMatrix.get(
                    file=open("pythonidae.fasta"),
                    schema="fasta")
            dna2 = dendropy.DnaCharacterMatrix.get(
                    url="http://purl.org/phylo/treebase/phylows/matrix/TB2:M2610?format=nexus",
                    schema="nexus")
            aa1 = dendropy.ProteinCharacterMatrix.get(
                    file=open("pythonidae.dat"),
                    schema="phylip")
            std1 = dendropy.StandardCharacterMatrix.get(
                    path="python_morph.nex",
                    schema="nexus")
            std2 = dendropy.StandardCharacterMatrix.get(
                    data=">t1\\n01011\\n\\n>t2\\n11100",
                    schema="fasta")

        """
        # Source/schema dispatch is handled by basemodel.Deserializable,
        # which ultimately calls ``cls._parse_and_create_from_stream``.
        return cls._get_from(**kwargs)
+
+    def concatenate(cls, char_matrices):
+        """
+        Creates and returns a single character matrix from multiple
+        CharacterMatrix objects specified as a list, 'char_matrices'.
+        All the CharacterMatrix objects in the list must be of the
+        same type, and share the same TaxonNamespace reference. All taxa
+        must be present in all alignments, all all alignments must
+        be of the same length. Component parts will be recorded as
+        character subsets.
+        """
+        taxon_namespace = char_matrices[0].taxon_namespace
+        nseqs = len(char_matrices[0])
+        concatenated_chars = cls(taxon_namespace=taxon_namespace)
+        pos_start = 0
+        for cidx, cm in enumerate(char_matrices):
+            if cm.taxon_namespace is not taxon_namespace:
+                raise ValueError("Different ``taxon_namespace`` references in matrices to be merged")
+            if len(cm) != len(taxon_namespace):
+                raise ValueError("Number of sequences not equal to the number of taxa")
+            if len(cm) != nseqs:
+                raise ValueError("Different number of sequences across alignments: %d (expecting %d based on first matrix)" % (len(cm), nseqs))
+            v1 = len(cm[0])
+            for t, s in cm.items():
+                if len(s) != v1:
+                    raise ValueError("Unequal length sequences in character matrix %d".format(cidx+1))
+            concatenated_chars.extend_matrix(cm)
+            if cm.label is None:
+                new_label = "locus%03d" % cidx
+            else:
+                new_label = cm.label
+            cs_label = new_label
+            i = 2
+            while cs_label in concatenated_chars.character_subsets:
+                label = "%s_%03d" % (new_label, i)
+                i += 1
+            character_indices = range(pos_start, pos_start + cm.vector_size)
+            pos_start += cm.vector_size
+            concatenated_chars.new_character_subset(character_indices=character_indices,
+                    label=cs_label)
+        return concatenated_chars
+    concatenate = classmethod(concatenate)
+
+    def concatenate_from_streams(cls, streams, schema, **kwargs):
+        """
+        Read a character matrix from each file object given in ``streams``,
+        assuming data format/schema ``schema``, and passing any keyword arguments
+        down to the underlying specialized reader. Merge the character matrices
+        and return the combined character matrix. Component parts will be
+        recorded as character subsets.
+        """
+        taxon_namespace = taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs, None)
+        if taxon_namespace is None:
+            taxon_namespace = taxonmodel.TaxonNamespace()
+        kwargs["taxon_namespace"] = taxon_namespace
+        char_matrices = []
+        for stream in streams:
+            char_matrices.append(cls.get_from_stream(stream,
+                schema=schema, **kwargs))
+        return cls.concatenate(char_matrices)
+    concatenate_from_streams = classmethod(concatenate_from_streams)
+
+    def concatenate_from_paths(cls, paths, schema, **kwargs):
+        """
+        Read a character matrix from each file path given in ``paths``, assuming
+        data format/schema ``schema``, and passing any keyword arguments down to
+        the underlying specialized reader. Merge the and return the combined
+        character matrix. Component parts will be recorded as character
+        subsets.
+        """
+        streams = [open(path, "rU") for path in paths]
+        return cls.concatenate_from_streams(streams, schema, **kwargs)
+    concatenate_from_paths = classmethod(concatenate_from_paths)
+
+    def from_dict(cls,
+            source_dict,
+            char_matrix=None,
+            case_sensitive_taxon_labels=False,
+            **kwargs):
+        """
+        Populates character matrix from dictionary (or similar mapping type),
+        creating |Taxon| objects and sequences as needed.
+
+        Keys must be strings representing labels |Taxon| objects or
+        |Taxon| objects directly. If key is specified as string, then it
+        will be dereferenced to the first existing |Taxon| object in the
+        current taxon namespace with the same label. If no such |Taxon|
+        object can be found, then a new |Taxon| object is created and
+        added to the current namespace. If a key is specified as a
+        |Taxon| object, then this is used directly. If it is not in the
+        current taxon namespace, it will be added.
+
+        Values are the sequences (more generally, iterable of values).  If
+        values are of type `CharacterDataSequence`, then they are added
+        as-is.  Otherwise `CharacterDataSequence` instances are
+        created for them. Values may be coerced into types compatible with
+        particular matrices. The classmethod `coerce_values()` will be
+        called for this.
+
+        Examples
+        --------
+
+        The following creates a |DnaCharacterMatrix| instance with three
+        sequences::
+
+            d = {
+                    "s1" : "TCCAA",
+                    "s2" : "TGCAA",
+                    "s3" : "TG-AA",
+            }
+            dna = DnaCharacterMatrix.from_dict(d)
+
+        Three |Taxon| objects will be created, corresponding to the
+        labels 's1', 's2', 's3'. Each associated string sequence will be
+        converted to a `CharacterDataSequence`, with each symbol ("A", "C",
+        etc.) being replaced by the DNA state represented by the symbol.
+
+        Parameters
+        ----------
+        source_dict : dict or other mapping type
+            Keys must be strings representing labels |Taxon| objects or
+            |Taxon| objects directly. Values are sequences. See above
+            for details.
+        char_matrix : |CharacterMatrix|
+            Instance of |CharacterMatrix| to populate with data. If not
+            specified, a new one will be created using keyword arguments
+            specified by ``kwargs``.
+        case_sensitive_taxon_labels : boolean
+            If `True`, matching of string labels specified as keys in ``d`` will
+            be matched to |Taxon| objects in current taxon namespace
+            with case being respected. If `False`, then case will be ignored.
+        \*\*kwargs : keyword arguments, optional
+            Keyword arguments to be passed to constructor of
+            |CharacterMatrix| when creating new instance to populate, if
+            no target instance is provided via ``char_matrix``.
+
+        Returns
+        -------
+        char_matrix : |CharacterMatrix|
+            |CharacterMatrix| populated by data from ``d``.
+        """
+        if char_matrix is None:
+            char_matrix = cls(**kwargs)
+        for key in source_dict:
+            if isinstance(key, str):
+                taxon = char_matrix.taxon_namespace.require_taxon(key,
+                        is_case_sensitive=case_sensitive_taxon_labels)
+            else:
+                taxon = key
+                if taxon not in char_matrix.taxon_namespace:
+                    char_matrix.taxon_namespace.add_taxon(taxon)
+            s = cls.coerce_values(source_dict[key])
+            char_matrix[taxon] = s
+        return char_matrix
+    from_dict = classmethod(from_dict)
+
+    def coerce_values(cls, values):
+        """
+        Converts elements of ``values`` to type of matrix.
+
+        This method is called by :meth:`CharacterMatrix.from_dict` to create
+        sequences from iterables of values.  This method should be overridden
+        by derived classes to ensure that ``values`` consists of types compatible
+        with the particular type of matrix. For example, a CharacterMatrix type
+        with a fixed state alphabet (such as |DnaCharacterMatrix|) would
+        dereference the string elements of ``values`` to return a list of
+        |StateIdentity| objects corresponding to the symbols represented
+        by the strings.  If there is no value-type conversion done, then
+        ``values`` should be returned as-is. If no value-type conversion is
+        possible (e.g., when the type of a value is dependent on positionaly
+        information), then a TypeError should be raised.
+
+        Parameters
+        ----------
+        values : iterable
+            Iterable of values to be converted.
+
+        Returns
+        -------
+        v : list of values.
+        """
+        return values
+    coerce_values = classmethod(coerce_values)
+
+    ###########################################################################
+    ### Lifecycle and Identity
+
+    def __init__(self, *args, **kwargs):
+        """
+        Constructs a new character matrix: either cloned from another
+        CharacterMatrix passed as the single positional argument, or
+        created empty (optionally populated from a dict-like positional
+        argument).
+        """
+        if len(args) > 1:
+            # only allow 1 positional argument
+            raise error.TooManyArgumentsError(func_name=self.__class__.__name__, max_args=1, args=args)
+        elif len(args) == 1 and isinstance(args[0], CharacterMatrix):
+            # Copy-construction: taxa, sequences, and annotations are
+            # cloned from the source matrix.
+            self._clone_from(args[0], kwargs)
+        else:
+            basemodel.DataObject.__init__(self, label=kwargs.pop("label", None))
+            taxonmodel.TaxonNamespaceAssociated.__init__(self,
+                    taxon_namespace=taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs, None))
+            # Core storage: maps |Taxon| objects to character sequences.
+            self._taxon_sequence_map = {}
+            self.character_types = []
+            self.comments = []
+            self.character_subsets = container.OrderedCaselessDict()
+            if len(args) == 1:
+                # takes care of all possible initializations, including. e.g.,
+                # tuples and so on
+                d = collections.OrderedDict(args[0])
+                self.__class__.from_dict(d, char_matrix=self)
+        if kwargs:
+            # Anything not consumed above is an unsupported argument.
+            raise TypeError("Unrecognized or unsupported arguments: {}".format(kwargs))
+
+    def __hash__(self):
+        # Identity-based hash: every matrix instance hashes distinctly.
+        return id(self)
+
+    def __eq__(self, other):
+        # Identity-based equality, consistent with __hash__ above.
+        return self is other
+
+    def _clone_from(self, src, kwargs_dict):
+        """
+        Initializes ``self`` as a deep copy of ``src``, honoring any
+        ``taxon_namespace`` and ``label`` overrides in ``kwargs_dict``.
+        """
+        # super(Tree, self).__init__()
+        memo = {}
+        # memo[id(tree)] = self
+        taxon_namespace = taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs_dict, src.taxon_namespace)
+        # Seed the deepcopy memo so the namespace itself is not copied.
+        memo[id(src.taxon_namespace)] = taxon_namespace
+        if taxon_namespace is not src.taxon_namespace:
+            # Migrating to a different namespace: map each source taxon to
+            # a same-labeled taxon in the target (created if needed).
+            for t1 in src.taxon_namespace:
+                t2 = taxon_namespace.require_taxon(label=t1.label)
+                memo[id(t1)] = t2
+        else:
+            # Same namespace: taxa are shared, not copied.
+            for t1 in src.taxon_namespace:
+                memo[id(t1)] = t1
+        t = copy.deepcopy(src, memo)
+        # Adopt the deep-copied state wholesale.
+        self.__dict__ = t.__dict__
+        self.label = kwargs_dict.pop("label", src.label)
+        return self
+
+    def __copy__(self):
+        """
+        Returns a shallow copy: sequence objects are shared between
+        ``self`` and the copy, while annotations are deep-copied.
+        """
+        other = self.__class__(label=self.label,
+            taxon_namespace=self.taxon_namespace)
+        for taxon in self._taxon_sequence_map:
+            # other._taxon_sequence_map[taxon] = self.__class__.character_sequence_type(self._taxon_sequence_map[taxon])
+            # Shallow: the copy references the very same sequence objects.
+            other._taxon_sequence_map[taxon] = self._taxon_sequence_map[taxon]
+        memo = {}
+        memo[id(self)] = other
+        other.deep_copy_annotations_from(self, memo)
+        return other
+
+    def taxon_namespace_scoped_copy(self, memo=None):
+        """
+        Returns a deep copy of ``self`` that shares (rather than copies)
+        the current |TaxonNamespace| and |Taxon| objects.
+        """
+        if memo is None:
+            memo = {}
+        # this populates ``memo`` with references to the
+        # the TaxonNamespace and Taxon objects
+        self.taxon_namespace.populate_memo_for_taxon_namespace_scoped_copy(memo)
+        return self.__deepcopy__(memo=memo)
+
+    def __deepcopy__(self, memo=None):
+        # Full deep copy (state and annotations) via Annotable machinery.
+        return basemodel.Annotable.__deepcopy__(self, memo=memo)
+
+    ###########################################################################
+    ### Data I/O
+
+    # def _parse_and_add_from_stream(self, stream, schema, **kwargs):
+    #     """
+    #     Populates objects of this type from ``schema``-formatted
+    #     data in the file-like object source ``stream``, *replacing*
+    #     all current data. If multiple character matrices are in the data
+    #     source, a 0-based index of the character matrix to use can
+    #     be specified using the ``matrix_offset`` keyword (defaults to 0, i.e., first
+    #     character matrix).
+    #     """
+    #     warnings.warn("Repopulating a CharacterMatrix is now deprecated. Instantiate a new instance from the source instead.",
+    #             DeprecationWarning)
+    #     m = self.__class__._parse_and_create_from_stream(stream=stream,
+    #             schema=schema,
+    #             **kwargs)
+    #     return self.clone_from(m)
+
+    def _format_and_write_to_stream(self, stream, schema, **kwargs):
+        """
+        Writes out ``self`` in ``schema`` format to a destination given by
+        file-like object ``stream``.
+
+        Parameters
+        ----------
+        stream : file or file-like object
+            Destination for data.
+        schema : string
+            Must be a recognized character file schema, such as "nexus",
+            "phylip", etc, for which a specialized writer is available. If this
+            is not implemented for the schema specified, then a
+            UnsupportedSchemaError is raised.
+
+        \*\*kwargs : keyword arguments, optional
+            Keyword arguments will be passed directly to the writer for the
+            specified schema. See documentation for details on keyword
+            arguments supported by writers of various schemas.
+
+        """
+        writer = dataio.get_writer(schema, **kwargs)
+        writer.write_char_matrices([self],
+                stream)
+
+    ###########################################################################
+    ### Taxon Management
+
+    def reconstruct_taxon_namespace(self,
+            unify_taxa_by_label=True,
+            taxon_mapping_memo=None):
+        """
+        Remaps the taxon-to-sequence map so every key is a member of the
+        current ``self.taxon_namespace``, matching by label when
+        ``unify_taxa_by_label`` is `True`.
+
+        See `TaxonNamespaceAssociated.reconstruct_taxon_namespace`.
+        """
+        if taxon_mapping_memo is None:
+            taxon_mapping_memo = {}
+        # Snapshot the keys: the map is mutated while we iterate.
+        original_taxa = list(self._taxon_sequence_map.keys())
+        for original_taxon in original_taxa:
+            if unify_taxa_by_label or original_taxon not in self.taxon_namespace:
+                t = taxon_mapping_memo.get(original_taxon, None)
+                if t is None:
+                    # taxon to use not given and
+                    # we have not yet created a counterpart
+                    if unify_taxa_by_label:
+                        # this will force usage of any taxon with
+                        # a label that matches the current taxon
+                        t = self.taxon_namespace.require_taxon(label=original_taxon.label)
+                    else:
+                        # this will unconditionally create a new taxon
+                        t = self.taxon_namespace.new_taxon(label=original_taxon.label)
+                    taxon_mapping_memo[original_taxon] = t
+                else:
+                    # taxon to use is given by mapping
+                    self.taxon_namespace.add_taxon(t)
+                if t in self._taxon_sequence_map:
+                    # Two original taxa may not collapse onto one target.
+                    raise error.TaxonNamespaceReconstructionError("Multiple sequences for taxon with label '{}'".format(t.label))
+                self._taxon_sequence_map[t] = self._taxon_sequence_map[original_taxon]
+                del self._taxon_sequence_map[original_taxon]
+
+    def poll_taxa(self, taxa=None):
+        """
+        Returns a set populated with all of |Taxon| instances associated
+        with ``self``.
+
+        Parameters
+        ----------
+        taxa : set()
+            Set to populate. If not specified, a new one will be created.
+
+        Returns
+        -------
+        taxa : set[|Taxon|]
+            Set of taxa associated with ``self``.
+        """
+        if taxa is None:
+            taxa = set()
+        for taxon in self._taxon_sequence_map:
+            taxa.add(taxon)
+        return taxa
+
+    def update_taxon_namespace(self):
+        """
+        All |Taxon| objects in ``self`` that are not in
+        ``self.taxon_namespace`` will be added.
+        """
+        assert self.taxon_namespace is not None
+        for taxon in self._taxon_sequence_map:
+            if taxon not in self.taxon_namespace:
+                self.taxon_namespace.add_taxon(taxon)
+
+    def reindex_subcomponent_taxa(self):
+        """
+        Synchronizes |Taxon| objects of map to ``taxon_namespace`` of self.
+        """
+        raise NotImplementedError("'reindex_subcomponent_taxa()' is no longer supported; use '{}.reconstruct_taxon_namespace()' instead".format(self.__class__.__name__))
+
+    ###########################################################################
+    ### Sequence CRUD
+
+    def _resolve_key(self, key):
+        """
+        Resolves map access key into |Taxon| instance.
+
+        If ``key`` is integer, assumed to be taxon index.
+        If ``key`` string, assumed to be taxon label.
+        Otherwise, assumed to be |Taxon| instance directly.
+        """
+        if isinstance(key, int):
+            if abs(key) < len(self.taxon_namespace):
+                taxon = self.taxon_namespace[key]
+            else:
+                raise IndexError(key)
+        elif isinstance(key, str):
+            taxon = self.taxon_namespace.get_taxon(label=key)
+            if taxon is None:
+                raise KeyError(key)
+        else:
+            taxon = key
+        return taxon
+
+    def new_sequence(self, taxon, values=None):
+        """
+        Creates a new `CharacterDataSequence` associated with |Taxon|
+        ``taxon``, and populates it with values in ``values``.
+
+        Parameters
+        ----------
+        taxon : |Taxon|
+            |Taxon| instance with which this sequence is associated.
+        values : iterable or `None`
+            An initial set of values with which to populate the new character
+            sequence.
+
+        Returns
+        -------
+        s : `CharacterDataSequence`
+            A new `CharacterDataSequence` associated with |Taxon|
+            ``taxon``.
+        """
+        if taxon in self._taxon_sequence_map:
+            raise ValueError("Character values vector for taxon {} already exists".format(repr(taxon)))
+        if taxon not in self.taxon_namespace:
+            raise ValueError("Taxon {} is not in object taxon namespace".format(repr(taxon)))
+        cv = self.__class__.character_sequence_type(values)
+        self._taxon_sequence_map[taxon] = cv
+        return cv
+
+    def __getitem__(self, key):
+        """
+        Retrieves sequence for ``key``, which can be a index or a label of a
+        |Taxon| instance in the current taxon namespace, or a
+        |Taxon| instance directly.
+
+        If no sequence is currently associated with specified |Taxon|, a
+        new one will be created. Note that the |Taxon| object must have
+        already been defined in the curent taxon namespace.
+
+        Parameters
+        ----------
+        key : integer, string, or |Taxon|
+            If an integer, assumed to be an index of a |Taxon| object in
+            the current |TaxonNamespace| object of ``self.taxon_namespace``.
+            If a string, assumed to be a label of a |Taxon| object in
+            the current |TaxonNamespace| object of ``self.taxon_namespace``.
+            Otherwise, assumed to be |Taxon| instance directly. In all
+            cases, the |Taxon| object must be (already) defined in the
+            current taxon namespace.
+
+        Returns
+        -------
+        s : `CharacterDataSequence`
+            A sequence associated with the |Taxon| instance referenced
+            by ``key``.
+        """
+        taxon = self._resolve_key(key)
+        try:
+            return self._taxon_sequence_map[taxon]
+        except KeyError:
+            return self.new_sequence(taxon)
+
+    def __setitem__(self, key, values):
+        """
+        Assigns sequence ``values`` to taxon specified by ``key``, which can be a
+        index or a label of a |Taxon| instance in the current taxon
+        namespace, or a |Taxon| instance directly.
+
+        If no sequence is currently associated with specified |Taxon|, a
+        new one will be created.  Note that the |Taxon| object must have
+        already been defined in the curent taxon namespace.
+
+        Parameters
+        ----------
+        key : integer, string, or |Taxon|
+            If an integer, assumed to be an index of a |Taxon| object in
+            the current |TaxonNamespace| object of ``self.taxon_namespace``.
+            If a string, assumed to be a label of a |Taxon| object in
+            the current |TaxonNamespace| object of ``self.taxon_namespace``.
+            Otherwise, assumed to be |Taxon| instance directly. In all
+            cases, the |Taxon| object must be (already) defined in the
+            current taxon namespace.
+
+        """
+        taxon = self._resolve_key(key)
+        if taxon not in self.taxon_namespace:
+            raise ValueError(repr(key))
+        if not isinstance(values, self.__class__.character_sequence_type):
+            values = self.__class__.character_sequence_type(values)
+        self._taxon_sequence_map[taxon] = values
+
+    def __contains__(self, key):
+        """
+        Returns `True` if a sequence associated with ``key`` is in ``self``, or
+        `False` otherwise.
+
+        Parameters
+        ----------
+        key : |Taxon|
+            Key checked directly against the underlying taxon-to-sequence
+            map, so in practice only its keys (|Taxon| instances) can
+            match.
+
+        Returns
+        -------
+        b : boolean
+            `True` if ``key`` is in ``self``; `False` otherwise.
+        """
+        # NOTE(review): unlike __getitem__/__setitem__, the key is NOT
+        # resolved via _resolve_key() here, so integer or string keys
+        # simply return False instead of being mapped to a taxon — confirm
+        # whether this asymmetry is intended.
+        return self._taxon_sequence_map.__contains__(key)
+
+    def __delitem__(self, key):
+        """
+        Removes the sequence stored under ``key``.
+
+        Parameters
+        ----------
+        key : |Taxon|
+            Key passed directly to the underlying taxon-to-sequence map's
+            deletion, so in practice only its keys (|Taxon| instances)
+            can be deleted; a missing key raises KeyError.
+        """
+        # NOTE(review): unlike __getitem__/__setitem__, the key is NOT
+        # resolved via _resolve_key() here, so integer or string keys
+        # raise KeyError rather than being mapped to a taxon — confirm
+        # whether this asymmetry is intended.
+        return self._taxon_sequence_map.__delitem__(key)
+
+    def clear(self):
+        """
+        Removes all sequences from matrix.
+        """
+        # In-place clear: the map object itself is retained, so any
+        # external references to it remain valid.
+        self._taxon_sequence_map.clear()
+
+    def sequences(self):
+        """
+        List of all sequences in self.
+
+        Returns
+        -------
+        s : list of `CharacterDataSequence` objects in self
+
+        """
+        s = [self[taxon] for taxon in self]
+        return s
+
+    def vectors(self):
+        """Deprecated alias for :meth:`sequences`; emits a warning."""
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'vectors()' will no longer be supported in future releases; use 'sequences()' instead")
+        return self.sequences()
+
+    ###########################################################################
+    ### Sequence Access Iteration
+
+    def __iter__(self):
+        "Returns an iterator over character map's ordered keys."
+        for t in self.taxon_namespace:
+            if t in self._taxon_sequence_map:
+                yield t
+
+    def values(self):
+        """
+        Iterates values (i.e. sequences) in this matrix.
+        """
+        for t in self:
+            yield self[t]
+
+    # def iterkeys(self):
+    #     "Dictionary interface implementation for direct access to character map."
+    #     for t in self.taxon_namespace:
+    #         if t in self._taxon_sequence_map:
+    #             yield t
+
+    # def itervalues(self):
+    #     "Dictionary interface implementation for direct access to character map."
+    #     for t in self.taxon_namespace:
+    #         if t in self._taxon_sequence_map:
+    #             yield self._taxon_sequence_map[t]
+
+    def items(self):
+        "Returns character map key, value pairs in key-order."
+        for t in self.taxon_namespace:
+            if t in self._taxon_sequence_map:
+                yield t, self._taxon_sequence_map[t]
+
+    # def values(self):
+    #     "Returns list of values."
+    #     return [self._taxon_sequence_map[t] for t in self.taxon_namespace if t in self._taxon_seq_map]
+
+    # def pop(self, key, alt_val=None):
+    #     "a.pop(k[, x]):  a[k] if k in a, else x (and remove k)"
+    #     return self._taxon_sequence_map.pop(key, alt_val)
+
+    # def popitem(self):
+    #     "a.popitem()  remove and last (key, value) pair"
+    #     return self._taxon_sequence_map.popitem()
+
+    # def keys(self):
+    #     "Returns a copy of the ordered list of character map keys."
+    #     return list(self._taxon_sequence_map.keys())
+
+    ###########################################################################
+    ### Metrics
+
+    def __len__(self):
+        """
+        Number of sequences in matrix.
+
+        Returns
+        -------
+        n : Number of sequences in matrix.
+        """
+        return len(self._taxon_sequence_map)
+
+    def _get_sequence_size(self):
+        """
+        Number of characters in *first* sequence in matrix.
+
+        Returns
+        -------
+        n : integer
+            Number of sequences in matrix.
+        """
+        if len(self):
+            # yuck, but len(self.values())
+            # means we have to create and populate a list ...
+            return len(self[next(iter(self._taxon_sequence_map))])
+        else:
+            return 0
+    sequence_size = property(_get_sequence_size, None, None)
+    vector_size = property(_get_sequence_size, None, None) # legacy
+
+    def _get_max_sequence_size(self):
+        """
+        Maximum number of characters across all sequences in matrix.
+
+        Returns
+        -------
+        n : integer
+            Maximum number of characters across all sequences in matrix.
+        """
+        max_len = 0
+        for k in self:
+            if len(self[k]) > max_len:
+                max_len  = len(self._taxon_sequence_map[k])
+        return max_len
+    max_sequence_size = property(_get_max_sequence_size, None, None)
+
+    ###########################################################################
+    ### Mass/Bulk Operations
+
+    def fill(self, value, size=None, append=True):
+        """
+        Pads out all sequences in ``self`` by adding ``value`` to each sequence
+        until its length is ``size`` long or equal to the length of the longest
+        sequence if ``size`` is not specified.
+
+        Parameters
+        ----------
+        value : object
+            A valid value (e.g., a numeric value for continuous characters, or
+            a |StateIdentity| for discrete character).
+        size : integer or None
+            The size (length) up to which the sequences will be padded. If `None`, then
+            the maximum (longest) sequence size will be used.
+        append : boolean
+            If `True` (default), then new values will be added to the end of
+            each sequence. If `False`, then new values will be inserted to the
+            front of each sequence.
+        """
+        if size is None:
+            size = self.max_sequence_size
+        for k in self:
+            v = self[k]
+            while len(v) < size:
+                if append:
+                    v.append(value)
+                else:
+                    v.insert(0, value)
+        return size
+
+    def fill_taxa(self):
+        """
+        Adds a new (empty) sequence for each |Taxon| instance in
+        current taxon namespace that does not have a sequence.
+        """
+        for taxon in self.taxon_namespace:
+            if taxon not in self:
+                self[taxon] = CharacterDataSequence()
+
+    def pack(self, value=None, size=None, append=True):
+        """
+        Adds missing sequences for all |Taxon| instances in current
+        namespace, and then pads out all sequences in ``self`` by adding ``value``
+        to each sequence until its length is ``size`` long or equal to the length
+        of the longest sequence if ``size`` is not specified. A combination of
+        :meth:`CharacterMatrix.fill_taxa()` and
+        :meth:`CharacterMatrix.fill()`.
+
+        Parameters
+        ----------
+        value : object
+            A valid value (e.g., a numeric value for continuous characters, or
+            a |StateIdentity| for discrete character).
+        size : integer or None
+            The size (length) up to which the sequences will be padded. If `None`, then
+            the maximum (longest) sequence size will be used.
+        append : boolean
+            If `True` (default), then new values will be added to the end of
+            each sequence. If `False`, then new values will be inserted to the
+            front of each sequence.
+        """
+        self.fill_taxa()
+        self.fill(value=value, size=size, append=append)
+
+    def add_sequences(self, other_matrix):
+        """
+        Adds sequences for |Taxon| objects that are in ``other_matrix`` but not in
+        ``self``.
+
+        Parameters
+        ----------
+        other_matrix : |CharacterMatrix|
+            Matrix from which to add sequences.
+
+        Notes
+        -----
+            1. ``other_matrix`` must be of same type as ``self``.
+            2. ``other_matrix`` must have the same |TaxonNamespace| as ``self``.
+            3. Each sequence associated with a |Taxon| reference in ``other_matrix``
+               but not in ``self`` will be added to ``self`` as a shallow-copy.
+            4. All other sequences will be ignored.
+
+        """
+        if other_matrix.taxon_namespace is not self.taxon_namespace:
+            raise error.TaxonNamespaceIdentityError(self, other_matrix)
+        for taxon in other_matrix._taxon_sequence_map:
+            if taxon not in self._taxon_sequence_map:
+                self._taxon_sequence_map[taxon] = self.__class__.character_sequence_type(other_matrix._taxon_sequence_map[taxon])
+
+    def replace_sequences(self, other_matrix):
+        """
+        Replaces sequences for |Taxon| objects shared between ``self`` and
+        ``other_matrix``.
+
+        Parameters
+        ----------
+        other_matrix : |CharacterMatrix|
+            Matrix from which to replace sequences.
+
+        Notes
+        -----
+            1. ``other_matrix`` must be of same type as ``self``.
+            2. ``other_matrix`` must have the same |TaxonNamespace| as ``self``.
+            3. Each sequence in ``self`` associated with a |Taxon| that is
+               also represented in ``other_matrix`` will be replaced with a
+               shallow-copy of the corresponding sequence from ``other_matrix``.
+            4. All other sequences will be ignored.
+        """
+        if other_matrix.taxon_namespace is not self.taxon_namespace:
+            raise error.TaxonNamespaceIdentityError(self, other_matrix)
+        for taxon in other_matrix._taxon_sequence_map:
+            if taxon in self._taxon_sequence_map:
+                self._taxon_sequence_map[taxon] = self.__class__.character_sequence_type(other_matrix._taxon_sequence_map[taxon])
+
+    def update_sequences(self, other_matrix):
+        """
+        Replaces sequences for |Taxon| objects shared between ``self`` and
+        ``other_matrix`` and adds sequences for |Taxon| objects that are
+        in ``other_matrix`` but not in ``self``.
+
+        Parameters
+        ----------
+        other_matrix : |CharacterMatrix|
+            Matrix from which to update sequences.
+
+        Notes
+        -----
+            1. ``other_matrix`` must be of same type as ``self``.
+            2. ``other_matrix`` must have the same |TaxonNamespace| as ``self``.
+            3. Each sequence associated with a |Taxon| reference in ``other_matrix``
+               but not in ``self`` will be added to ``self``.
+            4. Each sequence in ``self`` associated with a |Taxon| that is
+               also represented in ``other_matrix`` will be replaced with a
+               shallow-copy of the corresponding sequence from ``other_matrix``.
+        """
+        if other_matrix.taxon_namespace is not self.taxon_namespace:
+            raise error.TaxonNamespaceIdentityError(self, other_matrix)
+        for taxon in other_matrix._taxon_sequence_map:
+            self._taxon_sequence_map[taxon] = self.__class__.character_sequence_type(other_matrix._taxon_sequence_map[taxon])
+
+    def extend_sequences(self, other_matrix):
+        """
+        Extends sequences in ``self`` with characters associated with
+        corresponding |Taxon| objects in ``other_matrix``.
+
+        Parameters
+        ----------
+        other_matrix : |CharacterMatrix|
+            Matrix from which to extend sequences.
+
+        Notes
+        -----
+            1. ``other_matrix`` must be of same type as ``self``.
+            2. ``other_matrix`` must have the same |TaxonNamespace| as ``self``.
+            3. Each sequence associated with a |Taxon| reference in
+               ``other_matrix`` that is also in ``self`` will be appended to the
+               sequence currently associated with that |Taxon| reference
+               in ``self``.
+            4. All other sequences will be ignored.
+        """
+        if other_matrix.taxon_namespace is not self.taxon_namespace:
+            raise error.TaxonNamespaceIdentityError(self, other_matrix)
+        for taxon in other_matrix._taxon_sequence_map:
+            if taxon in self._taxon_sequence_map:
+                self._taxon_sequence_map[taxon].extend(other_matrix._taxon_sequence_map[taxon])
+
+    def extend_matrix(self, other_matrix):
+        """
+        Extends sequences in ``self`` with characters associated with
+        corresponding |Taxon| objects in ``other_matrix`` and adds
+        sequences for |Taxon| objects that are in ``other_matrix`` but not
+        in ``self``.
+
+        Parameters
+        ----------
+        other_matrix : |CharacterMatrix|
+            Matrix from which to extend.
+
+        Notes
+        -----
+            1. ``other_matrix`` must be of same type as ``self``.
+            2. ``other_matrix`` must have the same |TaxonNamespace| as ``self``.
+            3. Each sequence associated with a |Taxon| reference in ``other_matrix``
+               that is also in ``self`` will be appending
+               to the sequence currently associated with that |Taxon|
+               reference in ``self``.
+            4. Each sequence associated with a |Taxon| reference in
+               ``other_matrix`` that is also in ``self`` will replace the sequence
+               currently associated with that |Taxon| reference in ``self``.
+        """
+        if other_matrix.taxon_namespace is not self.taxon_namespace:
+            raise error.TaxonNamespaceIdentityError(self, other_matrix)
+        for taxon in other_matrix._taxon_sequence_map:
+            if taxon in self._taxon_sequence_map:
+                self._taxon_sequence_map[taxon].extend(other_matrix._taxon_sequence_map[taxon])
+            else:
+                self._taxon_sequence_map[taxon]= self.__class__.character_sequence_type(other_matrix._taxon_sequence_map[taxon])
+
+    def remove_sequences(self, taxa):
+        """
+        Removes sequences associated with |Taxon| instances specified in
+        ``taxa``. A KeyError is raised if a |Taxon| instance is
+        specified for which there is no associated sequences.
+
+        Parameters
+        ----------
+        taxa : iterable[|Taxon|]
+            List or some other iterable of |Taxon| instances.
+        """
+        for taxon in taxa:
+            del self._taxon_sequence_map[taxon]
+
+    def discard_sequences(self, taxa):
+        """
+        Removes sequences associated with |Taxon| instances specified in
+        ``taxa`` if they exist.
+
+        Parameters
+        ----------
+        taxa : iterable[|Taxon|]
+            List or some other iterable of |Taxon| instances.
+        """
+        for taxon in taxa:
+            try:
+                del self._taxon_sequence_map[taxon]
+            except KeyError:
+                pass
+
+    def keep_sequences(self, taxa):
+        """
+        Discards all sequences *not* associated with any of the |Taxon| instances.
+
+        Parameters
+        ----------
+        taxa : iterable[|Taxon|]
+            List or some other iterable of |Taxon| instances.
+        """
+        to_keep = set(taxa)
+        for taxon in self._taxon_sequence_map:
+            if taxon not in to_keep:
+                del self._taxon_sequence_map[taxon]
+
+    # def extend_characters(self, other_matrix):
+    #     """
+    #     DEPRECATED
+    #     Extends this matrix by adding characters from sequences of taxa
+    #     in given matrix to sequences of taxa with correspond labels in
+    #     this one. Taxa in the second matrix that do not exist in the
+    #     current one are ignored.
+    #     """
+    #     self._taxon_sequence_map.extend_characters(other_matrix.taxon_seq_map)
+
+    # def extend_map(self,
+    #                   other_map,
+    #                   overwrite_existing=False,
+    #                   extend_existing=False):
+    #     """
+    #     DEPRECATED
+    #     Extends this matrix by adding taxa and characters from the given
+    #     map to this one.  If ``overwrite_existing`` is True and a taxon
+    #     in the other map is already present in the current one, then
+    #     the sequence associated with the taxon in the second map
+    #     replaces the sequence in the current one. If ``extend_existing``
+    #     is True and a taxon in the other matrix is already present in
+    #     the current one, then the squence map with the taxon in
+    #     the second map will be added to the sequence in the current
+    #     one. If both are True, then an exception is raised. If neither
+    #     are True,  and a taxon in the other map is already present in
+    #     the current one, then the sequence is ignored.
+    #     """
+    #     self._taxon_sequence_map.extend(other_map,
+    #         overwrite_existing=overwrite_existing,
+    #         extend_existing=extend_existing)
+    #     self.update_taxon_namespace()
+
+    # def extend(self,
+    #            other_matrix,
+    #            overwrite_existing=False,
+    #            extend_existing=False):
+    #     """
+    #     Extends this matrix by adding taxa and characters from the given
+    #     matrix to this one.  If ``overwrite_existing`` is True and a taxon
+    #     in the other matrix is already present in the current one, then
+    #     the sequence associated with the taxon in the second matrix
+    #     replaces the sequence in the current one. If ``extend_existing``
+    #     is True and a taxon in the other matrix is already present in
+    #     the current one, then the sequence associated with the taxon in
+    #     the second matrix will be added to the sequence in the current
+    #     one. If both are True, then an exception is raised. If neither
+    #     are True, and a taxon in the other matrix is already present in
+    #     the current one, then the sequence is ignored.
+    #     """
+    #     self._taxon_sequence_map.extend(other_matrix.taxon_seq_map,
+    #         overwrite_existing=overwrite_existing,
+    #         extend_existing=extend_existing)
+    #     self.update_taxon_namespace()
+
+    ###########################################################################
+    ### Character Subset Management
+
+    def add_character_subset(self, char_subset):
+        """
+        Adds a CharacterSubset object. Raises an error if one already exists
+        with the same label.
+        """
+        label = char_subset.label
+        if label in self.character_subsets:
+            raise ValueError("Character subset '%s' already defined" % label)
+        self.character_subsets[label] = char_subset
+        return self.character_subsets[label]
+
+    def new_character_subset(self, label, character_indices):
+        """
+        Defines a set of character (columns) that make up a character set.
+        Raises an error if one already exists with the same label. Column
+        indices are 0-based.
+        """
+        cs = CharacterSubset(character_indices=character_indices, label=label)
+        return self.add_character_subset(cs)
+
+    ###########################################################################
+    ### CharacterType Management
+
    def new_character_type(self, *args, **kwargs):
        """
        Creates and returns a new |CharacterType| instance, passing all
        arguments through to the ``CharacterType`` constructor.

        Note: the new instance is only returned; this method does not
        register it anywhere on the matrix.
        """
        return CharacterType(*args, **kwargs)
+
+    ###########################################################################
+    ### Export
+
+    def export_character_subset(self, character_subset):
+        """
+        Returns a new CharacterMatrix (of the same type) consisting only
+        of columns given by the CharacterSubset, ``character_subset``.
+        Note that this new matrix will still reference the same taxon set.
+        """
+        if isinstance(character_subset, str):
+            if character_subset not in self.character_subsets:
+                raise KeyError(character_subset)
+            else:
+                character_subset = self.character_subsets[character_subset]
+        return self.export_character_indices(character_subset.character_indices)
+
+    def export_character_indices(self, indices):
+        """
+        Returns a new CharacterMatrix (of the same type) consisting only
+        of columns given by the 0-based indices in ``indices``.
+        Note that this new matrix will still reference the same taxon set.
+        """
+        clone = self.__class__(self)
+        # clone.clone_from(self)
+        for vec in clone.values():
+            for cell_idx in range(len(vec)-1, -1, -1):
+                if cell_idx not in indices:
+                    del(vec[cell_idx])
+        return clone
+
+    ###########################################################################
+    ### Representation
+
+    def description(self, depth=1, indent=0, itemize="", output=None):
+        """
+        Returns description of object, up to level ``depth``.
+        """
+        if depth is None or depth < 0:
+            return
+        output_strio = StringIO()
+        label = " (%s: '%s')" % (id(self), self.label)
+        output_strio.write('%s%s%s object at %s%s'
+                % (indent*' ',
+                   itemize,
+                   self.__class__.__name__,
+                   hex(id(self)),
+                   label))
+        if depth >= 1:
+            output_strio.write(':  %d Sequences' % len(self))
+            if depth >= 2:
+                if self.taxon_namespace is not None:
+                    tlead = "\n%s[Taxon Set]\n" % (" " * (indent+4))
+                    output_strio.write(tlead)
+                    self.taxon_namespace.description(depth=depth-1, indent=indent+8, itemize="", output=output_strio)
+                tlead = "\n%s[Characters]\n" % (" " * (indent+4))
+                output_strio.write(tlead)
+                indent += 8
+                maxlabel = max([len(str(t.label)) for t in self.taxon_namespace])
+                for i, t in enumerate(self.taxon_namespace):
+                    output_strio.write('%s%s%s : %s characters\n' \
+                        % (" " * indent,
+                           "[%d] " % i,
+                           str(t.label),
+                           len(self._taxon_sequence_map[t])))
+
+        s = output_strio.getvalue()
+        if output is not None:
+            output.write(s)
+        return s
+
+    ###########################################################################
+    ### Legacy
+
    def _get_taxon_seq_map(self):
        # Legacy shim: the old 'taxon_seq_map' attribute's functionality now
        # lives on the matrix itself, so warn and return ``self``.
        warnings.warn("All methods and features of 'CharacterMatrix.taxon_seq_map' have been integrated directly into 'CharacterMatrix', or otherwise replaced entirely",
                stacklevel=2)
        return self
    # Deprecated read-only alias retained for backward compatibility.
    taxon_seq_map = property(_get_taxon_seq_map)
+
+###############################################################################
+## Specialized Matrices
+
class ContinuousCharacterMatrix(CharacterMatrix):
    """
    A |CharacterMatrix| specialized for continuous-valued data.

    Sequences are stored as |ContinuousCharacterDataSequence| objects, whose
    elements are assumed to be primitive numeric (``float``) values.
    """

    class ContinuousCharacterDataSequence(CharacterDataSequence):
        """
        A sequence of continuous character values for a particular taxon or
        entry in a data matrix. Specializes `CharacterDataSequence` by
        assuming all values are primitive numerics (i.e., floats or
        integers) when copying or representing itself.
        """

        def symbols_as_list(self):
            """
            Returns the values of this vector rendered as strings.

            Returns
            -------
            list[str]
                String representation of each value in this vector.
            """
            return list(map(str, self))

        def symbols_as_string(self, sep=" "):
            # Delegates to the base implementation; overridden only to fix a
            # different default separator for continuous data.
            return CharacterDataSequence.symbols_as_string(self, sep=sep)

    character_sequence_type = ContinuousCharacterDataSequence
    data_type = "continuous"

    def __init__(self, *args, **kwargs):
        CharacterMatrix.__init__(self, *args, **kwargs)
+
class DiscreteCharacterMatrix(CharacterMatrix):
    """
    Base class for |CharacterMatrix| classes whose characters take values
    from discrete state alphabets (e.g., DNA, protein, "standard" data).

    Maintains the collection of state alphabets in use by this matrix in
    ``self.state_alphabets`` and exposes a ``default_state_alphabet``
    property for the preferred one.
    """

    class DiscreteCharacterDataSequence(CharacterDataSequence):
        pass
    character_sequence_type = DiscreteCharacterDataSequence

    data_type = "discrete"

    def __init__(self, *args, **kwargs):
        CharacterMatrix.__init__(self, *args, **kwargs)
        # All state alphabets referenced by this matrix.
        self.state_alphabets = []
        # Explicitly-set default alphabet; when None, the default is
        # inferred by _get_default_state_alphabet().
        self._default_state_alphabet = None

    def _get_default_state_alphabet(self):
        """
        Returns the default state alphabet: the explicitly-set one if any,
        otherwise the single registered alphabet; raises TypeError if the
        default is ambiguous (several alphabets) or undefined (none).
        """
        if self._default_state_alphabet is not None:
            return self._default_state_alphabet
        elif len(self.state_alphabets) == 1:
            return self.state_alphabets[0]
        elif len(self.state_alphabets) > 1:
            raise TypeError("Multiple state alphabets defined for this matrix with no default specified")
        elif len(self.state_alphabets) == 0:
            raise TypeError("No state alphabets defined for this matrix")
        return None  # unreachable: all cases handled above
    def _set_default_state_alphabet(self, s):
        """
        Sets ``s`` as the default state alphabet, registering it in
        ``self.state_alphabets`` if it is not already present.
        """
        if s not in self.state_alphabets:
            self.state_alphabets.append(s)
        self._default_state_alphabet = s
    default_state_alphabet = property(_get_default_state_alphabet, _set_default_state_alphabet)

    def append_taxon_sequence(self, taxon, state_symbols):
        # NOTE(review): this method references ``CharacterDataCell`` and
        # ``self.default_symbol_state_map``, neither of which is defined in
        # this module view -- it appears to be legacy code; confirm it is
        # still functional before relying on it.
        if taxon not in self:
            self[taxon] = CharacterDataSequence()
        for value in state_symbols:
            if isinstance(value, str):
                symbol = value
            else:
                # Coerce non-string symbols (e.g., ints) to their string form.
                symbol = str(value)
            self[taxon].append(CharacterDataCell(value=self.default_symbol_state_map[symbol]))

    def remap_to_state_alphabet_by_symbol(self,
            state_alphabet,
            purge_other_state_alphabets=True):
        """
        All entities with any reference to a state alphabet will be have the
        reference reassigned to state alphabet ``sa``, and all entities with
        any reference to a state alphabet element will be have the reference
        reassigned to any state alphabet element in ``sa`` that has the same
        symbol. Raises KeyError if no matching symbol can be found.
        """
        symbol_state_map = state_alphabet.symbol_state_map()
        for vi, vec in enumerate(self._taxon_sequence_map.values()):
            for ci, cell in enumerate(vec):
                # Re-point each cell at the same-symbol state in the new alphabet.
                cell.value = symbol_state_map[cell.value.symbol]
        for ct in self.character_types:
            ct.state_alphabet = state_alphabet
        if purge_other_state_alphabets:
            self.state_alphabets = [state_alphabet]
            self.default_state_alphabet = state_alphabet

    def remap_to_default_state_alphabet_by_symbol(self,
            purge_other_state_alphabets=True):
        """
        All entities with any reference to a state alphabet will be have the
        reference reassigned to the default state alphabet, and all entities
        with any reference to a state alphabet element will be have the
        reference reassigned to any state alphabet element in the default
        state alphabet that has the same symbol. Raises ValueError if no
        matching symbol can be found.
        """
        self.remap_to_state_alphabet_by_symbol(
                state_alphabet=self.default_state_alphabet,
                purge_other_state_alphabets=purge_other_state_alphabets)

    def taxon_state_sets_map(self,
            char_indices=None,
            gaps_as_missing=True,
            gap_state=None,
            no_data_state=None):
        """
        Returns a dictionary that maps taxon objects to lists of sets of
        fundamental state indices.

        Parameters
        ----------

        char_indices : iterable of ints
            An iterable of indexes of characters to include (by column). If not
            given or `None` [default], then all characters are included.

        gaps_as_missing : boolean
            If `True` [default] then gap characters will be treated as missing
            data values. If `False`, then they will be treated as an additional
            (fundamental) state.`

        Returns
        -------
        d : dict
            A dictionary with class:|Taxon| objects as keys and a list of sets
            of fundamental state indexes as values.

            E.g., Given the following matrix of DNA characters:

                T1 AGN
                T2 C-T
                T3 GC?

            Return with ``gaps_as_missing==True`` ::

                {
                    <T1> : [ set([0]), set([2]),        set([0,1,2,3]) ],
                    <T2> : [ set([1]), set([0,1,2,3]),  set([3]) ],
                    <T3> : [ set([2]), set([1]),        set([0,1,2,3]) ],
                }

            Return with ``gaps_as_missing==False`` ::

                {
                    <T1> : [ set([0]), set([2]),        set([0,1,2,3]) ],
                    <T2> : [ set([1]), set([4]),        set([3]) ],
                    <T3> : [ set([2]), set([1]),        set([0,1,2,3,4]) ],
                }

            Note that when gaps are treated as a fundamental state, not only
            does '-' map to a distinct and unique state (4), but '?' (missing
            data) maps to set consisting of all bases *and* the gap
            state, whereas 'N' maps to a set of all bases but not including the
            gap state.

            When gaps are treated as missing, on the other hand, then '?' and
            'N' and '-' all map to the same set, i.e. of all the bases.

        """
        taxon_to_state_indices = {}
        for t in self:
            cdv = self[t]
            if char_indices is None:
                ci = range(len(cdv))
            else:
                ci = char_indices
            v = []
            for char_index in ci:
                state = cdv[char_index]
                if gaps_as_missing:
                    v.append(set(state.fundamental_indexes_with_gaps_as_missing))
                else:
                    v.append(set(state.fundamental_indexes))
            taxon_to_state_indices[t] = v
        return taxon_to_state_indices
+
class FixedAlphabetCharacterMatrix(DiscreteCharacterMatrix):
    """
    Base class for discrete matrices whose state alphabet is fixed and shared
    by all instances (e.g., DNA, RNA, protein, restriction sites).
    """

    class FixedAlphabetCharacterDataSequence(CharacterDataSequence):
        pass
    character_sequence_type = FixedAlphabetCharacterDataSequence
    data_type = "fixed"
    # Concrete subclasses bind this to the appropriate global state alphabet.
    datatype_alphabet = None

    def __init__(self, *args, **kwargs):
        DiscreteCharacterMatrix.__init__(self, *args, **kwargs)
        alphabet = type(self).datatype_alphabet
        self.state_alphabets.append(alphabet)
        self._default_state_alphabet = alphabet
+
class DnaCharacterMatrix(FixedAlphabetCharacterMatrix):
    """
    A |CharacterMatrix| specialized for DNA sequence data.
    """
    class DnaCharacterDataSequence(FixedAlphabetCharacterMatrix.FixedAlphabetCharacterDataSequence):
        """Row of DNA character states for a single taxon."""
    character_sequence_type = DnaCharacterDataSequence
    datatype_alphabet = DNA_STATE_ALPHABET
    data_type = "dna"
+
class RnaCharacterMatrix(FixedAlphabetCharacterMatrix):
    """
    Specializes |CharacterMatrix| for RNA data.
    """
    class RnaCharacterDataSequence(FixedAlphabetCharacterMatrix.FixedAlphabetCharacterDataSequence):
        pass
    character_sequence_type = RnaCharacterDataSequence
    data_type = "rna"
    datatype_alphabet = RNA_STATE_ALPHABET
+
class NucleotideCharacterMatrix(FixedAlphabetCharacterMatrix):
    """
    Specializes |CharacterMatrix| for nucleotide (DNA and/or RNA) data.
    """
    class NucleotideCharacterDataSequence(FixedAlphabetCharacterMatrix.FixedAlphabetCharacterDataSequence):
        pass
    character_sequence_type = NucleotideCharacterDataSequence
    data_type = "nucleotide"
    datatype_alphabet = NUCLEOTIDE_STATE_ALPHABET
+
class ProteinCharacterMatrix(FixedAlphabetCharacterMatrix):
    """
    A |CharacterMatrix| specialized for protein (amino acid) sequence data.
    """
    class ProteinCharacterDataSequence(FixedAlphabetCharacterMatrix.FixedAlphabetCharacterDataSequence):
        """Row of amino acid character states for a single taxon."""
    character_sequence_type = ProteinCharacterDataSequence
    datatype_alphabet = PROTEIN_STATE_ALPHABET
    data_type = "protein"
+
class RestrictionSitesCharacterMatrix(FixedAlphabetCharacterMatrix):
    """
    A |CharacterMatrix| specialized for restriction site data.
    """
    class RestrictionSitesCharacterDataSequence(FixedAlphabetCharacterMatrix.FixedAlphabetCharacterDataSequence):
        """Row of restriction site character states for a single taxon."""
    character_sequence_type = RestrictionSitesCharacterDataSequence
    datatype_alphabet = RESTRICTION_SITES_STATE_ALPHABET
    data_type = "restriction"
+
class InfiniteSitesCharacterMatrix(FixedAlphabetCharacterMatrix):
    """
    A |CharacterMatrix| specialized for infinite sites data.
    """
    class InfiniteSitesCharacterDataSequence(FixedAlphabetCharacterMatrix.FixedAlphabetCharacterDataSequence):
        """Row of infinite-sites character states for a single taxon."""
    character_sequence_type = InfiniteSitesCharacterDataSequence
    datatype_alphabet = INFINITE_SITES_STATE_ALPHABET
    data_type = "infinite"
+
class StandardCharacterMatrix(DiscreteCharacterMatrix):
    """
    Specializes |CharacterMatrix| for "standard" data, i.e. generic discrete
    character data.
    """
    class StandardCharacterDataSequence(DiscreteCharacterMatrix.DiscreteCharacterDataSequence):
        pass
    character_sequence_type = StandardCharacterDataSequence

    data_type = "standard"

    def __init__(self, *args, **kwargs):
        """
        Unless ``default_state_alphabet=None`` is passed in, a default state
        alphabet consisting of the state symbols 0-9 is created
        automatically. To specify a different default state alphabet::

            default_state_alphabet=dendropy.new_standard_state_alphabet("abc")
            default_state_alphabet=dendropy.new_standard_state_alphabet("ij")

        """
        # EAFP: honor an explicitly-passed value (including None); build the
        # 0-9 alphabet only when the keyword is absent.
        try:
            default_state_alphabet = kwargs.pop("default_state_alphabet")
        except KeyError:
            default_state_alphabet = charstatemodel.new_standard_state_alphabet()
        DiscreteCharacterMatrix.__init__(self, *args, **kwargs)
        if default_state_alphabet is not None:
            self.default_state_alphabet = default_state_alphabet
+
+###############################################################################
+## Main Character Matrix Factory Function
+
# Maps a data type name (as used in, e.g., schema/type specifications) to
# the corresponding |CharacterMatrix| subclass.
# NOTE(review): there is no entry for "nucleotide" even though
# NucleotideCharacterMatrix is defined above -- confirm whether this
# omission is intentional.
data_type_matrix_map = {
    'continuous' : ContinuousCharacterMatrix,
    'dna' : DnaCharacterMatrix,
    'rna' : RnaCharacterMatrix,
    'protein' : ProteinCharacterMatrix,
    'standard' : StandardCharacterMatrix,
    'restriction' : RestrictionSitesCharacterMatrix,
    'infinite' : InfiniteSitesCharacterMatrix,
}
+
def get_char_matrix_type(data_type):
    """
    Maps a data type name to the corresponding |CharacterMatrix| subclass.

    Parameters
    ----------
    data_type : str
        A key of ``data_type_matrix_map`` (e.g. "dna", "standard").

    Returns
    -------
    type
        The |CharacterMatrix| subclass registered for ``data_type``.

    Raises
    ------
    TypeError
        If ``data_type`` is `None`.
    KeyError
        If ``data_type`` is not a recognized data type name.
    """
    if data_type is None:
        raise TypeError("'data_type' must be specified")
    matrix_type = data_type_matrix_map.get(data_type, None)
    if matrix_type is None:
        # Bug fix: the original format string had a single placeholder but
        # was given two arguments, so the list of valid names was dropped
        # from the error message.
        raise KeyError("Unrecognized data type specification: '{}' (must be one of: {})".format(
            data_type, sorted(data_type_matrix_map.keys())))
    return matrix_type
+
def new_char_matrix(data_type, **kwargs):
    """
    Instantiates and returns a new |CharacterMatrix| of the class registered
    for ``data_type``, passing ``kwargs`` through to its constructor.
    """
    matrix_class = get_char_matrix_type(data_type=data_type)
    return matrix_class(**kwargs)
diff --git a/dendropy/datamodel/charstatemodel.py b/dendropy/datamodel/charstatemodel.py
new file mode 100644
index 0000000..f1b0e98
--- /dev/null
+++ b/dendropy/datamodel/charstatemodel.py
@@ -0,0 +1,1425 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Character state definitions and alphabets. Certain state alphabets, such as
+DNA, RNA, protein, etc.  are defined here. These are termed "fixed" state
+alphabets, and for each distinct state alphabet concept (e.g., DNA), there is
+one and only one instance of a representation of that concept (i.e., all
+DNA-type data in DendroPy, regardless of the source, refer to the same instance
+of the state alphabet and state alphabet elements).
+"""
+
+import collections
+import itertools
+from dendropy.datamodel import basemodel
+from dendropy.utility import container
+
+###############################################################################
+## StateAlphabet
+
+class StateAlphabet(
+        basemodel.DataObject,
+        basemodel.Annotable):
+    """
+    A master registry mapping state symbols to their definitions.
+
+    There are two classes or "denominations" of states:
+
+        - fundamental states
+            These are the basic, atomic, self-contained states of the alphabet,
+            distinct and mutually-exclusive from every other fundamental state.
+            E.g., for DNA: adenine, guanine, cytosine, and thymine.
+
+        - multi-state states
+            The states are second-level or "pseudo-states", in that they are
+            not properly states in and of themselves, but rather each consist
+            of a set of other states. That is, a multi-state state is a set of
+            two or more fundamental states.  Multi-state states are of one of
+            two types: "ambiguous" and "polymorphic" states. "Ambiguous" states
+            represent states in which the true fundamental state is unknown,
+            but consists of one of the fundamental states to which the
+            ambiguous states map. "Polymorphic" states represent states in
+            which the entity actually has multiple fundamental states
+            simultaneously. "Ambiguous" states are an expression of uncertainty
+            or lack of knowledge about the identity of state. With
+            "polymorphic" states, on the other hand, there is no uncertaintly
+            or lack of knowledge about the state: the state is known
+            definitively, and it consists of multiple fundamental states.  An
+            example of an ambiguous state would be 'N', representing any base
+            in molecular sequence data. An example of a polymorphic state would
+            be the range of a widespread species found in multiple geographic
+            units.  Note that multi-state states can be specified in terms of
+            other multi-state states, but that upon instantiation, these member
+            multi-states will be expanded to their fundamental states.
+
+    State definitions or identities are immutable: their symbology and mappings
+    cannot be changed after creation/initialization. State definitions and
+    identities, however, can be added/removed from a state alphabet.
+
+    Parameters
+    ----------
+
+    label : string, optional
+        The name for this state alphabet.
+
+    fundamental_states : iterable of strings
+        An iterable of symbols defining the fundamental (i.e., non-ambiguous
+        and non-polymorphic states of this alphabet), with a 1-to-1
+        correspodence between symbols and states. Each state will also be
+        automatically indexed base on its position in this list. For DNA, this
+        would be something like: ``'ACGT'`` or ``('A', 'C', 'G', T')``. For
+        "standard" characters, this would be something like ``'01'`` or ``('0',
+        '1')``.
+
+    no_data_symbol : string
+        If specified, automatically creates a "no data" ambiguous state,
+        represented by the (canonical, or primary) symbol
+        "no_data_symbol", which maps to all fundamental states.
+        This will also insert `None` into all symbol look-up maps, which, when
+        dereferenced will return this state. Furthermore, the attribute
+        ``self.no_data_symbol`` will return this symbol and ``self.no_data_state``
+        will return this state. The 'no data' state will be an ambiguous
+        multistate type.
+
+    ambiguous_states : iterable of tuples
+        An iterable consisting of tuples expressing ambiguous state symbols and
+        the set of symbols representing the fundamental states to which they
+        map. The first element in the tuple is the symbol used to represent the
+        ambiguous state; this can be blank (""), but if not blank it needs to
+        be unique across all symbols (including case-variants if the state
+        alphabet is case-insensitive).  The second element is an
+        iterable of fundamental state symbols to which this ambiguous state
+        maps. The fundamental state symbols *must* have already been defined,
+        i.e. given in the value passed to ``fundamental_states``. Note: a
+        dictionary may seem like a more tractable structure than iterable of
+        tuples, but we may need to specify multiple anonymous or blank
+        ambiguous states.
+
+    polymorphic_states : iterable of tuples
+        An iterable consisting of tuples expressing polymorphic state symbols and
+        the set of symbols representing the fundamental states to which they
+        map. The first element in the tuple is the symbol used to represent the
+        polymorphic state; this can be blank (""), but if not blank it needs to
+        be unique across all symbols (including case-variants if the state
+        alphabet is case-insensitive).  The second element is an
+        iterable of fundamental state symbols to which this polymorphic state
+        maps. The fundamental state symbols *must* have already been defined,
+        i.e. given in the value passed to ``fundamental_states``. Note: a
+        dictionary may seem like a more tractable structure than iterable of
+        tuples, but we may need to specify multiple anonymous or blank
+        polymorphic states.
+
+    symbol_synonyms : dictionary
+        A mapping of symbols, with keys being the new symbols and values being
+        (already-defined) symbols of states to which they map. This provides a
+        mechanism by which states with multiple symbols can be managed. For
+        example, an ambiguous state, "unknown", representing all fundamental
+        states might be defined with '?' as its primary symbol, and a synonym
+        symbol for this state might be 'X'.
+
+    """
+
+    ###########################################################################
+    ### CLass-level Constants
+
+    FUNDAMENTAL_STATE = 0
+    AMBIGUOUS_STATE = 1
+    POLYMORPHIC_STATE = 2
+
+    ###########################################################################
+    ### Life-Cycle and Identity
+
    def __init__(self,
            fundamental_states=None,
            ambiguous_states=None,
            polymorphic_states=None,
            symbol_synonyms=None,
            no_data_symbol=None,
            gap_symbol=None,
            label=None,
            case_sensitive=True):
        """
        Builds the alphabet from the given symbol specifications.

        See the class docstring for the semantics of each parameter. Note
        that creation order matters: fundamental states first, then the gap
        state (itself modeled as a fundamental state), then the all-states
        "no data" ambiguity, then user-specified multistates and synonyms.
        Multistate and synonym definitions are only processed when
        ``fundamental_states`` is non-empty.
        """

        basemodel.DataObject.__init__(self, label=label)
        self._is_case_sensitive = case_sensitive

        # Core collection underlying alphabet
        self._fundamental_states = []
        self._ambiguous_states = []
        self._polymorphic_states = []

        # Look-up mappings: populated by compile_lookup_mappings() once
        # states have been defined; ``None`` until then.
        self._state_identities = None
        self._canonical_state_symbols = None
        self._canonical_symbol_state_map = None
        self._full_symbol_state_map = None
        self._index_state_map = None
        self._fundamental_states_to_ambiguous_state_map = None
        self._fundamental_states_to_polymorphic_state_map = None

        # Suppress automatic recompilation of the lookup tables while the
        # initial batch of states is added; re-enabled at the end.
        self.autocompile_lookup_tables = False

        # This identifies the gap state when compiling the state alphabet. The
        # principle purpose behind this is to be able to tell the gap state
        # that it is, indeed, a gap state. And the purpose of this, in turn,
        # is so that the when the gap state is asked for its fundamental
        # indexes, it will return the fundamental indexes of the missing data
        # state in its place if it is *NOT* to be treated as a fifth
        # fundamental state.
        self.gap_state = None
        self._gap_symbol = None
        self.no_data_state = None
        self._no_data_symbol = None

        # Populate core collection
        if fundamental_states:
            for symbol in fundamental_states:
                self.new_fundamental_state(symbol)
            # The gap state is modeled as an extra fundamental state.
            if gap_symbol:
                self.gap_state = self.new_fundamental_state(gap_symbol)
                self._gap_symbol = gap_symbol
            # "No data" is an ambiguity over *all* fundamental states
            # (including the gap state, if defined above).
            if no_data_symbol:
                self.no_data_state = self.new_ambiguous_state(
                        symbol=no_data_symbol,
                        member_states=self._fundamental_states)
                self._no_data_symbol = no_data_symbol
            if ambiguous_states:
                for ss in ambiguous_states:
                    self.new_ambiguous_state(symbol=ss[0], member_state_symbols=ss[1])
            if polymorphic_states:
                for ss in polymorphic_states:
                    self.new_polymorphic_state(symbol=ss[0], member_state_symbols=ss[1])
            if symbol_synonyms:
                for k in symbol_synonyms:
                    self.new_symbol_synonym(k, symbol_synonyms[k])
            # Build mappings
            self.compile_lookup_mappings()

        # Post-initialization: states added from now on trigger automatic
        # recompilation of the lookup tables.
        self.autocompile_lookup_tables = True
+
    def __hash__(self):
        # Identity-based hash: each alphabet instance is unique.
        return id(self)

    def __eq__(self, other):
        # Identity-based equality: two alphabets are equal only if they are
        # the same object.
        return other is self

    def __copy__(self, memo=None):
        # Alphabets are treated as immutable singletons: "copies" are self.
        return self

    def taxon_namespace_scoped_copy(self, memo=None):
        # Alphabets are shared across copied data sets, never duplicated.
        return self

    def __deepcopy__(self, memo=None):
        # Deep copies also return self (see __copy__).
        return self
+
+    ###########################################################################
+    ### Symbol Management
+
+    def _direct_get_state_for_symbol(self, symbol):
+        """
+        Returns the |StateIdentity| instance corresponding to ``symbol``.
+        """
+        for state_symbol, state in self.symbol_state_pair_iter(include_synonyms=True):
+            if state_symbol == symbol:
+                return state
+        raise KeyError(symbol)
+
+    def _direct_get_fundamental_states_for_symbols(self, symbols):
+        """
+        Returns the list of |StateIdentity| instances corresponding to
+        the iterable of symbols given by ``symbols``, with each element in
+        ``symbols`` corresponding to a single symbol.
+        """
+        ss = []
+        for symbol in symbols:
+            state = self._direct_get_state_for_symbol(symbol)
+            ss.extend(state.fundamental_states)
+        return tuple(ss)
+
+    def _validate_new_symbol(self, symbol):
+        if symbol is None or symbol == "":
+            raise ValueError("Cannot validate empty symbol")
+        symbol = str(symbol)
+        for state_symbol, state in self.symbol_state_pair_iter(include_synonyms=True):
+            if state_symbol == symbol:
+                raise ValueError("State with symbol or symbol synonym of '{}' already defined is this alphabet".format(symbol))
+        return symbol
+
+    def new_fundamental_state(self, symbol):
+        """
+        Adds a new fundamental state to the collection
+        of states in this alphabet.
+
+        Parameters
+        ----------
+        symbol : string
+            The symbol used to represent this state. Cannot have previously
+            been used to refer to any other state, fundamental or otherwise, as
+            a primary or synonymous symbol (including implicit synonyms given
+            by case-variants if the state alphabet is not case-sensitive).
+            Cannot be blank ("") or `None`.
+
+        Returns
+        -------
+        s : |StateIdentity|
+            The new state created and added.
+        """
+        if symbol is None or symbol == "":
+            raise ValueError("Fundamental states cannot be defined without a valid symbol")
+        symbol = self._validate_new_symbol(symbol)
+        index = len(self._fundamental_states)
+        new_state = StateIdentity(
+                symbol=symbol,
+                index=index,
+                state_denomination=StateAlphabet.FUNDAMENTAL_STATE,
+                member_states=None)
+        self._fundamental_states.append(new_state)
+        if not self._is_case_sensitive:
+            for s in (symbol.upper(), symbol.lower()):
+                if s != symbol:
+                    self.new_symbol_synonym(s, symbol)
+        if self.autocompile_lookup_tables:
+            self.compile_symbol_lookup_mappings()
+        return new_state
+
+    def new_ambiguous_state(self, symbol, **kwargs):
+        """
+        Adds a new ambiguous state to the collection
+        of states in this alphabet.
+
+        Parameters
+        ----------
+        symbol : string or None
+            The symbol used to represent this state. Cannot have previously
+            been used to refer to any other state, fundamental or otherwise, as
+            a primary or synonymous symbol (including implicit synonyms given
+            by case-variants if the state alphabet is not case-sensitive). Can
+            be blank ("") or `None` if there.
+
+        \*\*kwargs : keyword arguments, mandatory
+            Exactly one of the following must be specified:
+
+                member_state_symbols : iterable of strings
+                    List of symbols representing states to which this state maps. Symbols
+                    representing multistates will taken to refer to the set of
+                    fundamental states to which they, in turn, map.
+
+                member_states : iterable of |StateIdentity| objects
+                    List of |StateIdentity| representing states to which this state maps.
+
+        Returns
+        -------
+        s : |StateIdentity|
+            The new state created and added.
+        """
+        return self.new_multistate(
+                symbol=symbol,
+                state_denomination=StateAlphabet.AMBIGUOUS_STATE,
+                **kwargs)
+
+    def new_polymorphic_state(self,
+            symbol,
+            **kwargs):
+        """
+        Adds a new polymorphic state to the collection
+        of states in this alphabet.
+
+        Parameters
+        ----------
+        symbol : string or None
+            The symbol used to represent this state. Cannot have previously
+            been used to refer to any other state, fundamental or otherwise, as
+            a primary or synonymous symbol (including implicit synonyms given
+            by case-variants if the state alphabet is not case-sensitive). Can
+            be blank ("") or `None` if there.
+
+        \*\*kwargs : keyword arguments, mandatory
+            Exactly one of the following must be specified:
+
+                member_state_symbols : iterable of strings
+                    List of symbols representing states to which this state maps. Symbols
+                    representing multistates will taken to refer to the set of
+                    fundamental states to which they, in turn, map.
+
+                member_states : iterable of |StateIdentity| objects
+                    List of |StateIdentity| representing states to which this state maps.
+
+        Returns
+        -------
+        s : |StateIdentity|
+            The new state created and added.
+        """
+        return self.new_multistate(
+                symbol=symbol,
+                state_denomination=StateAlphabet.POLYMORPHIC_STATE,
+                **kwargs)
+
+    def new_multistate(self,
+            symbol,
+            state_denomination,
+            **kwargs):
+        """
+        Adds a new polymorphic or ambiguous state to the collection
+        of states in this alphabet.
+
+        Parameters
+        ----------
+        symbol : string or None
+            The symbol used to represent this state. Cannot have previously
+            been used to refer to any other state, fundamental or otherwise, as
+            a primary or synonymous symbol (including implicit synonyms given
+            by case-variants if the state alphabet is not case-sensitive). Can
+            be blank ("") or `None` if there.
+
+        state_denomination : enum
+            StateAlphabet.POLYMORPHIC_STATE or StateAlphabet.AMBIGUOUS_STATE
+
+        \*\*kwargs : keyword arguments, mandatory
+            Exactly one of the following must be specified:
+
+                member_state_symbols : iterable of strings
+                    List of symbols representing states to which this state maps. Symbols
+                    representing multistates will taken to refer to the set of
+                    fundamental states to which they, in turn, map.
+
+                member_states : iterable of |StateIdentity| objects
+                    List of |StateIdentity| representing states to which this state maps.
+
+        Returns
+        -------
+        s : |StateIdentity|
+            The new state created and added.
+        """
+        if symbol is not None and symbol != "":
+            symbol = self._validate_new_symbol(symbol)
+        if len(kwargs) != 1:
+            raise TypeError("Exactly one of 'member_state_symbols' or 'member_states' is required")
+        if "member_state_symbols" in kwargs:
+            member_states = self._direct_get_fundamental_states_for_symbols(kwargs["member_state_symbols"])
+        elif "member_states" in kwargs:
+            member_states = kwargs["member_states"]
+        else:
+            raise ValueError("Exactly one of 'member_state_symbols' or 'member_states' is required")
+        new_state = StateIdentity(
+                symbol=symbol,
+                index=None,
+                state_denomination=state_denomination,
+                member_states=member_states)
+        if state_denomination == StateAlphabet.POLYMORPHIC_STATE:
+            self._polymorphic_states.append(new_state)
+        elif state_denomination == StateAlphabet.AMBIGUOUS_STATE:
+            self._ambiguous_states.append(new_state)
+        else:
+            raise ValueError(state_denomination)
+        if symbol and not self._is_case_sensitive:
+            for s in (symbol.upper(), symbol.lower()):
+                if s != symbol:
+                    self.new_symbol_synonym(s, symbol)
+        if self.autocompile_lookup_tables:
+            if symbol:
+                self.compile_symbol_lookup_mappings()
+            self.compile_member_states_lookup_mappings()
+        return new_state
+
+    def new_symbol_synonym(self,
+            symbol_synonym, referenced_symbol):
+        """
+        Defines an alternative symbol mapping for an existing state.
+
+        Parameters
+        ----------
+        symbol_synonym : string
+            The (new) alternative symbol.
+
+        referenced_symbol : string
+            The symbol for the state to which the alternative symbol will also
+            map.
+
+        Returns
+        -------
+        s : |StateIdentity|
+            The state to which this synonym maps.
+        ------
+        """
+        if symbol_synonym is None or symbol_synonym == "":
+            raise ValueError("Symbol synonym cannot be empty")
+        symbol_synonym = self._validate_new_symbol(symbol_synonym)
+        state = self._direct_get_state_for_symbol(referenced_symbol)
+        if symbol_synonym in state.symbol_synonyms:
+            raise ValueError("Symbol synonym '{}' already defined for state '{}".format(symbol_synonym, state))
+        state.symbol_synonyms.append(symbol_synonym)
+        if self.autocompile_lookup_tables:
+            self.compile_symbol_lookup_mappings()
+        return state
+
+    ###########################################################################
+    ### Optimization/Sugar: Lookup Mappings and Attribute Settings
+
    def compile_lookup_mappings(self):
        """
        Builds lookup tables/mappings for quick referencing and dereferencing
        of symbols/states: first the symbol-based tables, then the
        member-state-based multistate tables (which rely on the symbol pass
        having assigned indexes).
        """
        self.compile_symbol_lookup_mappings()
        self.compile_member_states_lookup_mappings()
+
    def compile_member_states_lookup_mappings(self):
        """
        Builds lookup tables/mappings for quick referencing and dereferencing
        of ambiguous/polymorphic states based on the fundamental states to
        which they map.
        """
        temp_fundamental_states_to_ambiguous_state_map = {}
        temp_fundamental_states_to_polymorphic_state_map = {}
        # The "no data" state maps to *all* fundamental states; refresh its
        # membership here in case fundamental states were added after it was
        # created.
        if self.no_data_state is not None:
            assert self.no_data_state in self._ambiguous_states
            self.no_data_state.member_states = tuple(self.fundamental_state_iter())
        for idx, state in enumerate(self.state_iter()):
            if state.state_denomination == StateAlphabet.AMBIGUOUS_STATE:
                member_states = frozenset(state.member_states)
                if member_states in temp_fundamental_states_to_ambiguous_state_map:
                    # NOTE: multiple ambiguities with identical member sets
                    # are deliberately tolerated; the first definition wins.
                    pass
                    # raise ValueError("Multiple definitions of ambiguous state with member states of '{}': {}, {}. Define a symbol synonym instead.".format(
                    #     state.member_states_str, temp_fundamental_states_to_ambiguous_state_map[member_states], state))
                else:
                    temp_fundamental_states_to_ambiguous_state_map[member_states] = state
            elif state.state_denomination == StateAlphabet.POLYMORPHIC_STATE:
                member_states = frozenset(state.member_states)
                if member_states in temp_fundamental_states_to_polymorphic_state_map:
                    # NOTE: as above — first definition wins.
                    pass
                    # raise ValueError("Multiple definitions of polymorphic state with member states of '{}': {}, {}. Define a symbol synonym instead.".format(
                    #     state.member_states_str, temp_fundamental_states_to_polymorphic_state_map[member_states], state))
                else:
                    temp_fundamental_states_to_polymorphic_state_map[member_states] = state
        # Freeze the compiled mappings so they are read-only from here on.
        self._fundamental_states_to_ambiguous_state_map = container.FrozenOrderedDict(temp_fundamental_states_to_ambiguous_state_map)
        self._fundamental_states_to_polymorphic_state_map = container.FrozenOrderedDict(temp_fundamental_states_to_polymorphic_state_map)
+
+    def _set_symbol_mapping(self, d, symbol, state):
+        if symbol is None or symbol == "":
+            raise ValueError("Symbol synonym cannot be empty")
+        assert symbol not in d
+        d[symbol] = state
+
    def compile_symbol_lookup_mappings(self):
        """
        Builds lookup tables/mappings for quick referencing and dereferencing
        of state symbology: the state-identity tuple, the canonical symbol
        tuple, the canonical and full (synonym-inclusive) symbol-to-state
        maps, and the index-to-state map. Also (re)assigns each state's
        index and its gap/no-data bookkeeping flags.
        """
        temp_states = []
        temp_symbols = []
        temp_canonical_symbol_state_map = collections.OrderedDict()
        temp_full_symbol_state_map = collections.OrderedDict()
        temp_index_state_map = collections.OrderedDict()
        # if self._gap_symbol is not None and self.no_data_state is None:
        #     self.no_data_state = self.new_ambiguous_state(symbol=None,
        #                 member_states=self._fundamental_states)
        if self.no_data_state is not None:
            assert self.no_data_symbol == self.no_data_state.symbol, "{} != {}".format(self.no_data_symbol, self.no_data_state.symbol)
            # ``None`` keys into the full map resolve to the no-data state.
            temp_full_symbol_state_map[None] = self.no_data_state
        for idx, state in enumerate(self.state_iter()):
            temp_states.append(state)
            if state.symbol:
                temp_symbols.append(state.symbol)
                assert state.symbol not in temp_canonical_symbol_state_map
                temp_canonical_symbol_state_map[state.symbol] = state
                self._set_symbol_mapping(
                        temp_full_symbol_state_map,
                        state.symbol,
                        state)
                if state.symbol_synonyms:
                    for ss in state.symbol_synonyms:
                        self._set_symbol_mapping(
                                temp_full_symbol_state_map,
                                ss,
                                state)
            else:
                # Only multistates may be anonymous (symbol-less).
                assert state.state_denomination != StateAlphabet.FUNDAMENTAL_STATE
            # (Re)assign the state's position in overall iteration order.
            state._index = idx
            if self.gap_state is not None and state is self.gap_state and self.no_data_state is not None:
                # Let the gap state proxy the no-data state when gaps are to
                # be treated as missing data rather than a fifth state.
                state.is_gap_state = True
                state.gap_state_as_no_data_state = self.no_data_state
            else:
                state.is_gap_state = False
                state.gap_state_as_no_data_state = None
            temp_index_state_map[idx] = state
        # Freeze the compiled collections so they are read-only from here on.
        self._state_identities = tuple(temp_states)
        self._canonical_state_symbols = tuple(temp_symbols)
        self._canonical_symbol_state_map = container.FrozenOrderedDict(temp_canonical_symbol_state_map)
        self._full_symbol_state_map = container.FrozenOrderedDict(temp_full_symbol_state_map)
        self._index_state_map = container.FrozenOrderedDict(temp_index_state_map)
+
+    def set_state_as_attribute(self, state, attr_name=None):
+        """
+        Sets the given state as an attribute of this alphabet.
+        The name of the attribute will be ``attr_name`` if specified,
+        or the state symbol otherwise.
+
+        Parameters
+        ----------
+        state : |StateIdentity|
+            The state to be made an attribute of this alphabet.
+        attr_name : string
+            The name of the attribute. If not specified, the state
+            symbol will be used.
+        """
+        if (state not in self._fundamental_states
+                and state not in self._ambiguous_states
+                and state not in self._polymorphic_states):
+            raise ValueError("State {} not defined in current alphabet".format(state))
+        if attr_name is None:
+            attr_name = state.symbol
+        if attr_name is None:
+            raise TypeError("Cannot set attribute: non-None symbol needed for state or non-None attribute name needs to be provided")
+        setattr(self, attr_name, state)
+
+    ###########################################################################
+    ### Special handling to designate gap
+
+    def _get_gap_symbol(self):
+        return self._gap_symbol
+
+    def _set_gap_symbol(self, gap_symbol):
+        """
+        For state alphabets with no explicitly-defined gap and no data (missing) symbols,
+        this method will allow creation of mapping of gaps to no data states,
+        so that tree/data scoring methods that require gaps to be treated as
+        missing data can be used. Note that the gap state needs to be already
+        defined in the state alphabet and already associated with the
+        designated symbol.
+        """
+        if gap_symbol is not None:
+            self.gap_state = self[gap_symbol]
+            self._gap_symbol = gap_symbol
+        else:
+            self.gap_state = None
+            self._gap_symbol = None
+
+    gap_symbol = property(_get_gap_symbol, _set_gap_symbol)
+
+    def _get_no_data_symbol(self):
+        return self._no_data_symbol
+
+    def _set_no_data_symbol(self, no_data_symbol):
+        if no_data_symbol is not None:
+            self.no_data_state = self[no_data_symbol]
+            self._no_data_symbol = no_data_symbol
+        else:
+            self.no_data_state = None
+            self._no_data_symbol = None
+
+    no_data_symbol = property(_get_no_data_symbol, _set_no_data_symbol)
+
+
+    ###########################################################################
+    ### Symbol Access
+
+    def __len__(self):
+        """
+        Number of states.
+        """
+        return ( len(self._fundamental_states)
+                + len(self._ambiguous_states)
+                + len(self._polymorphic_states) )
+
    def __iter__(self):
        """
        Returns :meth:`StateAlphabet.state_iter()`: iterator over all state
        identities (fundamental, then ambiguous, then polymorphic).
        """
        return self.state_iter()
+
+    def state_iter(self):
+        """
+        Returns an iterator over all state identities.
+        """
+        return itertools.chain(
+                self._fundamental_states,
+                self._ambiguous_states,
+                self._polymorphic_states)
+
+    def fundamental_state_iter(self):
+        """
+        Returns an iterator over all fundamental state identities.
+        """
+        return itertools.chain(self._fundamental_states)
+
+    def ambiguous_state_iter(self):
+        """
+        Returns an iterator over all ambiguous state identities.
+        """
+        return itertools.chain(self._ambiguous_states)
+
+    def polymorphic_state_iter(self):
+        """
+        Returns an iterator over all polymorphic state identities.
+        """
+        return itertools.chain(self._polymorphic_states)
+
+    def multistate_state_iter(self):
+        """
+        Returns an iterator over all ambiguous and polymorphic state
+        identities.
+        """
+        return itertools.chain(self._ambiguous_states, self._polymorphic_states)
+
+    def fundamental_symbol_iter(self, include_synonyms=True):
+        """
+        Returns an iterator over all symbols (including synonyms, unless
+        ``include_synonyms`` is `False`) that map to fundamental states.
+        """
+        for state in self.fundamental_state_iter():
+            yield state.symbol
+            if state.symbol_synonyms and include_synonyms:
+                for symbol in state.symbol_synonyms:
+                    yield symbol
+
+    def ambiguous_symbol_iter(self, include_synonyms=True):
+        """
+        Returns an iterator over all symbols (including synonyms, unless
+        ``include_synonyms`` is `False`) that map to ambiguous states.
+        """
+        for state in self.ambiguous_state_iter():
+            yield state.symbol
+            if state.symbol_synonyms and include_synonyms:
+                for symbol in state.symbol_synonyms:
+                    yield symbol
+
+    def polymorphic_symbol_iter(self, include_synonyms=True):
+        """
+        Returns an iterator over all symbols (including synonyms, unless
+        ``include_synonyms`` is `False`) that map to polymorphic states.
+        """
+        for state in self.polymorphic_state_iter():
+            yield state.symbol
+            if state.symbol_synonyms and include_synonyms:
+                for symbol in state.symbol_synonyms:
+                    yield symbol
+
+    def multistate_symbol_iter(self, include_synonyms=True):
+        """
+        Returns an iterator over all symbols (including synonyms, unless
+        ``include_synonyms`` is `False`) that map to multistate states.
+        """
+        for state in self.multistate_state_iter():
+            yield state.symbol
+            if state.symbol_synonyms and include_synonyms:
+                for symbol in state.symbol_synonyms:
+                    yield symbol
+
+    def symbol_state_pair_iter(self, include_synonyms=True):
+        """
+        Returns an iterator over all symbols paired with the state to which the
+        they symbols map.
+        """
+        for state in self.state_iter():
+            yield (state.symbol, state)
+            if include_synonyms:
+                for synonym in state.symbol_synonyms:
+                    yield (synonym, state)
+
    def _get_state_identities(self):
        """
        Tuple of all state identities in this alphabet, in definition order
        (`None` until the lookup tables have been compiled).
        """
        return self._state_identities
    states = property(_get_state_identities)

    def _get_canonical_state_symbols(self):
        """
        Tuple of all canonical state symbols in this alphabet (`None` until
        the lookup tables have been compiled).
        """
        return self._canonical_state_symbols
    symbols = property(_get_canonical_state_symbols)

    def _get_canonical_symbol_state_map(self):
        """
        Dictionary with state symbols as keys and states as values. Does not
        include symbol synonyms or case variations.
        """
        return self._canonical_symbol_state_map
    canonical_symbol_state_map = property(_get_canonical_symbol_state_map)

    def _get_full_symbol_state_map(self):
        """
        Dictionary with state symbols as keys and states as values.
        Includes symbol synonyms or case variations.
        """
        return self._full_symbol_state_map
    full_symbol_state_map = property(_get_full_symbol_state_map)
+
+    def __getitem__(self, key):
+        """
+        Returns state identity corresponding to ``key``.
+
+        Parameters
+        ----------
+        key : integer or string
+            If and integer value, looks up and returns state identity by index.
+            If a string value, looks up and returns state identity by symbol.
+
+        Returns
+        -------
+        s : |StateIdentity| instance
+            Returns a |StateIdentity| corresponding to ``key``.
+
+        Raises
+        ------
+        KeyError if ``key`` is not valid.
+
+        """
+        if isinstance(key, int):
+            return self._index_state_map[key]
+        else:
+            return self._full_symbol_state_map[key]
+
+    def get_states_for_symbols(self, symbols):
+        """
+        Returns list of states corresponding to symbols.
+
+        Parameters
+        ----------
+        symbols : iterable of symbols
+
+        Returns
+        -------
+        s : list of |StateIdentity|
+            A list of |StateIdentity| instances corresponding to symbols
+            given in ``symbols``.
+        """
+        states = [self.full_symbol_state_map[s] for s in symbols]
+        return states
+
+    def get_fundamental_states_for_symbols(self, symbols):
+        """
+        Returns list of *fundamental* states corresponding to symbols.
+
+        Parameters
+        ----------
+        symbols : iterable of symbols
+
+        Returns
+        -------
+        s : list of |StateIdentity|
+            A list of fundamental |StateIdentity| instances corresponding
+            to symbols given in ``symbols``, with multi-state states expanded
+            into their fundamental symbols.
+        """
+        states = []
+        for symbol in symbols:
+            state = self._full_symbol_state_map[symbol]
+            states.extend(state.fundamental_states)
+        return states
+
+    def get_canonical_symbol_for_symbol(self, symbol):
+        """
+        Returns the canonical state symbol for the state to which ``symbol``
+        maps. E.g., in a DNA alphabet, return 'A' for 'a'.
+
+        Parameters
+        ----------
+        symbol : string
+
+        Returns
+        -------
+        s : string
+            Canonical symbol for state with symbol or synonym symbol of
+            ``symbol``.
+        """
+        return self[symbol].symbol
+
+    def match_ambiguous_state(self, symbols):
+        """
+        Returns ambiguous state with fundamental member states
+        represented by symbols given in ``symbols``.
+
+        Parameters
+        ----------
+        symbols : iterable of symbols
+
+        Returns
+        -------
+        s : |StateIdentity| instance
+        """
+        states = frozenset(self.get_fundamental_states_for_symbols(symbols))
+        return self._fundamental_states_to_ambiguous_state_map[states]
+
+    def match_polymorphic_state(self, symbols):
+        """
+        Returns polymorphic state with fundamental member states
+        represented by symbols given in ``symbols``.
+
+        Parameters
+        ----------
+        symbols : iterable of symbols
+
+        Returns
+        -------
+        s : |StateIdentity| instance
+        """
+        states = frozenset(self.get_fundamental_states_for_symbols(symbols))
+        return self._fundamental_states_to_polymorphic_state_map[states]
+
+    def match_state(self, symbols, state_denomination):
+        """
+        Returns ambiguous or polymorphic state with fundamental member states
+        represented by symbols given in ``symbols``.
+
+        Parameters
+        ----------
+        symbols : iterable of string symbols
+            Symbols representing states to be dereferenced.
+        state_denomination : {StateAlphabet.AMBIGUOUS or StateAlphabet.POLYPMORPHIC_STATE}
+
+        Returns
+        -------
+        s : |StateIdentity| instance
+        """
+        if state_denomination == StateAlphabet.AMBIGUOUS_STATE:
+            return self.match_ambiguous_state(symbols=symbols)
+        else:
+            return self.match_polymorphic_state(symbols=symbols)
+
+###############################################################################
+## StateIdentity
+
+class StateIdentity(
+        basemodel.DataObject,
+        basemodel.Annotable):
+    """
+    A character state definition, which can either be a fundamental state or
+    a mapping to a set of other character states (for polymorphic or ambiguous
+    characters).
+    """
+
    def __init__(self,
            symbol=None,
            index=None,
            state_denomination=StateAlphabet.FUNDAMENTAL_STATE,
            member_states=None):
        """
        A state is immutable with respect to its definition and identity.
        Specifically, its 'symbol', 'index', 'multistate', and 'member_states'
        properties are set upon definition/creation, and after that are
        read-only.

        Parameters
        ----------
        symbol : string
            A text symbol or token representation of this character state.
            E.g., 'G' for the base guanine in a DNA state alphabet, or '1' for
            presence of a wing in a morphological data set.
        index : integer
            The (0-based) numeric index for this state in the state alphabet.
            E.g., for a DNA alphabet: 0 = 'A'/adenine, 1 = 'C'/cytosine, 2 =
            'G'/guanine, 3 = 'T'/thymine. Or for a "standard" alphabet: 0 =
            '0', 1 = '1'. Note that ambiguous and polymorphic state definitions
            typically are not indexed.
        state_denomination : 'enum'
            One of: ``StateAlphabet.FUNDAMENTAL_STATE``,
            ``StateAlphabet.AMBIGUOUS_STATE``, or
            ``StateAlphabet.POLYMORPHIC_STATE``.
        member_states : iterable of |StateIdentity| instances.
            If a multi-state, then a collection of |StateIdentity|
            instances to which this state maps.
        """
        basemodel.DataObject.__init__(self, label=symbol)
        self._symbol = symbol
        self._index = index
        self._state_denomination = state_denomination
        # Lazily-computed caches; populated on first access.
        self._member_states = None
        self._fundamental_states = None
        self._fundamental_symbols = None
        self._fundamental_indexes = None
        self._fundamental_indexes_with_gaps_as_missing = None
        self._partials_vector = None
        # NOTE: this if/else immediately overwrites the default assignment of
        # ``_member_states`` above; member states are stored as a tuple.
        if member_states is not None:
            self._member_states = tuple(member_states)
        else:
            self._member_states = None
        # Cached string representations (built on demand).
        self._str = None
        self._repr = None
        self._member_states_str = None
        # Alternative symbols that also map to this state.
        self._symbol_synonyms = []

        # special handling for treating gap states as missing/no-data states;
        # assigned by StateAlphabet.compile_symbol_lookup_mappings().
        self.is_gap_state = None
        self.gap_state_as_no_data_state = None
+
+    def __hash__(self):
+        return id(self)
+
+    def __eq__(self, other):
+        return other is self
+
+    def __copy__(self, memo=None):
+        return self
+
+    def taxon_namespace_scoped_copy(self, memo=None):
+        return self
+
+    def __deepcopy__(self, memo=None):
+        return self
+
+    def _get_index(self):
+        return self._index
+    index = property(_get_index)
+
+    def __str__(self):
+        if self._str is None:
+            if self._symbol:
+                self._str = str(self._symbol)
+            elif self._state_denomination == StateAlphabet.FUNDAMENTAL_STATE:
+                self._str = ""
+            else:
+                self._str = self.member_states_str
+        return self._str
+
+    def __repr__(self):
+        if self._repr is None:
+            s = str(self)
+            self._repr = "<{} at {}: '{}'>".format(self.__class__.__name__,
+                    hex(id(self)), str(s))
+        return self._repr
+
+    def _get_member_states_str(self):
+        """
+        Representation of member states of self.
+        """
+        if self._member_states_str is None:
+            if self._state_denomination == StateAlphabet.FUNDAMENTAL_STATE:
+                self._member_states_str = str(self)
+            else:
+                s = ",".join([m._symbol for m in self._member_states])
+                if self._state_denomination == StateAlphabet.AMBIGUOUS_STATE:
+                    self._member_states_str = "{" + s + "}"
+                elif self._state_denomination == StateAlphabet.POLYMORPHIC_STATE:
+                    self._member_states_str = "(" + s + ")"
+                else:
+                    raise ValueError("Unrecognized state denomination: '{}'".format(self._state_denomination))
+        return self._member_states_str
+    member_states_str = property(_get_member_states_str)
+
+    def _get_symbol(self):
+        """
+        Canonical (primary) symbol of this state.
+        """
+        return self._symbol
+    symbol = property(_get_symbol)
+
+    def _get_state_denomination(self):
+        """
+        Type of multi-statedness: FUNDAMENTAL (not a multistate), AMBIGUOUS, or
+        POLYMORPHIC.
+        """
+        return self._state_denomination
+    state_denomination = property(_get_state_denomination)
+
+    def _is_single_state(self):
+        """
+        `True` if a FUNDAMENTAL state.
+        """
+        return self._state_denomination == StateAlphabet.FUNDAMENTAL_STATE
+    is_single_state = property(_is_single_state)
+
+    def _is_fundamental_state(self):
+        """
+        `True` if a FUNDAMENTAL state.
+        """
+        return self._state_denomination == StateAlphabet.FUNDAMENTAL_STATE
+    is_fundamental_state = property(_is_fundamental_state)
+
+    def _get_member_states(self):
+        """
+        Returns the (fundamental) member states that this state maps to if not
+        itself a fundamental state.
+        """
+        return self._member_states
+
+    def _set_member_states(self, member_states):
+        """
+        Rebuilds member state set.
+        """
+        if member_states is not None:
+            self._member_states = tuple(member_states)
+        else:
+            self._member_states = None
+        self._fundamental_states = None
+        self._fundamental_symbols = None
+        self._fundamental_indexes = None
+        self._fundamental_indexes_with_gaps_as_missing = None
+        self._partials_vector = None
+        self._str = None
+        self._repr = None
+        self._member_states_str = None
+    member_states = property(_get_member_states, _set_member_states)
+
+    def _get_fundamental_states(self):
+        """
+        Returns a tuple of fundamental states (i.e., tupe of single states)
+        to which this state maps.
+        """
+        if self._fundamental_states is None:
+            if self._member_states is None:
+                states = {self:True}
+            else:
+                states = collections.OrderedDict()
+                for state in self._member_states:
+                    assert state is not self
+                    for s in state.fundamental_states:
+                        states[s] = True
+            self._fundamental_states = tuple(states.keys())
+        return self._fundamental_states
+    fundamental_states = property(_get_fundamental_states)
+
+    def _get_fundamental_symbols(self):
+        """
+        Returns a tuple of fundamental state symbols (i.e., tuple of symbols
+        representing single states) to which this state maps.
+        """
+        if self._fundamental_symbols is None:
+            self._fundamental_symbols = tuple(state.symbol for state in self.fundamental_states)
+        return self._fundamental_symbols
+    fundamental_symbols = property(_get_fundamental_symbols)
+
+    def _get_fundamental_indexes(self):
+        """
+        Returns a tuple of fundamental state indexes (i.e., tuple of index
+        values of single states) to which this state maps.
+        """
+        if self._fundamental_indexes is None:
+            self._fundamental_indexes = tuple([state._index for state in self.fundamental_states])
+        return self._fundamental_indexes
+    fundamental_indexes = property(_get_fundamental_indexes)
+
+    def _get_fundamental_indexes_with_gaps_as_missing(self):
+        """
+        Returns a tuple of fundamental state indexes (i.e., tuple of index
+        values of single states) to which this state maps, with gaps being
+        substituted with missing (no-data) states.
+        """
+        if self._fundamental_indexes_with_gaps_as_missing is None:
+            if self.is_gap_state:
+                if self.gap_state_as_no_data_state is not None:
+                    self._fundamental_indexes_with_gaps_as_missing = tuple(self.gap_state_as_no_data_state.fundamental_indexes_with_gaps_as_missing)
+                else:
+                    raise ValueError("No data state not specified")
+            else:
+                fstates = [s for s in self.fundamental_states if not s.is_gap_state]
+                self._fundamental_indexes_with_gaps_as_missing = tuple([s._index for s in fstates])
+        return self._fundamental_indexes_with_gaps_as_missing
+    fundamental_indexes_with_gaps_as_missing = property(_get_fundamental_indexes_with_gaps_as_missing)
+
+    def _get_symbol_synonyms(self):
+        """
+        The collection of symbol synonyms (alternatives/equivalents to the
+        canonical symbol) which also map to this state.
+        """
+        return self._symbol_synonyms
+    # def _set_symbol_synonyms(self, value):
+    #     self._symbol_synonyms = value
+    # symbol_synonyms = property(_get_symbol_synonyms, _set_symbol_synonyms)
+    symbol_synonyms = property(_get_symbol_synonyms)
+
+    def is_exact_correspondence(self, other):
+        """
+        Tries to determine if two StateIdentity definitions
+        are equivalent by matching symbols.
+        """
+        match = True
+        if self._state_denomination != other._state_denomination:
+            return False
+        if self._state_denomination != StateAlphabet.FUNDAMENTAL_STATE and other._state_denomination != StateAlphabet.FUNDAMENTAL_STATE:
+            xf1 = self.fundamental_states
+            xf2 = other.fundamental_states
+            if len(xf1) != len(xf2):
+                match = False
+            else:
+                f1 = set(xf1)
+                f2 = set(xf2)
+                for m1 in f1:
+                    member_match = False
+                    for m2 in f2:
+                        if m1.is_exact_correspondence(m2):
+                            member_match = True
+                            f2.remove(m2)
+                            break
+                    if not member_match:
+                        match = False
+                        break
+                if match:
+                    f1 = set(xf1)
+                    f2 = set(xf2)
+                    for m2 in f2:
+                        member_match = False
+                        for m1 in f1:
+                            if m1.is_exact_correspondence(m2):
+                                f1.remove(m1)
+                                member_match = True
+                                break
+                        if not member_match:
+                            match = False
+                            break
+            return match
+        else:
+            return self._symbol == other._symbol
+
+###############################################################################
+## DnaStateAlphabet
+
+class DnaStateAlphabet(StateAlphabet):
+
+    def __init__(self):
+        fundamental_states = "ACGT"
+        polymorphic_states = None
+        ambiguous_states = (
+                ("N", "ACGT"),
+                ("R", "AG"  ),
+                ("Y", "CT"  ),
+                ("M", "AC"  ),
+                ("W", "AT"  ),
+                ("S", "CG"  ),
+                ("K", "GT"  ),
+                ("V", "ACG" ),
+                ("H", "ACT" ),
+                ("D", "AGT" ),
+                ("B", "CGT" ),
+                )
+        symbol_synonyms = {"X": "N"}
+        StateAlphabet.__init__(self,
+                fundamental_states=fundamental_states,
+                no_data_symbol="?",
+                gap_symbol="-",
+                polymorphic_states=polymorphic_states,
+                ambiguous_states=ambiguous_states,
+                symbol_synonyms=symbol_synonyms,
+                label="DNA",
+                case_sensitive=False)
+        for state in self.state_iter():
+            if state.symbol == "-":
+                attr_name = "gap"
+            else:
+                attr_name = state.symbol
+            self.set_state_as_attribute(state, attr_name)
+        self.any_residue = self.N
+        self.unknown_state_symbol = 'N'
+
+###############################################################################
+## RnaStateAlphabet
+
+class RnaStateAlphabet(StateAlphabet):
+
+    def __init__(self):
+        fundamental_states = "ACGU"
+        polymorphic_states = None
+        ambiguous_states = (
+                ("N", "ACGU"),
+                ("R", "AG"  ),
+                ("Y", "CU"  ),
+                ("M", "AC"  ),
+                ("W", "AU"  ),
+                ("S", "CG"  ),
+                ("K", "GU"  ),
+                ("V", "ACG" ),
+                ("H", "ACU" ),
+                ("D", "AGU" ),
+                ("B", "CGU" ),
+                )
+        symbol_synonyms = {"X": "N"}
+        StateAlphabet.__init__(self,
+                fundamental_states=fundamental_states,
+                no_data_symbol="?",
+                gap_symbol="-",
+                polymorphic_states=polymorphic_states,
+                ambiguous_states=ambiguous_states,
+                symbol_synonyms=symbol_synonyms,
+                label="RNA",
+                case_sensitive=False)
+        for state in self.state_iter():
+            if state.symbol == "-":
+                attr_name = "gap"
+            else:
+                attr_name = state.symbol
+            self.set_state_as_attribute(state, attr_name)
+        self.any_residue = self.N
+        self.unknown_state_symbol = 'N'
+
+###############################################################################
+## NucleotideStateAlphabet
+
+class NucleotideStateAlphabet(StateAlphabet):
+
+    def __init__(self):
+        fundamental_states = "ACGTU"
+        polymorphic_states = None
+        ambiguous_states = (
+                ("N", "ACGTU"),
+                ("R", "AG"  ),
+                ("Y", "CTU"  ),
+                ("M", "AC"  ),
+                ("W", "ATU"  ),
+                ("S", "CG"  ),
+                ("K", "GTU"  ),
+                ("V", "ACG" ),
+                ("H", "ACTU" ),
+                ("D", "AGTU" ),
+                ("B", "CGTU" ),
+                )
+        symbol_synonyms = {"X": "N"}
+        StateAlphabet.__init__(self,
+                fundamental_states=fundamental_states,
+                no_data_symbol="?",
+                gap_symbol="-",
+                polymorphic_states=polymorphic_states,
+                ambiguous_states=ambiguous_states,
+                symbol_synonyms=symbol_synonyms,
+                label="Nucleotide",
+                case_sensitive=False)
+        for state in self.state_iter():
+            if state.symbol == "-":
+                attr_name = "gap"
+            else:
+                attr_name = state.symbol
+            self.set_state_as_attribute(state, attr_name)
+        self.any_residue = self.N
+        self.unknown_state_symbol = 'N'
+
+###############################################################################
+## ProteinStateAlphabet
+
+class ProteinStateAlphabet(StateAlphabet):
+
+    def __init__(self):
+        fundamental_states = "ACDEFGHIKLMNPQRSTVWY*"
+        polymorphic_states = None
+        ambiguous_states = (
+                ("B", "DN"),
+                ("Z", "EQ"),
+                ("X", "ACDEFGHIKLMNPQRSTVWY*"),
+                )
+        symbol_synonyms = {}
+        StateAlphabet.__init__(self,
+                fundamental_states=fundamental_states,
+                no_data_symbol="?",
+                gap_symbol="-",
+                polymorphic_states=polymorphic_states,
+                ambiguous_states=ambiguous_states,
+                symbol_synonyms=symbol_synonyms,
+                label="Protein",
+                case_sensitive=False)
+        for state in self.state_iter():
+            if state.symbol == "-":
+                attr_name = "gap"
+            elif state.symbol == "*":
+                attr_name = "stop"
+            else:
+                attr_name = state.symbol
+            self.set_state_as_attribute(state, attr_name)
+        self.any_residue = self.X
+        self.unknown_state_symbol = 'X'
+
+###############################################################################
+## BinaryStateAlphabet
+
+class BinaryStateAlphabet(StateAlphabet):
+
+    def __init__(self, allow_gaps=False, allow_missing=False):
+        fundamental_states = "10"
+        if allow_gaps:
+            gap_symbol = "-"
+        else:
+            gap_symbol = None
+        polymorphic_states = None
+        ambiguous_states = []
+        if allow_missing:
+            no_data_symbol = "?"
+        else:
+            no_data_symbol = None
+        symbol_synonyms = {}
+        StateAlphabet.__init__(self,
+                fundamental_states=fundamental_states,
+                no_data_symbol=no_data_symbol,
+                gap_symbol=gap_symbol,
+                polymorphic_states=polymorphic_states,
+                ambiguous_states=ambiguous_states,
+                symbol_synonyms=symbol_synonyms,
+                label="Binary",
+                case_sensitive=False)
+        for state in self.state_iter():
+            if state.symbol == "-":
+                attr_name = "gap"
+            elif state.symbol == "?":
+                attr_name = "missing"
+            elif state.symbol == "*":
+                attr_name = "stop"
+            else:
+                attr_name = state.symbol
+            self.set_state_as_attribute(state, attr_name)
+
+###############################################################################
+## RestrictionSitesStateAlphabet
+
+class RestrictionSitesStateAlphabet(BinaryStateAlphabet):
+
+    def __init__(self, allow_gaps=False, allow_missing=False):
+        BinaryStateAlphabet.__init__(self, allow_gaps=allow_gaps, allow_missing=allow_missing)
+
+###############################################################################
+## InfiniteSitesStateAlphabet
+
+class InfiniteSitesStateAlphabet(BinaryStateAlphabet):
+
+    def __init__(self, allow_gaps=False, allow_missing=False):
+        BinaryStateAlphabet.__init__(self, allow_gaps=allow_gaps, allow_missing=allow_missing)
+
+
+###############################################################################
+## GLOBAL STATE ALPHABETS
+
+# Shared singleton instances of each alphabet, constructed once at module
+# import time.
+DNA_STATE_ALPHABET                =  DnaStateAlphabet()
+RNA_STATE_ALPHABET                =  RnaStateAlphabet()
+NUCLEOTIDE_STATE_ALPHABET         =  NucleotideStateAlphabet()
+BINARY_STATE_ALPHABET             =  BinaryStateAlphabet()
+PROTEIN_STATE_ALPHABET            =  ProteinStateAlphabet()
+RESTRICTION_SITES_STATE_ALPHABET  =  RestrictionSitesStateAlphabet()
+INFINITE_SITES_STATE_ALPHABET     =  InfiniteSitesStateAlphabet()
+
+def new_standard_state_alphabet(
+        fundamental_state_symbols=None,
+        case_sensitive=False):
+    if fundamental_state_symbols is None:
+        fundamental_state_symbols = "0123456789"
+    s = StateAlphabet(
+            fundamental_states=fundamental_state_symbols,
+            no_data_symbol="?",
+            gap_symbol="-",
+            # polymorphic_states=polymorphic_states,
+            # ambiguous_states=ambiguous_states,
+            # symbol_synonyms=symbol_synonyms,
+            label="Standard",
+            case_sensitive=case_sensitive)
+    for state in s.state_iter():
+        if state.symbol == "-":
+            attr_name = "gap"
+        else:
+            attr_name = state.symbol
+        s.set_state_as_attribute(state, attr_name)
+    return s
+
+
diff --git a/dendropy/datamodel/datasetmodel.py b/dendropy/datamodel/datasetmodel.py
new file mode 100644
index 0000000..f23c677
--- /dev/null
+++ b/dendropy/datamodel/datasetmodel.py
@@ -0,0 +1,652 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+This module defines the |DataSet|: a top-level data container object
+that manages collections of |TaxonNamespace|, |TreeList|, and
+(various kinds of) |CharacterMatrix| objects.
+"""
+
+import warnings
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+import copy
+import sys
+from dendropy.utility import container
+from dendropy.utility import error
+from dendropy.utility import deprecate
+from dendropy.datamodel import basemodel
+from dendropy.datamodel import taxonmodel
+from dendropy.datamodel import treecollectionmodel
+from dendropy.datamodel import charmatrixmodel
+from dendropy.datamodel import charstatemodel
+from dendropy import dataio
+
+###############################################################################
+## DataSet
+
+class DataSet(
+        basemodel.Annotable,
+        basemodel.Deserializable,
+        basemodel.MultiReadable,
+        basemodel.Serializable,
+        basemodel.DataObject):
+    """
+    A phylogenetic data object that coordinates collections of
+    |TaxonNamespace|, |TreeList|, and (various kinds of)
+    |CharacterMatrix| objects.
+
+    A |DataSet| has three attributes:
+
+        ``taxon_namespaces``
+            A list of |TaxonNamespace| objects, each representing
+            a distinct namespace for operational taxononomic unit concept
+            definitions.
+
+        ``tree_lists``
+            A list of |TreeList| objects, each representing a
+            collection of |Tree| objects.
+
+        ``char_matrices``
+            A list of |CharacterMatrix|-derived objects (e.g.
+            |DnaCharacterMatrix|).
+
+    Multiple |TaxonNamespace| objects within a |DataSet| are
+    allowed so as to support reading/loading of data from external sources that
+    have multiple independent taxon namespaces defined within the same source
+    or document (e.g., a Mesquite file with multiple taxa blocks, or a NeXML
+    file with multiple OTU sections). Ideally, however, this would not
+    be how data is managed. Recommended idiomatic usage would be to use a
+    |DataSet| to manage multiple types of data that all share and
+    reference the same, single taxon namespace.
+
+    This convention can be enforced by setting the DataSet instance to
+    "attached taxon namespace" mode::
+
+        ds = dendropy.DataSet()
+        tns = dendropy.TaxonNamespace()
+        ds.attach_taxon_namespace(tns)
+
+    After setting this mode, all subsequent data read or created will be
+    coerced to use the same, common operational taxonomic unit concept
+    namespace.
+
+    Note that unless there is a need to collect and serialize a collection of
+    data to the same file or external source, it is probably better
+    semantically to use more specific data structures (e.g., a
+    |TreeList| object for trees or a |DnaCharacterMatrix|
+    object for an alignment). Similarly, when deserializing an external
+    data source, if just a single type or collection of data is needed (e.g.,
+    the collection of trees from a file that includes both trees and an
+    alignment), then it is semantically cleaner to deserialize the data
+    into a more specific structure (e.g., a |TreeList| to get all the
+    trees). However, when deserializing a mixed external data source
+    with, e.g. multiple alignments or trees and one or more alignments, and you
+    need to access/use more than a single collection, it is more efficient to
+    read the entire data source at once into a |DataSet| object and then
+    independently extract the data objects as you need them from the various
+    collections.
+
+    """
+
+    def _parse_and_create_from_stream(cls,
+            stream,
+            schema,
+            **kwargs):
+        """
+        Constructs a new |DataSet| object and populates it with data
+        from file-like object ``stream``.
+        """
+        exclude_trees = kwargs.pop("exclude_trees", False)
+        exclude_chars = kwargs.pop("exclude_chars", False)
+        taxon_namespace = taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs, None)
+        label = kwargs.pop("label", None)
+        dataset = DataSet(label=label)
+        if taxon_namespace is not None:
+            dataset.attached_taxon_namespace = taxon_namespace
+        reader = dataio.get_reader(schema, **kwargs)
+        reader.read_dataset(
+                stream=stream,
+                dataset=dataset,
+                taxon_namespace=taxon_namespace,
+                exclude_trees=exclude_trees,
+                exclude_chars=exclude_chars,
+                state_alphabet_factory=charstatemodel.StateAlphabet,
+                )
+        return dataset
+    _parse_and_create_from_stream = classmethod(_parse_and_create_from_stream)
+
+    @classmethod
+    def get(cls, **kwargs):
+        """
+        Instantiate and return a *new* |DataSet| object from a data source.
+
+        **Mandatory Source-Specification Keyword Argument (Exactly One Required):**
+
+            - **file** (*file*) -- File or file-like object of data opened for reading.
+            - **path** (*str*) -- Path to file of data.
+            - **url** (*str*) -- URL of data.
+            - **data** (*str*) -- Data given directly.
+
+        **Mandatory Schema-Specification Keyword Argument:**
+
+            - **schema** (*str*) -- Identifier of format of data given by the
+              "``file``", "``path``", "``data``", or "``url``" argument
+              specified above: ":doc:`fasta </schemas/fasta>`", ":doc:`newick
+              </schemas/newick>`", ":doc:`nexus </schemas/nexus>`", or
+              ":doc:`nexml </schemas/nexml>`", ":doc:`phylip
+              </schemas/phylip>`", etc. See "|Schemas|" for more details.
+
+        **Optional General Keyword Arguments:**
+
+            - **exclude_trees** (*bool*) -- If ``True``, then all tree data in the data
+              source will be skipped.
+            - **exclude_chars** (*bool*) -- If ``True``, then all character
+              data in the data source will be skipped.
+            - **taxon_namespace** (|TaxonNamespace|) -- The |TaxonNamespace|
+              instance to use to :doc:`manage the taxon names </primer/taxa>`.
+              If not specified, a new one will be created.
+            - **ignore_unrecognized_keyword_arguments** (*bool*) -- If `True`,
+              then unsupported or unrecognized keyword arguments will not
+              result in an error. Default is `False`: unsupported keyword
+              arguments will result in an error.
+
+        **Optional Schema-Specific Keyword Arguments:**
+
+            These provide control over how the data is interpreted and
+            processed, and supported argument names and values depend on
+            the schema as specified by the value passed as the "``schema``"
+            argument. See "|Schemas|" for more details.
+
+        **Examples:**
+
+        ::
+
+            dataset1 = dendropy.DataSet.get(
+                    path="pythonidae.chars_and_trees.nex",
+                    schema="nexus")
+            dataset2 = dendropy.DataSet.get(
+                    url="http://purl.org/phylo/treebase/phylows/study/TB2:S1925?format=nexml",
+                    schema="nexml")
+
+        """
+        return cls._get_from(**kwargs)
+
+    ###########################################################################
+    ### Lifecycle and Identity
+
+    def __init__(self, *args, **kwargs):
+        """
+        The constructor can take one argument. This can either be another
+        |DataSet| instance or an iterable of |TaxonNamespace|,
+        |TreeList|, or |CharacterMatrix|-derived instances.
+
+        In the former case, the newly-constructed |DataSet| will be a
+        shallow-copy clone of the argument.
+
+        In the latter case, the newly-constructed |DataSet| will have
+        the elements of the iterable added to the respective collections
+        (``taxon_namespaces``, ``tree_lists``, or ``char_matrices``, as
+        appropriate). This is essentially like calling :meth:`DataSet.add()`
+        on each element separately.
+        """
+        if len(args) > 1:
+            # only allow 1 positional argument
+            raise error.TooManyArgumentsError(func_name=self.__class__.__name__, max_args=1, args=args)
+        if "stream" in kwargs or "schema" in kwargs:
+            raise TypeError("Constructing from an external stream is no longer supported: use the factory method 'DataSet.get_from_stream()'")
+        elif len(args) == 1 and isinstance(args[0], DataSet):
+            self._clone_from(args[0], kwargs)
+        else:
+            basemodel.DataObject.__init__(self, label=kwargs.pop("label", None))
+            self.taxon_namespaces = container.OrderedSet()
+            self.tree_lists = container.OrderedSet()
+            self.char_matrices = container.OrderedSet()
+            self.attached_taxon_namespace = None
+            self._process_taxon_namespace_directives(kwargs)
+            self.comments = []
+            if len(args) == 1 and not isinstance(args[0], DataSet):
+                for item in args[0]:
+                    self.add(item)
+        if kwargs:
+            raise TypeError("Unrecognized or unsupported arguments: {}".format(kwargs))
+
+    def __hash__(self):
+        # Hash by object identity, consistent with identity-based equality
+        # below.
+        return id(self)
+
+    def __eq__(self, other):
+        # Two DataSet objects are equal only if they are the same object.
+        return self is other
+
+    # Cloning/copying of DataSet objects is not (yet) supported.
+    def _clone_from(self, dataset, kwargs_dict):
+        raise NotImplementedError
+
+    def __copy__(self):
+        raise NotImplementedError
+
+    def taxon_namespace_scoped_copy(self, memo=None):
+        raise NotImplementedError
+
+    def __deepcopy__(self, memo=None):
+        raise NotImplementedError
+
+    ###########################################################################
+    ### Data I/O
+
+    def _parse_and_add_from_stream(self,
+            stream,
+            schema,
+            exclude_trees=False,
+            exclude_chars=False,
+            **kwargs):
+        """
+        Reads data from file-like object ``stream`` in ``schema`` format and
+        adds it to ``self``, returning a tuple of the number of new taxon
+        namespaces, tree lists, and character matrices added.
+        """
+        taxon_namespace = taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs, None)
+        if (self.attached_taxon_namespace is not None
+                and taxon_namespace is not None
+                and self.attached_taxon_namespace is not taxon_namespace):
+            raise ValueError("DataSet has attached TaxonNamespace that is not the same as ``taxon_namespace``")
+        if self.attached_taxon_namespace is not None and taxon_namespace is None:
+            taxon_namespace = self.attached_taxon_namespace
+        # NOTE(review): ``label`` is popped so it is not passed on to the
+        # reader, but its value is otherwise unused here.
+        label = kwargs.pop("label", None)
+        reader = dataio.get_reader(schema, **kwargs)
+        # Snapshot collection sizes so the counts of newly-added items can
+        # be reported.
+        n_tns = len(self.taxon_namespaces)
+        n_tree_lists = len(self.tree_lists)
+        n_char_matrices = len(self.char_matrices)
+        reader.read_dataset(
+                stream=stream,
+                dataset=self,
+                taxon_namespace=taxon_namespace,
+                exclude_trees=exclude_trees,
+                exclude_chars=exclude_chars,
+                state_alphabet_factory=charstatemodel.StateAlphabet,
+                )
+        n_tns2 = len(self.taxon_namespaces)
+        n_tree_lists2 = len(self.tree_lists)
+        n_char_matrices2 = len(self.char_matrices)
+        return (n_tns2-n_tns,
+                n_tree_lists2-n_tree_lists,
+                n_char_matrices2-n_char_matrices)
+
+    def read(self, **kwargs):
+        """
+        Add data to ``self`` from data source.
+
+        **Mandatory Source-Specification Keyword Argument (Exactly One Required):**
+
+            - **file** (*file*) -- File or file-like object of data opened for reading.
+            - **path** (*str*) -- Path to file of data.
+            - **url** (*str*) -- URL of data.
+            - **data** (*str*) -- Data given directly.
+
+        **Mandatory Schema-Specification Keyword Argument:**
+
+            - **schema** (*str*) -- Identifier of format of data given by the
+              "``file``", "``path``", "``data``", or "``url``" argument
+              specified above: ":doc:`newick </schemas/newick>`", ":doc:`nexus
+              </schemas/nexus>`", or ":doc:`nexml </schemas/nexml>`". See
+              "|Schemas|" for more details.
+
+        **Optional General Keyword Arguments:**
+
+            - **exclude_trees** (*bool*) -- If ``True``, then all tree data in the data
+              source will be skipped.
+            - **exclude_chars** (*bool*) -- If ``True``, then all character
+              data in the data source will be skipped.
+            - **taxon_namespace** (|TaxonNamespace|) -- The |TaxonNamespace|
+              instance to use to :doc:`manage the taxon names </primer/taxa>`.
+              If not specified, a new one will be created unless the DataSet
+              object is in attached taxon namespace mode
+              (``self.attached_taxon_namespace`` is not ``None`` but assigned
+              to a specific |TaxonNamespace| instance).
+            - **ignore_unrecognized_keyword_arguments** (*bool*) -- If `True`,
+              then unsupported or unrecognized keyword arguments will not
+              result in an error. Default is `False`: unsupported keyword
+              arguments will result in an error.
+
+        **Optional Schema-Specific Keyword Arguments:**
+
+            These provide control over how the data is interpreted and
+            processed, and supported argument names and values depend on
+            the schema as specified by the value passed as the "``schema``"
+            argument. See "|Schemas|" for more details.
+
+        **Examples:**
+
+        ::
+
+            ds = dendropy.DataSet()
+            ds.read(
+                    path="pythonidae.chars_and_trees.nex",
+                    schema="nexus")
+            ds.read(
+                    url="http://purl.org/phylo/treebase/phylows/study/TB2:S1925?format=nexml",
+                    schema="nexml")
+
+        """
+        # Delegate source/schema resolution and parsing to the mixin.
+        return basemodel.MultiReadable._read_from(self, **kwargs)
+
    def _format_and_write_to_stream(self,
            stream,
            schema,
            exclude_trees=False,
            exclude_chars=False,
            **kwargs):
        """
        Writes out ``self`` in ``schema`` format to a destination given by
        file-like object ``stream``.

        Parameters
        ----------
        stream : file or file-like object
            Destination for data.
        schema : string
            Must be a recognized character file schema, such as "nexus",
            "phylip", etc, for which a specialized tree list writer is
            available. If this is not implemented for the schema specified, then
            an UnsupportedSchemaError is raised.
        exclude_trees : bool
            If `True`, tree data in this dataset will not be written.
        exclude_chars : bool
            If `True`, character data in this dataset will not be written.
        \*\*kwargs : keyword arguments, optional
            Keyword arguments will be passed directly to the writer for the
            specified schema. See documentation for details on keyword
            arguments supported by writers of various schemas.

        """
        # get_writer() consumes schema-specific kwargs; the exclusion flags
        # are passed positionally to the writer's dataset entry point.
        writer = dataio.get_writer(schema, **kwargs)
        writer.write_dataset(self, stream, exclude_trees, exclude_chars)
+
+    ###########################################################################
+    ### Domain Data Management
+
+    ### General ###
+
+    def add(self, data_object, **kwargs):
+        """
+        Generic add for TaxonNamespace, TreeList or CharacterMatrix objects.
+        """
+        if isinstance(data_object, taxonmodel.TaxonNamespace):
+            self.add_taxon_namespace(data_object)
+        elif isinstance(data_object, treecollectionmodel.TreeList):
+            self.add_tree_list(data_object)
+        elif isinstance(data_object, charmatrixmodel.CharacterMatrix):
+            self.add_char_matrix(data_object)
+        else:
+            raise error.InvalidArgumentValueError("Cannot add object of type {} to DataSet" .format(type(data_object)))
+
+    ### TaxonNamespace ###
+
+    def add_taxon_namespace(self, taxon_namespace):
+        """
+        Adds a taxonomic unit concept namespace represented by a
+        |TaxonNamespace| instance to this dataset if it is not already
+        there.
+
+        Parameters
+        ----------
+        taxon_namespace : |TaxonNamespace|
+            The |TaxonNamespace| object to be added.
+        """
+        self.taxon_namespaces.add(taxon_namespace)
+        return taxon_namespace
+
+    def new_taxon_namespace(self, *args, **kwargs):
+        """
+        Creates a new |TaxonNamespace| object, according to the arguments given
+        (passed to `TaxonNamespace()`), and adds it to this |DataSet|.
+        """
+        t = taxonmodel.TaxonNamespace(*args, **kwargs)
+        self.add_taxon_namespace(t)
+        return t
+
+    def attach_taxon_namespace(self, taxon_namespace=None):
+        """
+        Forces all read() calls of this DataSet to use the same |TaxonNamespace|. If
+        ``taxon_namespace`` If ``taxon_namespace`` is None, then a new |TaxonNamespace| will be
+        created, added to ``self.taxon_namespaces``, and that is the |TaxonNamespace| that will be
+        attached.
+        """
+        if taxon_namespace is None:
+            raise TypeError("Automatic creation of a new TaxonNamespace is no longer supported: ``taxon_namespace`` argument required to be passed a valid 'TaxonNamespace' instance. E.g.,:\n\n    taxon_namespace = dendropy.TaxonNamespace()\n    dataset.attach_taxon_namespace(taxon_namespace)\n")
+            taxon_namespace = self.new_taxon_namespace()
+        if type(taxon_namespace) == type(True):
+            raise ValueError("attach_taxon_namespace() no longer accepts a bool argument: a valid 'TaxonNamespace' instance is required")
+        if taxon_namespace not in self.taxon_namespaces:
+            self.add_taxon_namespace(taxon_namespace)
+        self.attached_taxon_namespace = taxon_namespace
+        return self.attached_taxon_namespace
+
+    def detach_taxon_namespace(self):
+        """
+        Relaxes constraint forcing all
+        """
+        t = self.attached_taxon_namespace
+        self.attached_taxon_namespace = None
+        return t
+
+    def _process_taxon_namespace_directives(self, kwargs_dict):
+        """
+        The following idioms are supported:
+
+            `taxon_namespace=tns`
+                Attach ``tns`` as the bound (single, unified) taxonomic namespace
+                reference for all objects.
+            `attached_taxon_namespace=tns`
+                Attach ``tns`` as the bound (single, unified) taxonomic namespace
+                reference for all objects.
+            `attach_taxon_namespace=True, attached_taxon_namespace=tns`
+                Attach ``tns`` as the bound (single, unified) taxonomic namespace
+                reference for all objects.
+            `attach_taxon_namespace=True`
+                Create a *new* |TaxonNamespace| and set it as the bound
+                (single, unified) taxonomic namespace reference for all
+                objects.
+        """
+        attach_taxon_namespace, taxon_namespace = taxonmodel.process_attached_taxon_namespace_directives(kwargs_dict)
+        if attach_taxon_namespace or (taxon_namespace is not None):
+            self.attach_taxon_namespace(taxon_namespace)
+        return attach_taxon_namespace, taxon_namespace
+
+    def unify_taxon_namespaces(self,
+            taxon_namespace=None,
+            case_sensitive_label_mapping=True,
+            attach_taxon_namespace=True):
+        """
+        Reindices taxa across all subcomponents, mapping to single taxon set.
+        """
+        if len(self.taxon_namespaces) or len(self.tree_lists) or len(self.char_matrices):
+            self.taxon_namespaces.clear()
+            if taxon_namespace is None:
+                taxon_namespace = self.new_taxon_namespace()
+            taxon_mapping_memo = {}
+            for tree_list in self.tree_lists:
+                tree_list.migrate_taxon_namespace(
+                        taxon_namespace=taxon_namespace,
+                        unify_taxa_by_label=True,
+                        # case_sensitive_label_mapping=case_sensitive_label_mapping,
+                        taxon_mapping_memo=taxon_mapping_memo)
+            for char_matrix in self.char_matrices:
+                char_matrix.migrate_taxon_namespace(
+                        taxon_namespace=taxon_namespace,
+                        unify_taxa_by_label=True,
+                        # case_sensitive_label_mapping=case_sensitive_label_mapping,
+                        taxon_mapping_memo=taxon_mapping_memo)
+        if attach_taxon_namespace:
+            self.attach_taxon_namespace(taxon_namespace)
+
+    def unify_taxa(self, taxon_set=None, bind=None):
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: '{class_name}.unify_taxa()' will no longer be supported in future releases; use '{class_name}.unify_taxon_namespaces()' instead".format(class_name=self.__class__.__name__))
+        self.unify_taxon_namespaces(taxon_namespace=taxon_set,
+                attach_taxon_namespace=bind)
+
+    ### **Legacy** ###
+
+    def _get_taxon_sets(self):
+        self.taxon_sets_deprecation_warning()
+        return self.taxon_namespaces
+    def _set_taxon_sets(self, v):
+        self.taxon_sets_deprecation_warning()
+        self.taxon_namespaces = v
+    def _del_taxon_sets(self):
+        self.taxon_sets_deprecation_warning()
+    taxon_sets = property(_get_taxon_sets, _set_taxon_sets, _del_taxon_sets)
+
+    def taxon_sets_deprecation_warning(self, stacklevel=4):
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'taxon_sets' will no longer be supported in future releases; use 'taxon_namespaces' instead",
+                stacklevel=stacklevel)
+
+    def _get_attached_taxon_set(self):
+        self.attached_taxon_set_deprecation_warning()
+        return self.attached_taxon_namespace
+    def _set_attached_taxon_set(self, v):
+        self.attached_taxon_set_deprecation_warning()
+        self.attached_taxon_namespace = v
+    def _del_attached_taxon_set(self):
+        self.attached_taxon_set_deprecation_warning()
+    attached_taxon_set = property(_get_attached_taxon_set, _set_attached_taxon_set, _del_attached_taxon_set)
+
+    def attached_taxon_set_deprecation_warning(self, stacklevel=4):
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'attached_taxon_set' will no longer be supported in future releases; use 'attached_taxon_namespace' instead",
+                stacklevel=3)
+
+    def add_taxon_set(self, taxon_set):
+        """
+        DEPRECATED: Use `add_taxon_namespace()` instead.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'add_taxon_set' will no longer be supported in future releases; use 'add_taxon_namespace' instead",
+                stacklevel=3)
+        return self.add_taxon_namespace(taxon_namespace=taxon_set)
+
+    def new_taxon_set(self, *args, **kwargs):
+        """
+        DEPRECATED: Use `new_taxon_namespace()` instead.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'new_taxon_set' will no longer be supported in future releases; use 'new_taxon_namespace' instead",
+                stacklevel=3)
+        return self.new_taxon_namespace(*args, **kwargs)
+
+    def attach_taxon_set(self, taxon_set=None):
+        """
+        DEPRECATED: Use `attach_taxon_namespace()` instead.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'attach_taxon_set' will no longer be supported in future releases; use 'attach_taxon_namespace' instead",
+                stacklevel=3)
+        return self.attach_taxon_namespace(taxon_namespace=taxon_set)
+
+    def detach_taxon_set(self):
+        """
+        DEPRECATED: Use `detach_taxon_namespace()` instead.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'detach_taxon_set' will no longer be supported in future releases; use 'detach_taxon_namespace' instead",
+                stacklevel=3)
+        self.detach_taxon_namespace()
+
+    ### TreeList ###
+
+    def add_tree_list(self, tree_list):
+        """
+        Adds a |TreeList| instance to this dataset if it is not already
+        there.
+
+        Parameters
+        ----------
+        tree_list : |TreeList|
+            The |TreeList| object to be added.
+        """
+        if tree_list.taxon_namespace not in self.taxon_namespaces:
+            self.taxon_namespaces.add(tree_list.taxon_namespace)
+        self.tree_lists.add(tree_list)
+        return tree_list
+
+    def new_tree_list(self, *args, **kwargs):
+        """
+        Creates a new |TreeList| instance, adds it to this DataSet.
+
+        Parameters
+        ----------
+        \*args : positional arguments
+            Passed directly to |TreeList| constructor.
+        \*\*kwargs : keyword arguments, optional
+            Passed directly to |TreeList| constructor.
+
+        Returns
+        -------
+        t : |TreeList|
+            The new |TreeList| instance created.
+        """
+        if self.attached_taxon_namespace is not None:
+            if "taxon_namespace" in kwargs and kwargs["taxon_namespace"] is not self.attached_taxon_namespace:
+                raise TypeError("DataSet object is attached to TaxonNamespace {}, but 'taxon_namespace' argument specifies different TaxonNamespace {}" .format(
+                    repr(self.attached_taxon_namespace), repr(kwargs["taxon_namespace"])))
+            else:
+                kwargs["taxon_namespace"] = self.attached_taxon_namespace
+        tree_list = treecollectionmodel.TreeList(*args, **kwargs)
+        return self.add_tree_list(tree_list)
+
+    def get_tree_list(self, label):
+        """
+        Returns a TreeList object specified by label.
+        """
+        for t in self.tree_lists:
+            if t.label == label:
+                return t
+        return None
+
+    ### CharacterMatrix ###
+
+    def add_char_matrix(self, char_matrix):
+        """
+        Adds a |CharacterMatrix| or |CharacterMatrix|-derived
+        instance to this dataset if it is not already there.
+
+        Parameters
+        ----------
+        char_matrix : |CharacterMatrix|
+            The |CharacterMatrix| object to be added.
+        """
+        if char_matrix.taxon_namespace not in self.taxon_namespaces:
+            self.taxon_namespaces.add(char_matrix.taxon_namespace)
+        self.char_matrices.add(char_matrix)
+        return char_matrix
+
+    def new_char_matrix(self, char_matrix_type, *args, **kwargs):
+        """
+        Creation and accession of new |CharacterMatrix| (of class
+        ``char_matrix_type``) into ``chars`` of self."
+        """
+        if self.attached_taxon_namespace is not None:
+            if "taxon_namespace" in kwargs and kwargs["taxon_namespace"] is not self.attached_taxon_namespace:
+                raise TypeError("DataSet object is attached to TaxonNamespace %s, but 'taxon_namespace' argument specifies different TaxonNamespace %s" % (
+                    repr(self.attached_taxon_namespace), repr(kwargs["taxon_namespace"])))
+            else:
+                kwargs["taxon_namespace"] = self.attached_taxon_namespace
+        if isinstance(char_matrix_type, str):
+            char_matrix = charmatrixmodel.new_char_matrix(
+                    data_type=char_matrix_type,
+                    *args,
+                    **kwargs)
+        else:
+            char_matrix = char_matrix_type(*args, **kwargs)
+        return self.add_char_matrix(char_matrix)
diff --git a/dendropy/datamodel/taxonmodel.py b/dendropy/datamodel/taxonmodel.py
new file mode 100644
index 0000000..e207b52
--- /dev/null
+++ b/dendropy/datamodel/taxonmodel.py
@@ -0,0 +1,2133 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Taxon management.
+
+Operational taxonomic unit concepts are essentially names for taxa in the "real
+world". Operational taxonomic unit concepts are organized into taxonomic
+namespaces. A taxonomic namespace is a self-contained and
+functionally-complete collection of mutually-distinct operational taxonomic
+unit concepts, and provide the semantic context in which operational taxonomic
+units from across various data sources of different formats and provenances can
+be related through correct interpretation of their taxon labels.
+
+    * Operational taxonomic units are modeled by a |Taxon| object.
+
+    * Taxonomic namespaces, in which operational taxonomic units are organized,
+      are modeled by a |TaxonNamespace| object.
+
+    * A |TaxonNamespace| manages a collection of |Taxon| objects, where each
+      object represents a distinct operational taxonomic unit concept within
+      the taxonomic namespace represented by that |TaxonNamespace| object.
+
+    * Each |Taxon| object can belong to one and only one |TaxonNamespace|:
+      |Taxon| objects are not shared across |TaxonNamespace| objects.
+
+    * Each |Taxon| object has an attribute, ``label``, whose (string) value
+      is the name of the operational taxon unit concept that it represents.
+
+    * Different |Taxon| objects represent different operational taxonomic
+      unit concepts, even if they have the same label value.
+
+    * All client objects (`TaxonNamespaceAssociated` objects) that reference
+      the same |TaxonNamespace| reference the same "universe" or domain of
+      operational taxonomic unit concepts.
+
+    * Operational taxonomic units from across different data sources are mapped
+      to distinct |Taxon| objects within a particular |TaxonNamespace| based on
+      matching the string values of labels of the |Taxon| object.
+
    * A particular taxonomic unit concept in one data source will be
      correctly related to the same taxonomic unit concept (i.e., the same
      |Taxon| object) in another data source only if they have both
+      been parsed with reference to the same taxonomic namespace (i.e., the
+      same |TaxonNamespace| has been used).
+
    * A |TaxonNamespace| assigns an "accession index" to every |Taxon| object
      added to it. This is a stable and unique number within the context of any
      given |TaxonNamespace| object (though a |Taxon| object may have different
      accession indexes in different |TaxonNamespace| objects if it
      belongs to multiple namespaces). This number will be used to
+      calculate the "split bitmask" hash of the trivial split or external edge
+      subtending the node to which this |Taxon| object is assigned on a tree.
+      The concept of a "split bitmask" hash is fundamental to DendroPy's tree
+      operations. The split bitmask is a hash that uniquely identifies every
+      split on a tree.  It is calculated by OR'ing the split bitmask of all the
+      child splits of the given split. Terminal edges, of course, do not have
+      child edges, and their split bitmask is given by the accession index of
+      the |Taxon| object at their head or target nodes.
+"""
+
+
+import warnings
+import collections
+import copy
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+from dendropy.datamodel import basemodel
+from dendropy.utility import bitprocessing
+from dendropy.utility import textprocessing
+from dendropy.utility import container
+from dendropy.utility import error
+from dendropy.utility import deprecate
+
+##############################################################################
+## Helper functions
+
def taxon_set_deprecation_warning(stacklevel=6):
    """Warn that the legacy 'taxon_set' keyword/attribute is deprecated."""
    deprecate.dendropy_deprecation_warning(
            stacklevel=stacklevel,
            message="Deprecated since DendroPy 4: 'taxon_set' will no longer be supported in future releases; use 'taxon_namespace' instead")
+
def process_kwargs_dict_for_taxon_namespace(kwargs_dict, default=None):
    """
    Pops and returns the taxon namespace given in ``kwargs_dict``, honoring
    the legacy 'taxon_set' spelling (with a deprecation warning); returns
    ``default`` if neither keyword is present.
    """
    has_legacy_kw = "taxon_set" in kwargs_dict
    if has_legacy_kw and "taxon_namespace" in kwargs_dict:
        raise TypeError("Cannot specify both 'taxon_namespace' and 'taxon_set' (legacy support) simultaneously")
    if has_legacy_kw:
        taxon_set_deprecation_warning()
        return kwargs_dict.pop("taxon_set", default)
    return kwargs_dict.pop("taxon_namespace", default)
+
def process_attached_taxon_namespace_directives(kwargs_dict):
    """
    Rejects legacy taxon-namespace binding keyword arguments.

    Historically, idioms such as ``taxon_namespace=tns``,
    ``attached_taxon_namespace=tns``, ``attach_taxon_namespace=True``, and
    their 'taxon_set' spellings were supported here. All of them are now
    disallowed: if any such keyword is present in ``kwargs_dict``, a
    TypeError is raised directing the caller to use the instance method
    ``attach_taxon_namespace()`` instead.

    Returns
    -------
    tuple
        Always ``(False, None)``: no namespace attachment directive.

    Raises
    ------
    TypeError
        If any legacy namespace-binding keyword is present.
    """
    deprecated_kw = [
            "taxon_namespace",
            "attach_taxon_namespace",
            "attached_taxon_namespace",
            "taxon_set",
            "attach_taxon_set",
            "attached_taxon_set",
            ]
    for kw in deprecated_kw:
        if kw in kwargs_dict:
            raise TypeError("'{}' is no longer supported as a keyword argument. Use the instance method 'attach_taxon_namespace()' of the data object instead to bind the object to a single TaxonNamespace".format(kw))
    # Fix: everything after the guard loop in the original was unreachable --
    # the loop raises for every keyword the subsequent logic inspected, so
    # the function could only ever fall through to this default result. The
    # dead code has been removed.
    return (False, None)
+
+##############################################################################
+## TaxonNamespaceAssociated
+
+class TaxonNamespaceAssociated(object):
+    """
+    Provides infrastructure for the maintenance of references to taxa.
+    """
+
+    # def initialize_taxon_namespace_from_kwargs_dict(self, kwargs_dict):
+    #     tns = process_kwargs_dict_for_taxon_namespace(kwargs_dict)
+    #     if tns is None:
+    #         self.taxon_namespace = TaxonNamespace()
+    #     else:
+    #         self.taxon_namespace = tns
+    #     return self.taxon_namespace
+
+    def __init__(self, taxon_namespace=None):
+        if taxon_namespace is None:
+            self._taxon_namespace = TaxonNamespace()
+        else:
+            self._taxon_namespace = taxon_namespace
+        self.automigrate_taxon_namespace_on_assignment = False
+
+    def _get_taxon_namespace(self):
+        return self._taxon_namespace
+    def _set_taxon_namespace(self, tns):
+        if self.automigrate_taxon_namespace_on_assignment:
+            if tns is not None and self._taxon_namespace is not tns:
+                self.migrate_taxon_namespace(tns)
+            elif tns is None:
+                self._taxon_namespace = None
+        else:
+            self._taxon_namespace = tns
+    def _del_taxon_namespace(self):
+        raise TypeError("Cannot delete 'taxon_namespace' attribute")
+    taxon_namespace = property(_get_taxon_namespace, _set_taxon_namespace, _del_taxon_namespace)
+
+    def _get_taxon_set(self):
+        # raise NotImplementedError("'taxon_set' is no longer supported: use 'taxon_namespace' instead")
+        taxon_set_deprecation_warning()
+        return self.taxon_namespace
+    def _set_taxon_set(self, v):
+        # raise NotImplementedError("'taxon_set' is no longer supported: use 'taxon_namespace' instead")
+        taxon_set_deprecation_warning()
+        self.taxon_namespace = v
+    def _del_taxon_set(self):
+        # raise NotImplementedError("'taxon_set' is no longer supported: use 'taxon_namespace' instead")
+        taxon_set_deprecation_warning()
+    taxon_set = property(_get_taxon_set, _set_taxon_set, _del_taxon_set)
+
+    def migrate_taxon_namespace(self,
+            taxon_namespace,
+            unify_taxa_by_label=True,
+            taxon_mapping_memo=None):
+        """
+        Move this object and all members to a new operational taxonomic unit
+        concept namespace scope.
+
+        Current :attr:`self.taxon_namespace` value will be replaced with value
+        given in ``taxon_namespace`` if this is not `None`, or a new
+        |TaxonNamespace| object. Following this,
+        ``reconstruct_taxon_namespace()`` will be called: each distinct
+        |Taxon| object associated with ``self`` or members of ``self`` that
+        is not alread in ``taxon_namespace`` will be replaced with a new
+        |Taxon| object that will be created with the same label and
+        added to :attr:`self.taxon_namespace`.  Calling this method results in
+        the object (and all its member objects) being associated with a new,
+        independent taxon namespace.
+
+        Label mapping case sensitivity follows the
+        ``self.taxon_namespace.is_case_sensitive`` setting. If
+        `False` and ``unify_taxa_by_label`` is also `True`, then the
+        establishment of correspondence between |Taxon| objects in the
+        old and new namespaces with be based on case-insensitive matching of
+        labels. E.g., if there are four |Taxon| objects with labels
+        'Foo', 'Foo', 'FOO', and 'FoO' in the old namespace, then all objects
+        that reference these will reference a single new |Taxon| object
+        in the new namespace (with a label some existing casing variant of
+        'foo'). If `True`: if ``unify_taxa_by_label`` is `True`,
+        |Taxon| objects with labels identical except in case will be
+        considered distinct.
+
+        Parameters
+        ----------
+        taxon_namespace : |TaxonNamespace|
+            The |TaxonNamespace| into the scope of which this object
+            will be moved.
+
+        unify_taxa_by_label : boolean, optional
+            If `True`, then references to distinct |Taxon| objects with
+            identical labels in the current namespace will be replaced with a
+            reference to a single |Taxon| object in the new namespace.
+            If `False`: references to distinct |Taxon| objects will
+            remain distinct, even if the labels are the same.
+
+        taxon_mapping_memo : dictionary
+            Similar to ``memo`` of deepcopy, this is a dictionary that maps
+            |Taxon| objects in the old namespace to corresponding
+            |Taxon| objects in the new namespace. Mostly for interal
+            use when migrating complex data to a new namespace. Note that
+            any mappings here take precedence over all other options: if a
+            |Taxon| object in the old namespace is found in this
+            dictionary, the counterpart in the new namespace will be whatever
+            value is mapped, regardless of, e.g. label values.
+
+        Examples
+        --------
+        Use this method to move an object from one taxon namespace to
+        another.
+
+        For example, to get a copy of an object associated with another taxon
+        namespace and associate it with a different namespace::
+
+            # Get handle to the new TaxonNamespace
+            other_taxon_namespace = some_other_data.taxon_namespace
+
+            # Get a taxon-namespace scoped copy of a tree
+            # in another namespace
+            t2 = Tree(t1)
+
+            # Replace taxon namespace of copy
+            t2.migrate_taxon_namespace(other_taxon_namespace)
+
+        You can also use this method to get a copy of a structure and then
+        move it to a new namespace:
+
+            t2 = Tree(t1)
+            t2.migrate_taxon_namespace(TaxonNamespace())
+
+            # Note: the same effect can be achived by:
+            t3 = copy.deepcopy(t1)
+
+        See Also
+        --------
+        reconstruct_taxon_namespace
+
+        """
+        if taxon_namespace is None:
+            taxon_namespace = taxon.TaxonNamespace()
+        self._taxon_namespace = taxon_namespace
+        self.reconstruct_taxon_namespace(
+                unify_taxa_by_label=unify_taxa_by_label,
+                taxon_mapping_memo=taxon_mapping_memo)
+
    def reconstruct_taxon_namespace(self,
            unify_taxa_by_label=True,
            taxon_mapping_memo=None):
        """
        Repopulates the current taxon namespace with new taxon objects,
        preserving labels. Each distinct |Taxon| object associated with
        ``self`` or members of ``self`` that is not already in
        ``self.taxon_namespace`` will be replaced with a new |Taxon|
        object that will be created with the same label and added to
        :attr:`self.taxon_namespace`.

        Label mapping case sensitivity follows the
        ``self.taxon_namespace.is_case_sensitive`` setting. If
        `False` and ``unify_taxa_by_label`` is also `True`, then the
        establishment of correspondence between |Taxon| objects in the
        old and new namespaces will be based on case-insensitive matching of
        labels. E.g., if there are four |Taxon| objects with labels
        'Foo', 'Foo', 'FOO', and 'FoO' in the old namespace, then all objects
        that reference these will reference a single new |Taxon| object
        in the new namespace (with a label some existing casing variant of
        'foo'). If `True`, then |Taxon| objects with labels identical
        except in case will be considered distinct.

        Note
        ----
        Existing |Taxon| objects in ``self.taxon_namespace`` are *not*
        removed. This method should thus only be called when
        ``self.taxon_namespace`` has been changed. In fact, typical usage
        would not involve calling this method directly, but rather through
        ``migrate_taxon_namespace()``.

        Parameters
        ----------
        unify_taxa_by_label : boolean, optional
            If `True`, then references to distinct |Taxon| objects with
            identical labels in the current namespace will be replaced with a
            reference to a single |Taxon| object in the new namespace.
            If `False`: references to distinct |Taxon| objects will
            remain distinct, even if the labels are the same.

        taxon_mapping_memo : dictionary
            Similar to ``memo`` of deepcopy, this is a dictionary that maps
            |Taxon| objects in the old namespace to corresponding
            |Taxon| objects in the new namespace. Mostly for internal
            use when migrating complex data to a new namespace.
        """
        # Abstract: concrete subclasses must know how to walk their own
        # members to remap taxon references.
        raise NotImplementedError()
+
    def update_taxon_namespace(self):
        """
        All |Taxon| objects associated with ``self`` or members of ``self``
        that are not in ``self.taxon_namespace`` will be added. Note that, unlike
        ``reconstruct_taxon_namespace``, no new |Taxon| objects
        will be created: existing instances are simply registered.
        """
        # Abstract: concrete subclasses must know how to walk their own
        # members to collect taxon references.
        raise NotImplementedError()
+
+    def purge_taxon_namespace(self):
+        """
+        Remove all |Taxon| instances in ``self.taxon_namespace`` that are
+        not associated with ``self`` or any item in ``self``.
+        """
+        taxa = self.poll_taxa()
+        to_remove = [t for t in self.taxon_namespace if t not in taxa]
+        for t in to_remove:
+            self.taxon_namespace.remove_taxon(t)
+
    def poll_taxa(self, taxa=None):
        """
        Returns a set populated with all of |Taxon| instances associated
        with ``self``.

        Parameters
        ----------
        taxa : set()
            Set to populate. If not specified, a new one will be created.

        Returns
        -------
        taxa : set[|Taxon|]
            Set of taxa associated with ``self``.
        """
        # Abstract: concrete subclasses must know how to walk their own
        # members to collect taxon references.
        raise NotImplementedError()
+
+    def reindex_taxa(self, taxon_namespace=None, clear=False):
+        """
+        DEPRECATED: Use `migrate_taxon_namespace()` instead.
+        Rebuilds ``taxon_namespace`` from scratch, or assigns |Taxon| objects from
+        given |TaxonNamespace| object ``taxon_namespace`` based on label values.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: '{class_name}.reindex_taxa()' will no longer be supported in future releases; use '{class_name}.migrate_taxon_namespace()' instead".format(class_name=self.__class__.__name__),
+                stacklevel=3)
+        if taxon_namespace is not None:
+            self.taxon_namespace = taxon_namespace
+        if clear:
+            self.taxon_namespace.clear()
+        self.reindex_subcomponent_taxa()
+        return self.taxon_namespace
+
+    def reindex_subcomponent_taxa():
+        """
+        DEPRECATED: Use :meth:`reconstruct_taxon_namespace()` instead.
+        Derived classes should override this to ensure that their various
+        components, attributes and members all refer to the same |TaxonNamespace|
+        object as ``self.taxon_namespace``, and that ``self.taxon_namespace`` has all
+        the |Taxon| objects in the various members.
+        """
+        raise NotImplementedError()
+
+
+##############################################################################
+## TaxonNamespace
+
+class TaxonNamespace(
+        basemodel.Deserializable,
+        basemodel.MultiReadable,
+        basemodel.Serializable,
+        basemodel.DataObject,
+        basemodel.Annotable):
+
+    """
+    A collection of |Taxon| objects representing a self-contained and complete
+    domain of distinct operational taxonomic unit definitions.
+    Provides the common semantic context in which operational taxonomic units
+    referenced by various phylogenetic data objects (e.g., trees or alignments)
+    can be related.
+    """
+
+    ### Life-cycle
+
    def __init__(self, *args, **kwargs):
        """
        Parameters
        ----------

        \*args : positional arguments, optional
            Accepts a single iterable as an optional positional argument.  If a
            |TaxonNamespace| object is passed as the positional argument, then
            clones or deep-copies of its member |Taxon| objects will be added
            to this one.  If any other iterable is passed as the positional
            argument, then each string in the iterable will result in a new
            |Taxon| object being constructed and added to the namespace with
            the string as its label (name), while each Taxon object in the
            iterable will be added to the namespace directly.

        \*\*kwargs : keyword arguments
            label : string
                The label or name for this namespace.
            is_mutable : boolean, optional (default = `True`)
                If `True` (default), then |Taxon| objects can be added to this
                namespace. If `False`, then adding |Taxon| objects will result
                in an error.
            is_case_sensitive : boolean, optional (default = `False`)
                Whether or not taxon names are considered case sensitive or
                insensitive.

        Notes
        -----
        An empty |TaxonNamespace| can be created (with an optional label) and
        |Taxon| objects added later:

        >>> tns = dendropy.TaxonNamespace(label="taxa")
        >>> t1 = Taxon("a")
        >>> tns.add_taxon(t1)
        >>> t2 = Taxon("b")
        >>> tns.add_taxon(t2)
        >>> tns.add_taxon("c")
        >>> tns
        <TaxonNamespace 0x106509090 'taxa': [<Taxon 0x10661f050 'a'>, <Taxon 0x10651c590 'b'>, <Taxon 0x106642a90 'c'>]>

        Alternatively, an iterable can be passed in as an initializer, and all
        |Taxon| objects will be added directly while, for each string, a new
        |Taxon| object will be created and added. So, the below are all equivalent
        to the above:

        >>> tns = dendropy.TaxonNamespace(["a", "b", "c"], label="taxa")

        >>> taxa = [Taxon(n) for n in ["a", "b", "c"]]
        >>> tns = dendropy.TaxonNamespace(taxa, label="taxa")

        >>> t1 = Taxon("a")
        >>> t2 = Taxon("b")
        >>> taxa = [t1, t2, "c"]
        >>> tns = dendropy.TaxonNamespace(taxa, label="taxa")

        If a |TaxonNamespace| object is passed as the
        initializer argument, a *shallow* copy of the object is constructed:

        >>> tns1 = dendropy.TaxonNamespace(["a", "b", "c"], label="taxa1")
        >>> tns1
        <TaxonNamespace 0x1097275d0 'taxa1': [<Taxon 0x109727610 'a'>, <Taxon 0x109727e10 'b'>, <Taxon 0x109727e90 'c'>]>
        >>> tns2 = dendropy.TaxonNamespace(tns1, label="2")
        >>> tns2
        <TaxonNamespace 0x109727d50 '2': [<Taxon 0x109727610 'a'>, <Taxon 0x109727e10 'b'>, <Taxon 0x109727e90 'c'>]>

        Thus, while "``tns1``" and "``tns2``" are independent collections, and
        addition/deletion of |Taxon| instances to one will not affect
        the other, the label of a |Taxon| instance that is an element in
        one will of course affect the same instance if it is in the other:

        >>> print(tns1[0].label)
        a
        >>> print(tns2[0].label)
        a
        >>> tns1[0].label = "Z"
        >>> print(tns1[0].label)
        Z
        >>> print(tns2[0].label)
        Z

        In contrast to actual data (i.e., the |Taxon| objects), all
        metadata associated with "``tns2``" (i.e., the |AnnotationSet| object,
        in the :attr:`TaxonNamespace.annotations` attribute), will be a full,
        independent deep-copy.

        If what is needed is a true deep-copy of the data of a particular
        |TaxonNamespace| object, including copies of the member
        |Taxon| instances, then this can be achieved using
        :func:`copy.deepcopy()`.

        >>> import copy
        >>> tns1 = dendropy.TaxonNamespace(["a", "b", "c"], label="taxa1")
        >>> tns2 = copy.deepcopy(tns1)
        """
        # Pop recognized keyword arguments first so that any leftovers can
        # be reported as errors below.
        kwargs_set_label = kwargs.pop("label", None)
        self.comments = []
        self.is_mutable = kwargs.pop('is_mutable', True)
        self.is_case_sensitive = kwargs.pop('is_case_sensitive', False)
        # Accession bookkeeping: each added Taxon gets a monotonically
        # increasing accession index; dicts map in both directions.
        self._accession_index_taxon_map = {}
        self._taxa = []
        self._taxon_accession_index_map = {}
        self._taxon_bitmask_map = {}
        # self._split_bitmask_taxon_map = {}
        self._current_accession_count = 0
        if len(args) > 1:
            raise TypeError("TaxonNamespace() takes at most 1 non-keyword argument ({} given)".format(len(args)))
        elif len(args) == 1:
            # special case: construct from argument
            other = args[0]
            for i in other:
                if isinstance(i, Taxon):
                    self.add_taxon(i)
                else:
                    self.new_taxon(label=i)
            if isinstance(other, TaxonNamespace):
                # Shallow-copy semantics: member taxa are shared, so the
                # memo maps each of the source's taxa (and its list) to the
                # objects already added above, preventing deepcopy below
                # from cloning them.
                memo = { id(other): self, id(other._taxa): self._taxa }
                for t1, t2 in zip(self._taxa, other._taxa):
                    memo[id(t2)] = t1
                for k in other.__dict__:
                    if k == "_annotations" or k == "_taxa":
                        continue
                    self.__dict__[k] = copy.deepcopy(other.__dict__[k], memo)
                self.deep_copy_annotations_from(other, memo=memo)
                # self.copy_annotations_from(other, attribute_object_mapper=memo)
            # override with label with value passed as argument
            if kwargs_set_label is not None:
                self.label = kwargs_set_label
        else:
            basemodel.DataObject.__init__(self, label=kwargs_set_label)
        if kwargs:
            raise TypeError("Unrecognized or unsupported arguments: {}".format(kwargs))
+
    def __copy__(self):
        """
        Shallow copy: a new |TaxonNamespace| sharing the same member
        |Taxon| objects (delegates to the single-argument constructor).
        """
        return TaxonNamespace(self)
+
    def taxon_namespace_scoped_copy(self, memo=None):
        """
        Returns ``self`` unchanged: a namespace-scoped copy of a namespace
        is the namespace itself. Registers ``self`` and member taxa in
        ``memo`` so enclosing copy operations reuse them.
        """
        self.populate_memo_for_taxon_namespace_scoped_copy(memo=memo)
        return self
+
    def __deepcopy__(self, memo):
        """
        Full deep copy: clones the namespace *and* its member |Taxon|
        objects, threading ``memo`` so shared references stay consistent.
        """
        if memo is None:
            memo = {}
        # Bypass __init__ (which would try to add taxa) and register the
        # new instance in the memo before copying members, so cyclic
        # references back to this namespace resolve correctly.
        o = self.__class__.__new__(self.__class__)
        memo[id(self)] = o
        o._taxa = []
        memo[id(self._taxa)] = o._taxa
        for t in self._taxa:
            o._taxa.append(copy.deepcopy(t, memo))
        # Copy remaining attributes generically; annotations are handled
        # separately below via the dedicated deep-copy hook.
        for k in self.__dict__:
            if k == "_annotations" or k == "_taxa":
                continue
            o.__dict__[k] = copy.deepcopy(self.__dict__[k], memo)
        o.deep_copy_annotations_from(self, memo=memo)
        # o.copy_annotations_from(self, attribute_object_mapper=memo)
        return o
+
+    def populate_memo_for_taxon_namespace_scoped_copy(self, memo):
+        if memo is not None:
+            memo[id(self)] = self
+            for taxon in self._taxa:
+                memo[id(taxon)] = taxon
+        return memo
+
+    ### Identity and Comparison
+
+    def __str__(self):
+        return "[{}]".format(", ".join([str(i) for i in self._taxa]))
+
    def __repr__(self):
        """
        Returns a debugging representation: class name, object id, label,
        and the reprs of all member taxa.
        """
        return "<{} {} '{}': [{}]>".format(self.__class__.__name__, hex(id(self)), self.label, ", ".join(repr(i) for i in self._taxa))
+
    def __hash__(self):
        # Identity-based hash, consistent with identity-based __eq__ below.
        return id(self)
+
+    def __lt__(self, other):
+        return self._taxa < o._taxa
+
    def __eq__(self, other):
        # enforce non-equivalence of non-identical namespaces
        # (two namespaces with identical labels/taxa are still distinct
        # semantic domains; structural comparison below is deliberately
        # disabled).
        return self is other
        # if not isinstance(other, self.__class__):
        #     return False
        # return (self.label == other.label
        #         and self._taxa == other._taxa
        #         and basemodel.Annotable.__eq__(self, other))
+
+    ### Collection Iteration
+
    def __iter__(self):
        """
        Iterates over member |Taxon| objects in accession order.
        """
        return iter(self._taxa)
+
    def __reversed__(self):
        """
        Iterates over member |Taxon| objects in reverse accession order.
        """
        return reversed(self._taxa)
+
+    ### Collection Data
+
    def __len__(self):
        """
        Returns number of |Taxon| objects in this |TaxonNamespace|.
        """
        return len(self._taxa)
+
+    ### Collection Access and Management
+
+    def __getitem__(self, key):
+        """
+        Returns |Taxon| object with index or slice given by ``key``.
+        """
+        if isinstance(key, int) or isinstance(key, slice):
+            return self._taxa[key]
+        raise ValueError("'TaxonNamespace[]' now only accepts indexes or slices. To access Taxon objects by label, use 'TaxonNamespace.get_taxon()' or 'TaxonNamespace.findall()'")
+
    def __setitem__(self, key, value):
        # Direct item assignment would bypass accession bookkeeping; use
        # add_taxon()/remove_taxon() instead.
        raise NotImplementedError("Item assignment not supported")
+
    def __delitem__(self, key):
        # Delegates to remove_taxon() so accession bookkeeping stays
        # consistent.
        self.remove_taxon(self[key])
+
    def __contains__(self, taxon):
        """
        Returns `True` if Taxon object ``taxon`` is in self.
        """
        # look-up in dictionary for O(1) instead of O(n) in list
        return taxon in self._taxon_accession_index_map
+
+    def _lookup_label(self,
+            label,
+            is_case_sensitive=None,
+            first_match_only=False,
+            error_if_not_found=False,
+            ):
+        """
+        Return |Taxon| object(s) with label matching ``label``.
+
+        Parameters
+        ----------
+        label : str
+            The label for which to search.
+        is_case_sensitive : `None` or bool
+            By default, label lookup will use the
+            ``is_case_sensitive`` attribute of ``self`` to decide
+            whether or not to respect case when trying to match labels to
+            operational taxonomic unit names represented by |Taxon|
+            instances. This can be over-ridden by specifying
+            ``is_case_sensitive`` to `True` (forcing case-sensitivity) or `False`
+            (forcing case-insensitivity).
+        first_match_only : bool
+            If `False`, then the entire namespace will be searched and *all*
+            |Taxon| objects with the matching labels will be returned
+            as a list. If `True` then the function will return after
+            processing the first |Taxon| object with a matching label
+            (i.e., the entire namespace is not searched). Setting this
+            argument to `True` will be more efficient and should be preferred
+            if there are no redundant or duplicate labels.
+        error_if_not_found : bool
+            If `True`, then a LookupError is raised if there are no matches.
+
+        Returns
+        -------
+        t : `None` or |Taxon| instance or list[|Taxon|]
+            If no |Taxon| instances have ``label`` attributes that match
+            the ``label`` argument, then `None`. Otherise, if
+            `first_match_only==True`, then a |Taxon| instance with
+            ``label`` attribute matching the value of the ``label`` argument; if
+            `first_match_only==False`, a list of one or more |Taxon|
+            instances with a ``label`` attribute matching the ``label`` argument.
+        """
+        taxa = []
+        if is_case_sensitive is True or (is_case_sensitive is None and self.is_case_sensitive):
+            for taxon in self._taxa:
+                if label == taxon.label:
+                    if first_match_only:
+                        return taxon
+                    else:
+                        taxa.append(taxon)
+        else:
+            label = str(label).lower()
+            for taxon in self._taxa:
+                if label == taxon.lower_cased_label:
+                    if first_match_only:
+                        return taxon
+                    else:
+                        taxa.append(taxon)
+        if len(taxa) == 0:
+            if error_if_not_found:
+                raise LookupError(label)
+            else:
+                return None
+        return taxa
+
+    ### Adding Taxa
+
+    def add_taxon(self, taxon):
+        """
+        Adds a new |Taxon| object to ``self``.
+
+        If ``taxon`` is not already in the collection of |Taxon| objects in this
+        namespace, and this namespace is mutable, it is added to the
+        collection. If it is already in the collection, then nothing happens.
+        If it is not already in the collection, but the namespace is not
+        mutable, then TypeError is raised.
+
+        Parameters
+        ----------
+        taxon : |Taxon|
+            The |Taxon| object to be accessioned or registered in this
+            collection.
+
+        Raises
+        ------
+        TypeError
+            If this namespace is immutable (i.e.
+            :attr:`TaxonNamespace.is_mutable` is `False`).
+
+        """
+        # NOTE
+        # Previously, this was:
+        #
+        #     if taxon in self._taxa:
+        #
+        # Changing the membership lookup to dictionaries resulted in 10x
+        # increase in speed!!!!
+        if taxon in self._taxon_accession_index_map:
+            return
+        if not self.is_mutable:
+            raise error.ImmutableTaxonNamespaceError("Taxon '{}' cannot be added to an immutable TaxonNamespace".format((taxon.label)))
+        self._taxa.append(taxon)
+        self._accession_index_taxon_map[self._current_accession_count] = taxon
+        self._taxon_accession_index_map[taxon] = self._current_accession_count
+        self._current_accession_count += 1
+
    def append(self, taxon):
        """
        LEGACY. Use 'add_taxon()' instead.
        """
        # Thin alias kept for list-like API compatibility.
        return self.add_taxon(taxon)
+
+    def add_taxa(self, taxa):
+        """
+        Adds multiple |Taxon| objects to self.
+
+        Each |Taxon| object in ``taxa`` that is not already in the collection of
+        |Taxon| objects in this namespace is added to it. If any of the |Taxon|
+        objects are already in the collection, then nothing happens. If the
+        namespace is immutable, then TypeError is raised when trying
+        to add |Taxon| objects.
+
+        Parameters
+        ----------
+        taxa : collections.Iterable [|Taxon|]
+            A list of |Taxon| objects to be accessioned or registered in this
+            collection.
+
+        Raises
+        ------
+        TypeError
+            If this namespace is immutable (i.e. :attr:`TaxonNamespace.is_mutable` is
+            `False`).
+        """
+        for t in taxa:
+            self.add_taxon(t)
+
+    def new_taxon(self, label):
+        """
+        Creates, adds, and returns a new |Taxon| object with corresponding
+        label.
+
+        Parameters
+        ----------
+        label : string or string-like
+            The name or label of the new operational taxonomic unit concept.
+
+        Returns
+        -------
+        taxon: |Taxon|
+            The new |Taxon| object,
+
+        """
+        if not self.is_mutable:
+            raise error.ImmutableTaxonNamespaceError("Taxon '{}' cannot be added to an immutable TaxonNamespace".format(label))
+        taxon = Taxon(label=label)
+        self.add_taxon(taxon)
+        return taxon
+
+    def new_taxa(self, labels):
+        """
+        Creates and add a new |Taxon| with corresponding label for each label
+        in ``labels``. Returns list of |Taxon| objects created.
+
+        Parameters
+        ----------
+        labels : `collections.Iterable` [string]
+            The values of the ``label`` attributes of the new |Taxon| objects to
+            be created, added to this namespace collection, and returned.
+
+        Returns
+        -------
+        taxa : `collections.Iterable` [|Taxon|]
+            A list of |Taxon| objects created and added.
+
+        Raises
+        ------
+        TypeError
+            If this namespace is immutable (i.e.
+            :attr:`TaxonNamespace.is_mutable` is `False`).
+
+        """
+        if not self.is_mutable:
+            raise error.ImmutableTaxonNamespaceError("Taxon objects cannot be added to an immutable TaxonNamespace")
+        taxa = []
+        for label in labels:
+            taxa.append(self.new_taxon(label=label))
+        return taxa
+
+    ### Removing Taxa
+
+    def remove_taxon(self, taxon):
+        """
+        Removes specified |Taxon| object from the collection in this namespace.
+
+        Parameters
+        ----------
+        taxon : a |Taxon| object
+            The |Taxon| object to be removed.
+
+        Raises
+        ------
+        ValueError
+            If ``taxon`` is not in the collection of this namespace.
+        """
+        if taxon not in self._taxa:
+            raise ValueError(taxon)
+        self._taxa.remove(taxon)
+        # assert taxon not in self._taxa
+        while taxon in self._taxa:
+            self._taxa.remove(taxon)
+        idx = self._taxon_accession_index_map.pop(taxon, None)
+        if idx is not None:
+            self._accession_index_taxon_map.pop(idx, None)
+            self._taxon_accession_index_map.pop(taxon, None)
+        bm = self._taxon_bitmask_map.pop(taxon, None)
+        if bm is not None:
+            # self._split_bitmask_taxon_map.pop(bm, None)
+            self._taxon_accession_index_map.pop(taxon, None)
+
    def remove(self, taxon):
        """
        DEPRECATED: Use :meth:`remove_taxon` instead.
        """
        deprecate.dendropy_deprecation_warning(
                message="Deprecated since DendroPy 4: 'TaxonNamespace.remove()'; use 'TaxonNamespace.remove_taxon()' instead",
                stacklevel=3)
        return self.remove_taxon(taxon)
+
+    def remove_taxon_label(self,
+            label,
+            is_case_sensitive=None,
+            first_match_only=False,
+            ):
+        """
+        Removes *all* |Taxon| objects with label matching ``label`` from the
+        collection in this namespace.
+
+        Parameters
+        ----------
+        label : string or string-like
+            The value of the |Taxon| object label to remove.
+        is_case_sensitive : `None` or bool
+            By default, label lookup will use the
+            ``is_case_sensitive`` attribute of ``self`` to decide
+            whether or not to respect case when trying to match labels to
+            operational taxonomic unit names represented by |Taxon|
+            instances. This can be over-ridden by specifying
+            ``is_case_sensitive`` to `True` (forcing case-sensitivity) or `False`
+            (forcing case-insensitivity).
+        first_match_only : bool
+            If `False`, then the entire namespace will be searched and *all*
+            |Taxon| objects with the matching labels will be remove. If
+            `True` then only the first |Taxon| object with a matching
+            label will be removed (i.e., the entire namespace is not searched).
+            Setting this argument to `True` will be more efficient and should
+            be preferred if there are no redundant or duplicate labels.
+
+        Raises
+        ------
+        LookupError
+            If no |Taxon| objects are found with matching label(s).
+
+        See Also
+        --------
+        :meth:`TaxonNamespace.discard_taxon_labels`
+            Similar, but does not raise an error if no matching |Taxon|
+            objects are found.
+        """
+        taxa = self._lookup_label(label,
+                is_case_sensitive=is_case_sensitive,
+                first_match_only=first_match_only,
+                error_if_not_found=True,
+                )
+        for taxon in taxa:
+            self.remove_taxon(taxon)
+
+    def discard_taxon_label(self,
+            label,
+            is_case_sensitive=None,
+            first_match_only=False,
+            ):
+        """
+        Removes *all* |Taxon| objects with label matching ``label`` from the
+        collection in this namespace.
+
+        Parameters
+        ----------
+        label : string or string-like
+            The value of the |Taxon| object label to remove.
+        is_case_sensitive : `None` or bool
+            By default, label lookup will use the
+            ``is_case_sensitive`` attribute of ``self`` to decide
+            whether or not to respect case when trying to match labels to
+            operational taxonomic unit names represented by |Taxon|
+            instances. This can be over-ridden by specifying
+            ``is_case_sensitive`` to `True` (forcing case-sensitivity) or `False`
+            (forcing case-insensitivity).
+        first_match_only : bool
+            If `False`, then the entire namespace will be searched and *all*
+            |Taxon| objects with the matching labels will be remove. If
+            `True` then only the first |Taxon| object with a matching
+            label will be removed (i.e., the entire namespace is not searched).
+            Setting this argument to `True` will be more efficient and should
+            be preferred if there are no redundant or duplicate labels.
+
+        See Also
+        --------
+        :meth:`TaxonNamespace.remove_taxon_label` : Similar, but
+            raises an error if no matching |Taxon| objects are found.
+        """
+        taxa = self._lookup_label(label,
+                is_case_sensitive=is_case_sensitive,
+                first_match_only=first_match_only,
+                error_if_not_found=False,
+                )
+        if taxa is None:
+            return
+        for taxon in taxa:
+            self.remove_taxon(taxon)
+
+    def clear(self):
+        """
+        Removes all |Taxon| objects from this namespace.
+        """
+        # self._taxa.clear() # Python 2 ``list`` class does not have `clear()` method
+        del self._taxa[:]
+        self._accession_index_taxon_map.clear()
+        self._taxon_accession_index_map.clear()
+        self._taxon_bitmask_map.clear()
+        # self._split_bitmask_taxon_map.clear()
+
+    ### Look-up and Retrieval of Taxa
+
+    def findall(self, label, is_case_sensitive=None):
+        """
+        Return list of |Taxon| object(s) with label matching ``label``.
+
+        Parameters
+        ----------
+        label : string or string-like
+            The value which the ``label`` attribute of the |Taxon| object(s)
+            to be returned must match.
+        is_case_sensitive : `None` or bool
+            By default, label lookup will use the
+            ``is_case_sensitive`` attribute of ``self`` to decide
+            whether or not to respect case when trying to match labels to
+            operational taxonomic unit names represented by |Taxon|
+            instances. This can be over-ridden by specifying
+            ``is_case_sensitive`` to `True` (forcing case-sensitivity) or `False`
+            (forcing case-insensitivity).
+
+        Returns
+        -------
+        taxa : ``list`` [|Taxon|]
+            A list containing zero or more |Taxon| objects with labels
+            matching ``label``.
+
+        """
+        taxa = self._lookup_label(label=label,
+                is_case_sensitive=is_case_sensitive,
+                first_match_only=False,
+                error_if_not_found=False,
+                )
+        if taxa is None:
+            return []
+        else:
+            return taxa
+
+    def has_taxon_label(self, label, is_case_sensitive=None):
+        """
+        Checks for presence of a |Taxon| object with the given label.
+
+        Parameters
+        ----------
+        label : string or string-like
+            The value of the |Taxon| object label to match.
+        is_case_sensitive : `None` or bool
+            By default, label lookup will use the
+            ``is_case_sensitive`` attribute of ``self`` to decide
+            whether or not to respect case when trying to match labels to
+            operational taxonomic unit names represented by |Taxon|
+            instances. This can be over-ridden by specifying
+            ``is_case_sensitive`` to `True` (forcing case-sensitivity) or `False`
+            (forcing case-insensitivity).
+
+        Returns
+        -------
+        b : boolean
+            `True` if there is at least one |Taxon| object in this namespace
+            with a label matching the value of ``label``. Otherwise, `False`.
+        """
+        t = self._lookup_label(
+                label=label,
+                is_case_sensitive=is_case_sensitive,
+                first_match_only=True,
+                error_if_not_found=False,
+                )
+        return t is not None
+
+    def has_taxa_labels(self, labels, is_case_sensitive=None):
+        """
+        Checks for presence of |Taxon| objects with the given labels.
+
+        Parameters
+        ----------
+        labels : `collections.Iterable` [string]
+            The values of the |Taxon| object labels to match.
+        is_case_sensitive : `None` or bool
+            By default, label lookup will use the
+            ``is_case_sensitive`` attribute of ``self`` to decide
+            whether or not to respect case when trying to match labels to
+            operational taxonomic unit names represented by |Taxon|
+            instances. This can be over-ridden by specifying
+            ``is_case_sensitive`` to `True` (forcing case-sensitivity) or `False`
+            (forcing case-insensitivity).
+
+        Returns
+        -------
+        b : boolean
+            Returns `True` if, for every element in the iterable ``labels``,
+            there is at least one |Taxon| object that has a label attribute
+            that matches this. `False` otherwise.
+        """
+        for label in labels:
+            f = self._lookup_label(label=label,
+                    is_case_sensitive=is_case_sensitive,
+                    first_match_only=False,
+                    error_if_not_found=False,
+                    )
+            if f is None:
+                return False
+        return True
+
    def get_taxon(self, label, is_case_sensitive=None):
        """
        Retrieves a |Taxon| object with the given label.

        If multiple |Taxon| objects exist with labels that match
        ``label``, then only the first one is returned.  If no |Taxon|
        object is found in this namespace with the specified critieria,
        `None` is returned.

        Parameters
        ----------
        label : string or string-like
            The value which the ``label`` attribute of the |Taxon| object
            to be returned must match.
        is_case_sensitive : `None` or bool
            By default, label lookup will use the
            ``is_case_sensitive`` attribute of ``self`` to decide
            whether or not to respect case when trying to match labels to
            operational taxonomic unit names represented by |Taxon|
            instances. This can be over-ridden by specifying
            ``is_case_sensitive`` to `True` (forcing case-sensitivity) or `False`
            (forcing case-insensitivity).

        Returns
        -------
        taxon : |Taxon| object or `None`
            The first |Taxon| object in this namespace collection with a label
            matching ``label``, or `None` if no such |Taxon| object exists.
        """
        # first_match_only=True makes _lookup_label return a single taxon
        # (or None) and stop scanning at the first hit.
        return self._lookup_label(label=label,
                is_case_sensitive=is_case_sensitive,
                first_match_only=True,
                error_if_not_found=False,
                )
+
+    def get_taxa(self, labels, is_case_sensitive=None, first_match_only=False):
+        """
+        Retrieves list of |Taxon| objects with given labels.
+
+        Parameters
+        ----------
+        labels : `collections.Iterable` [string]
+            Any |Taxon| object in this namespace collection that has a label
+            attribute that matches any value in ``labels`` will be included in
+            the list returned.
+        is_case_sensitive : `None` or bool
+            By default, label lookup will use the
+            ``is_case_sensitive`` attribute of ``self`` to decide
+            whether or not to respect case when trying to match labels to
+            operational taxonomic unit names represented by |Taxon|
+            instances. This can be over-ridden by specifying
+            ``is_case_sensitive`` to `True` (forcing case-sensitivity) or `False`
+            (forcing case-insensitivity).
+        first_match_only : bool
+            If `False`, then for *each* label in ``labels``, the entire namespace
+            will be searched and *all* |Taxon| objects with the matches
+            will be added to the lest. If `True` then, for each label in
+            ``labels``, only the first |Taxon| object with a matching
+            label will be added to the list (i.e., the entire namespace is not
+            searched). Setting this argument to `True` will be more
+            efficient and should be preferred if there are no redundant or
+            duplicate labels.
+
+        Returns
+        -------
+        taxa : ``list`` [|Taxon|]
+            A list containing zero or more |Taxon| objects with labels
+            matching ``label``.
+        """
+        taxa = []
+        for label in labels:
+            tt = self._lookup_label(label=label,
+                    is_case_sensitive=is_case_sensitive,
+                    first_match_only=first_match_only,
+                    error_if_not_found=False,
+                    )
+            if tt is None:
+                continue
+            if first_match_only:
+                taxa.append(tt)
+            else:
+                for t in tt:
+                    if t not in taxa:
+                        taxa.append(t)
+        return taxa
+
+    def require_taxon(self, label, is_case_sensitive=None):
+        """
+        Retrieves a |Taxon| object with the given label, creating it if
+        necessary.
+
+        Retrieves a Taxon object with the label, ``label``.
+        If multiple |Taxon| objects exist with labels that match
+        ``label``, then only the first one is returned.  If no such
+        |Taxon| object exists in the current namespace and the
+        |TaxonNamespace| is NOT mutable, an exception is raised.  If no
+        such |Taxon| object exists in the current namespace and
+        |TaxonNamespace| is mutable, then a new |Taxon| is
+        created, added, and returned.
+
+        Parameters
+        ----------
+        label : string or string-like
+            The value which the ``label`` attribute of the |Taxon| object
+            to be returned must match.
+        is_case_sensitive : `None` or bool
+            By default, label lookup will use the
+            ``is_case_sensitive`` attribute of ``self`` to decide
+            whether or not to respect case when trying to match labels to
+            operational taxonomic unit names represented by |Taxon|
+            instances. This can be over-ridden by specifying
+            ``is_case_sensitive`` to `True` (forcing case-sensitivity) or `False`
+            (forcing case-insensitivity).
+
+        Returns
+        -------
+        taxon : |Taxon| object or `None`
+            A |Taxon| object in this namespace collection with a label
+            matching ``label``.
+
+        Raises
+        ------
+        TypeError
+            If no |Taxon| object is currently in the collection with a label
+            matching the input ``label`` and the ``is_mutable`` attribute of self
+            is `False`.
+        """
+        taxon = self._lookup_label(label=label,
+                is_case_sensitive=is_case_sensitive,
+                first_match_only=True,
+                error_if_not_found=False,
+                )
+        if taxon is not None:
+            return taxon
+        if not self.is_mutable:
+            raise error.ImmutableTaxonNamespaceError("Taxon '{}' not in TaxonNamespace, and cannot be created because TaxonNamespace is immutable".format(label))
+        taxon = self.new_taxon(label=label)
+        return taxon
+
+    ### Taxon Ordering
+
+    def sort(self, key=None, reverse=False):
+        """
+        Sorts |Taxon| objects in collection. If ``key`` is not given, defaults
+        to sorting by label (i.e., ``key = lambda x: x.label``).
+
+        Parameters
+        ----------
+        key : key function object, optional
+            Function that takes a |Taxon| object as an argument and
+            returns the value that determines its sort order. Defaults to
+            sorting by label.
+        reverse : boolean, optional
+            If `True`, sort will be in reverse order.
+        """
+        if key is None:
+            key = lambda x: x.label
+        self._taxa.sort(key=key, reverse=reverse)
+
+    def reverse(self):
+        """
+        Reverses order of |Taxon| objects in collection.
+        """
+        self._taxa.reverse()
+
+    ### Summarization of Collection
+
+    def labels(self):
+        """
+        Returns list of labels of all |Taxon| objects in ``self``.
+
+        Returns
+        -------
+        labels : ``list`` [string]
+            List of :attr:`Taxon.label` values of |Taxon| objects in
+            ``self``.
+        """
+        return [t.label for t in self._taxa]
+
+    def label_taxon_map(self, is_case_sensitive=None):
+        """
+        Returns dictionary with taxon labels as keys and corresponding |Taxon|
+        objects as values.
+
+        If the |TaxonNamespace| is currently case-insensitive, then the
+        dictionary returned will have case-insensitive keys; otherwise the
+        dictionary will be case-sensitive. You can override this by
+        explicitly specifying ``is_case_sensitive`` as `False` or `True`.
+
+        No attempt is made to handle collisions: if multiple taxa share a
+        (case-normalized) label, later entries silently overwrite earlier
+        ones.
+
+        Parameters
+        ----------
+        is_case_sensitive : `None` or bool
+            If `None` (default), key case-sensitivity follows the
+            ``is_case_sensitive`` attribute of ``self``; `True` or `False`
+            forces case-sensitive or case-insensitive keys, respectively.
+
+        Returns
+        -------
+        d : dictionary-like
+            Dictionary with :attr:`Taxon.label` values of |Taxon| objects in
+            ``self`` as keys and corresponding |Taxon| objects as values.
+        """
+        if is_case_sensitive is True or (is_case_sensitive is None and self.is_case_sensitive):
+            d = {}
+        else:
+            # CaseInsensitiveDict normalizes key lookups so labels differing
+            # only in case map to the same entry.
+            d = container.CaseInsensitiveDict()
+        for t in self._taxa:
+            d[t.label] = t
+        return d
+
+    ### Split Management
+
+    # def complement_bitmask(self, bitmask):
+    #     """
+    #     Returns complement of the given split or clade bitmask.
+
+    #     Parameters
+    #     ----------
+    #     bitmask : integer
+    #         Bitmask to be complemented.
+
+    #     Returns
+    #     -------
+    #     h : integer
+    #         Complement of ``bitmask``.
+    #     """
+    #     return (~bitmask) & self.all_taxa_bitmask()
+
+    # def normalize_bitmask(self, bitmask):
+    #     """
+    #     "Normalizes" split, by ensuring that the least-significant bit is
+    #     always 1 (used on unrooted trees to establish split identity
+    #     independent of rotation).
+
+    #     Parameters
+    #     ----------
+    #     bitmask : integer
+    #         Split bitmask hash to be normalized.
+
+    #     Returns
+    #     -------
+    #     h : integer
+    #         Normalized split bitmask.
+    #     """
+    #     return container.NormalizedBitmaskDict.normalize(bitmask, self.all_taxa_bitmask(), 1)
+
+    def all_taxa_bitmask(self):
+        """
+        Returns mask of all taxa.
+
+        Returns
+        -------
+        h : integer
+            Bitmask spanning all |Taxon| objects in self.
+        """
+        #return pow(2, len(self)) - 1
+        b = 1 << self._current_accession_count
+        return b - 1
+
+    def taxon_bitmask(self, taxon):
+        """
+        Returns bitmask value of split hash for split subtending node with
+        ``taxon``.
+
+        Parameters
+        ----------
+        taxon : |Taxon|
+            |Taxon| object for which to calculate split hash bitmask.
+
+        Returns
+        -------
+        h : integer
+            Split hash bitmask value for node associated with |Taxon| object ``taxon``.
+        """
+        # i = self._taxa.index(taxon)
+        # m = 1 << i
+        # return m
+        try:
+            return self._taxon_bitmask_map[taxon]
+        except KeyError:
+            i = self._taxon_accession_index_map[taxon]
+            # i = self._taxa.index(taxon)
+            m = 1 << i
+            self._taxon_bitmask_map[taxon] = m
+            # self._split_bitmask_taxon_map[m] = taxon
+            return m
+
+    def accession_index(self, taxon):
+        """
+        Returns the accession index of ``taxon``. Note that this may not be the
+        same as the list index of the taxon if taxa have been deleted from the
+        namespace.
+
+        Parameters
+        ----------
+        taxon : |Taxon|
+            |Taxon| object for which to return the accession index.
+
+        Returns
+        -------
+        h : integer
+            The accession index.
+
+        Raises
+        ------
+        KeyError
+            If ``taxon`` has not been issued an accession index by this
+            namespace.
+        """
+        return self._taxon_accession_index_map[taxon]
+
+    def taxa_bitmask(self, **kwargs):
+        """
+        Retrieves the list of split hash bitmask values representing all taxa
+        specified by keyword-specified list of taxon objects (``taxa=``) or
+        labels (``labels=``).
+
+        Parameters
+        ----------
+        \*\*kwargs : keyword arguments
+            Requires one of:
+
+                taxa : `collections.Iterable` [|Taxon|]
+                    Iterable of |Taxon| objects.
+                labels : `collections.Iterable` [string]
+                    Iterable of |Taxon| label values.
+
+        Returns
+        -------
+        b : ``list`` [integer]
+            List of split hash bitmask values for specified |Taxon|
+            objects.
+        """
+        if "taxa" in kwargs:
+            taxa = kwargs["taxa"]
+        else:
+            taxa = self.get_taxa(**kwargs)
+        bitmask = 0
+        for taxon in taxa:
+            bitmask |= self.taxon_bitmask(taxon)
+        return bitmask
+
+    def taxa_bipartition(self, **kwargs):
+        """
+        Returns a bipartition that represents all taxa specified by
+        keyword-specified list of taxon objects (``taxa=``) or labels
+        (``labels=``).
+
+        Parameters
+        ----------
+        \*\*kwargs : keyword arguments
+            Requires one of:
+
+                taxa : `collections.Iterable` [|Taxon|]
+                    Iterable of |Taxon| objects.
+                labels : `collections.Iterable` [string]
+                    Iterable of |Taxon| label values.
+
+        Returns
+        -------
+        b : ``list`` [integer]
+            List of split hash bitmask values for specified |Taxon|
+            objects.
+        """
+        from dendropy.treemodel import Bipartition
+        return Bipartition(bitmask=bitmask, compile_bipartition=True)
+
+    def get_taxa_bitmask(self, **kwargs):
+        """
+        LEGACY. Use 'taxa_bitmask' instead.
+
+        Delegates directly to :meth:`TaxonNamespace.taxa_bitmask`, passing
+        through all keyword arguments (``taxa=`` or ``labels=``).
+        """
+        return self.taxa_bitmask(**kwargs)
+
+    def bitmask_taxa_list(self, bitmask, index=0):
+        """
+        Returns list of |Taxon| objects represented by split
+        ``bitmask``.
+
+        Parameters
+        ----------
+        bitmask : integer
+            Split hash bitmask value.
+        index : integer, optional
+            Start from this |Taxon| object instead of the first
+            |Taxon| object in the collection.
+
+        Returns
+        -------
+        taxa : ``list`` [|Taxon|]
+            List of |Taxon| objects specified or spanned by
+            ``bitmask``.
+        """
+        taxa = []
+        while bitmask:
+            if bitmask & 1:
+                taxa.append(self._accession_index_taxon_map[index])
+            bitmask = bitmask >> 1
+            index += 1
+        return taxa
+
+    def bitmask_as_newick_string(self,
+            bitmask,
+            preserve_spaces=False,
+            quote_underscores=True):
+        """
+        Represents a split as a newick string.
+
+        Parameters
+        ----------
+        bitmask : integer
+            Split hash bitmask value.
+        preserve_spaces : boolean, optional
+            If `False` (default), then spaces in taxon labels will be replaced
+            by underscores. If `True`, then taxon labels with spaces will be
+            wrapped in quotes.
+        quote_underscores : boolean, optional
+            If `True` (default), then taxon labels with underscores will be
+            wrapped in quotes. If `False`, then the labels will not be wrapped
+            in quotes.
+
+        Returns
+        -------
+        s : string
+            NEWICK representation of split specified by ``bitmask``.
+        """
+        # Imported locally rather than at module level -- presumably to
+        # avoid a circular import between this module and dataio; confirm.
+        from dendropy.dataio import nexusprocessing
+        return nexusprocessing.bitmask_as_newick_string(
+                bitmask,
+                self,
+                preserve_spaces=preserve_spaces,
+                quote_underscores=quote_underscores)
+
+    def split_as_newick_string(self,
+            split,
+            preserve_spaces=False,
+            quote_underscores=True):
+        """
+        Represents a split as a newick string. Thin wrapper delegating to
+        :meth:`TaxonNamespace.bitmask_as_newick_string`.
+
+        Parameters
+        ----------
+        split : integer
+            Split hash bitmask value.
+        preserve_spaces : boolean, optional
+            If `False` (default), then spaces in taxon labels will be replaced
+            by underscores. If `True`, then taxon labels with spaces will be
+            wrapped in quotes.
+        quote_underscores : boolean, optional
+            If `True` (default), then taxon labels with underscores will be
+            wrapped in quotes. If `False`, then the labels will not be wrapped
+            in quotes.
+
+        Returns
+        -------
+        s : string
+            NEWICK representation of split specified by ``split``.
+        """
+        return self.bitmask_as_newick_string(
+                bitmask=split,
+                preserve_spaces=preserve_spaces,
+                quote_underscores=quote_underscores)
+
+    def bitmask_as_bitstring(self, b):
+        """
+        Returns bitmask ``b`` rendered as a string of '0's and '1's, padded
+        to the width of this namespace's current accession count.
+        """
+        return bitprocessing.int_as_bitstring(b, length=self._current_accession_count)
+
+    def split_as_string(self, b):
+        """
+        DEPRECATED: use :meth:`TaxonNamespace.bitmask_as_bitstring` instead.
+        Emits a deprecation warning, then delegates.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'TaxonNamespace.split_as_string()'; use 'TaxonNamespace.bitmask_as_bitstring()' instead",
+                stacklevel=3)
+        return self.bitmask_as_bitstring(b)
+
+    def description(self, depth=1, indent=0, itemize="", output=None, **kwargs):
+        """
+        Returns description of object, up to level ``depth``.
+        """
+        if depth is None or depth < 0:
+            return ""
+        output_strio = StringIO()
+        if self.label is None:
+            label = str(self.label)
+        output_strio.write('%s%sTaxonNamespace object at %s%s'
+                % (indent*' ',
+                   itemize,
+                   hex(id(self)),
+                   label))
+        if depth >= 1:
+            output_strio.write(': %d Taxa' % len(self))
+            if depth >= 2 and len(self) > 0:
+                for i, t in enumerate(self):
+                    output_strio.write('\n')
+                    t.description(depth=depth-1, indent=indent+4, itemize="[%d]" % (i), output=output_strio, **kwargs)
+        s = output_strio.getvalue()
+        if output is not None:
+            output.write(s)
+        return s
+
+    ### I/O
+
+    def _format_and_write_to_stream(self, stream, schema, **kwargs):
+        """
+        Writes out ``self`` in ``schema`` format to a destination given by
+        file-like object ``stream``.
+
+        Parameters
+        ----------
+        stream : file or file-like object
+            Destination for data.
+        schema : string
+            Must be a recognized tree file schema, such as "nexus",
+            "newick", etc., for which a specialized writer is
+            available. If this is not implemented for the schema specified,
+            then a UnsupportedSchemaError is raised.
+
+        \*\*kwargs : keyword arguments, optional
+            Keyword arguments will be passed directly to the writer for the
+            specified schema. See documentation for details on keyword
+            arguments supported by writers of various schemas.
+
+        """
+        # Imported locally -- presumably to avoid a circular import with
+        # the top-level dendropy package; confirm.
+        from dendropy import dataio
+        writer = dataio.get_writer(schema, **kwargs)
+        writer._write(
+                stream=stream,
+                taxon_namespaces=[self],)
+
+##############################################################################
+## TaxonSet
+
+class TaxonSet(TaxonNamespace):
+    """
+    This class is present for (temporary!) legacy support of code written under
+    DendroPy 3.x.  It will be removed in future versions. All new code should
+    be written using |TaxonNamespace|. Old code needs to be updated to use
+    |TaxonNamespace|.
+    """
+
+    def __init__(self, *args, **kwargs):
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'TaxonSet' will no longer be supported in future releases; use 'TaxonNamespace' instead",
+                stacklevel=3)
+        TaxonNamespace.__init__(self, *args, **kwargs)
+
+##############################################################################
+## Taxon
+
+class Taxon(
+        basemodel.DataObject,
+        basemodel.Annotable):
+    """
+    A taxon associated with a sequence or a node on a tree.
+
+    Equality and hashing are identity-based (see ``__eq__``/``__hash__``):
+    two distinct |Taxon| objects with the same label are still distinct
+    taxa.
+    """
+
+    def __init__(self, label=None):
+        """
+        Parameters
+        ----------
+        label : string or |Taxon| object
+            Label or name of this operational taxonomic unit concept. If a
+            string, then the ``label`` attribute of ``self`` is set to this value.
+            If a |Taxon| object, then the ``label`` attribute of ``self`` is
+            set to the same value as the ``label`` attribute the other
+            |Taxon| object and all annotations/metadata are copied.
+        """
+        if isinstance(label, Taxon):
+            # Copy-construction: deep-copy the other taxon's state into
+            # ``self``. The memo maps the source taxon to ``self`` so that
+            # any internal self-references resolve to the new object.
+            other_taxon = label
+            label = other_taxon.label
+            memo={id(other_taxon):self}
+            for k in other_taxon.__dict__:
+                if k != "_annotations":
+                    # Annotations are handled separately below.
+                    self.__dict__[k] = copy.deepcopy(other_taxon.__dict__[k], memo=memo)
+            self.deep_copy_annotations_from(other_taxon, memo=memo)
+            # self.copy_annotations_from(other_taxon, attribute_object_mapper=memo)
+        else:
+            basemodel.DataObject.__init__(self, label=label)
+            # Cache for the lower-cased form of the label; computed lazily.
+            self._lower_cased_label = None
+        self.comments = []
+
+    def _get_label(self):
+        return self._label
+    def _set_label(self, v):
+        self._label = v
+        # Invalidate the cached lower-cased form whenever the label changes.
+        self._lower_cased_label = None
+    label = property(_get_label, _set_label)
+
+    def _get_lower_cased_label(self):
+        # Lazily compute and cache the lower-cased label; `None` is
+        # returned for unlabeled taxa.
+        if self._label is None:
+            return None
+        if self._lower_cased_label is None:
+            self._lower_cased_label = str(self._label).lower()
+        return self._lower_cased_label
+    lower_cased_label = property(_get_lower_cased_label)
+
+    def __copy__(self):
+        # Shallow copies are deliberately disallowed: a Taxon's identity is
+        # its object identity, so a shallow copy would be ambiguous.
+        raise TypeError("Cannot shallow-copy Taxon")
+        # return self
+
+    def taxon_namespace_scoped_copy(self, memo=None):
+        """
+        Returns ``self``: taxa are shared rather than duplicated in
+        namespace-scoped copies. Registers the identity in ``memo``.
+        """
+        if memo is not None:
+            memo[id(self)] = self
+        return self
+
+    def __deepcopy__(self, memo=None):
+        """
+        Deep-copies this taxon, honoring the ``memo`` protocol of the
+        ``copy`` module so shared references are preserved.
+        """
+        if memo is None:
+            memo = {}
+        try:
+            # Reuse an existing copy if this object was already copied.
+            o = memo[id(self)]
+        except KeyError:
+            # o = type(self).__new__(self.__class__)
+            o = self.__class__.__new__(self.__class__)
+            memo[id(self)] = o
+        for k in self.__dict__:
+            if k != "_annotations":
+                # Annotations are handled separately below.
+                o.__dict__[k] = copy.deepcopy(self.__dict__[k], memo)
+        o.deep_copy_annotations_from(self, memo)
+        # o.copy_annotations_from(self, attribute_object_mapper=memo)
+        return o
+
+    def __hash__(self):
+        # Hash by identity, consistent with identity-based __eq__.
+        return id(self)
+
+    def __eq__(self, other):
+        # Taxa compare equal only if they are the same object.
+        return self is other
+
+    def __lt__(self, other):
+        # Order taxa by label (used, e.g., when sorting a namespace).
+        return self.label < other.label
+
+    def __str__(self):
+        "String representation of self = taxon name."
+        return "'{}'".format(self._label)
+
+    def __repr__(self):
+        return "<{} {} '{}'>".format(self.__class__.__name__, hex(id(self)), self._label)
+
+    def description(self, depth=1, indent=0, itemize="", output=None, **kwargs):
+        """
+        Returns description of object, up to level ``depth``.
+
+        Parameters
+        ----------
+        depth : integer
+            If `None` or negative, an empty string is returned.
+        indent : integer
+            Number of spaces of left-padding for the description line.
+        itemize : string
+            Bullet/label text placed before the description.
+        output : file-like or `None`
+            If given, the description is also written to this stream.
+
+        Returns
+        -------
+        s : string
+            The description.
+        """
+        if depth is None or depth < 0:
+            return ""
+        output_strio = StringIO()
+        if self._label is None:
+            label = "<Unnamed Taxon>"
+        else:
+            label = "'{}'".format(self._label)
+        output_strio.write('{}{} Taxon object at {}: {}'.format(indent*' ', itemize, hex(id(self)), label))
+        s = output_strio.getvalue()
+        if output is not None:
+            output.write(s)
+        return s
+
+##############################################################################
+## TaxonNamespacePartition
+
+class TaxonNamespacePartition(TaxonNamespaceAssociated):
+    """
+    Manages a partition of a TaxonNamespace (i.e., a set of mutually-exclusive
+    and exhaustive subsets of a TaxonNamespace).
+    """
+
+    def __init__(self, taxon_namespace, **kwargs):
+        """
+        __init__ uses one of the following keyword arguments:
+
+            - ``membership_fn``
+                A function that takes a |Taxon| object as an argument and
+                returns a a population membership identifier or flag
+                (e.g., a string, an integer) .
+            - ``membership_attr_name``
+                Name of an attribute of |Taxon| objects that serves as an
+                identifier for subset membership.
+            - ``membership_dict``
+                A dictionary with |Taxon| objects as keys and population
+                membership identifier or flag as values (e.g., a string,
+                an integer).
+            - ``membership_lists``
+                A container of containers of |Taxon| objects, with every
+                |Taxon| object in ``taxon_namespace`` represented once and only
+                once in the sub-containers.
+
+        If none of these are specified, defaults to a partition consisting of
+        a single subset with all the objects in ``taxon_namespace``.
+        """
+        TaxonNamespaceAssociated.__init__(self,
+                taxon_namespace=taxon_namespace)
+        self.subset_map = {}
+        if taxon_namespace is not None:
+            if len(kwargs) > 0:
+                self.apply(**kwargs)
+            else:
+                ss = TaxonNamespace(self.taxon_namespace)
+                self.subset_map = { self.taxon_namespace.label : ss}
+
+    def subsets(self):
+        """
+        Return subsets of partition.
+        """
+        return set(self.subset_map.values())
+
+    def __len__(self):
+        """
+        Number of subsets.
+        """
+        return len(self.subset_map)
+
+    def __iter__(self):
+        """
+        Iterate over subsets.
+        """
+        for k, v in self.subset_map.items():
+            yield v
+
+    def __getitem__(self, label):
+        """
+        Get subset with specified label.
+        """
+        return self.subset_map[label]
+
+    def apply(self, **kwargs):
+        """
+        Builds the subsets of the linked TaxonNamespace resulting from the
+        partitioning scheme specified by one of the following keyword arguments:
+
+            ``membership_fn``
+                A function that takes a |Taxon| object as an argument and
+                returns a a population membership identifier or flag
+                (e.g., a string, an integer).
+
+            ``membership_attr_name``
+                Name of an attribute of |Taxon| objects that serves as an
+                identifier for subset membership.
+
+            ``membership_dict``
+                A dictionary with |Taxon| objects as keys and population
+                membership identifier or flag as values (e.g., a string,
+                an integer).
+
+            ``membership_lists``
+                A container of containers of |Taxon| objects, with every
+                |Taxon| object in ``taxon_namespace`` represented once and only
+                once in the sub-containers.
+        """
+        if "membership_fn" in kwargs:
+            self.apply_membership_fn(kwargs["membership_fn"])
+        elif  "membership_attr_name" in kwargs:
+            self.apply_membership_attr_name(kwargs["membership_attr_name"])
+        elif  "membership_dict" in kwargs:
+            self.apply_membership_dict(kwargs["membership_dict"])
+        elif "membership_lists" in kwargs:
+            self.apply_membership_lists(kwargs["membership_lists"])
+        else:
+            raise TypeError("Must specify partitioning scheme using one of: " \
+                + "'membership_fn', 'membership_dict', or 'membership_lists'")
+
+    def apply_membership_fn(self, mfunc):
+        """
+        Constructs subsets based on function ``mfunc``, which should take a
+        |Taxon| object as an argument and return a population membership
+        identifier or flag (e.g., a string, an integer).
+        """
+        self.subset_map = {}
+        for t in self.taxon_namespace:
+            subset_id = mfunc(t)
+            if subset_id not in self.subset_map:
+                self.subset_map[subset_id] = TaxonNamespace(label=subset_id)
+            self.subset_map[subset_id].add_taxon(t)
+        return self.subsets()
+
+    def apply_membership_attr_name(self, attr_name):
+        """
+        Constructs subsets based on attribute ``attr_name`` of each
+        |Taxon| object.
+        """
+        return self.apply_membership_fn(lambda x: getattr(x, attr_name))
+
+    def apply_membership_dict(self, mdict):
+        """
+        Constructs subsets based on dictionary ``mdict``, which should be
+        dictionary with |Taxon| objects as keys and population membership
+        identifier or flag as values (e.g., a string, an integer).
+        """
+        return self.apply_membership_fn(lambda x: mdict[x])
+
+    def apply_membership_lists(self, mlists, subset_labels=None):
+        """
+        Constructs subsets based on list ``mlists``, which should be an interable
+        of iterables of |Taxon| objects, with every |Taxon| object in
+        ``taxon_namespace`` represented once and only once in the sub-containers.
+        """
+        if subset_labels is not None:
+            if len(subset_labels) != len(mlists):
+                raise ValueError('Length of subset label list must equal to number of subsets')
+        else:
+            subset_labels = range(len(mlists))
+        self.subset_map = {}
+        for lidx, mlist in enumerate(mlists):
+            subset_id = subset_labels[lidx]
+            self.subset_map[subset_id] = TaxonNamespace(label=subset_id)
+            for i, t in enumerate(mlist):
+                self.subset_map[subset_id].add_taxon(t)
+        return self.subsets()
+
+##############################################################################
+## TaxonNamespaceMapping
+
+class TaxonNamespaceMapping(
+        basemodel.DataObject,
+        basemodel.Annotable):
+    """
+    A many-to-one mapping of |Taxon| objects (e.g., gene taxa to population/species taxa).
+    """
+
+    @staticmethod
+    def create_contained_taxon_mapping(containing_taxon_namespace,
+            num_contained,
+            contained_taxon_label_prefix=None,
+            contained_taxon_label_separator=' ',
+            contained_taxon_label_fn=None):
+        """
+        Creates and returns a TaxonNamespaceMapping object that maps multiple
+        "contained" Taxon objects (e.g., genes) to Taxon objects in
+        ``containing_taxon_namespace`` (e.g., populations or species).
+
+            ``containing_taxon_namespace``
+                A TaxonNamespace object that defines a Taxon for each population or
+                species.
+
+            ``num_contained``
+                The number of genes per population of species. The value of
+                this attribute can be a scalar integer, in which case each
+                species or population taxon will get the same fixed number
+                of genes. Or it can be a list, in which case the list has
+                to have as many elements as there are members in
+                ``containing_taxon_namespace``, and each element will specify the
+                number of genes that the corresponding species or population
+                Taxon will get.
+
+            ``contained_taxon_label_prefix``
+                If specified, then each gene Taxon label will begin with this.
+                Otherwise, each gene Taxon label will begin with the same label
+                as its corresponding species/population taxon label.
+
+            ``contained_taxon_label_separator``
+                String used to separate gene Taxon label prefix from its index.
+
+            ``contained_taxon_label_fn``
+                If specified, should be a function that takes two arguments: a
+                Taxon object from ``containing_taxon_namespace`` and an integer
+                specifying the contained gene index. It should return a string
+                which will be used as the label for the corresponding gene
+                taxon. If not None, this will bypass the
+                ``contained_taxon_label_prefix`` and
+                ``contained_taxon_label_separator`` arguments.
+        """
+        if isinstance(num_contained, int):
+            _num_contained = [num_contained] * len(containing_taxon_namespace)
+        else:
+            _num_contained = num_contained
+        contained_to_containing = {}
+        contained_taxa = TaxonNamespace()
+        for cidx, containing_taxon in enumerate(containing_taxon_namespace):
+            num_new = _num_contained[cidx]
+            for new_idx in range(num_new):
+
+                if contained_taxon_label_fn is not None:
+                    label = contained_taxon_label_fn(containing_taxon,
+                            new_idx)
+                else:
+                    label = "%s%s%d" % (containing_taxon.label,
+                            contained_taxon_label_separator,
+                            new_idx+1)
+                contained_taxon = Taxon(label=label)
+                contained_to_containing[contained_taxon] = containing_taxon
+                contained_taxa.append(contained_taxon)
+        contained_to_containing_map = TaxonNamespaceMapping(domain_taxon_namespace=contained_taxa,
+                range_taxon_namespace=containing_taxon_namespace,
+                mapping_dict=contained_to_containing)
+        return contained_to_containing_map
+
+    def __init__(self, **kwargs):
+        """
+        __init__ uses one of the following keyword arguments:
+
+            - ``mapping_fn``
+                A function that takes a |Taxon| object from the domain taxa
+                as an argument and returns the corresponding |Taxon| object
+                from the range taxa. If this argument is given, then a
+                |TaxonNamespace| or some other container of |Taxon| objects needs
+                to be passed using the ``taxon_namespace`` argument.
+            - ``mapping_attr_name``
+                Name of an attribute of |Taxon| object of the domain taxa
+                that references the corresponding |Taxon| object from the
+                range taxa. If this argument is given, then a |TaxonNamespace| or
+                some other container of |Taxon| objects needs to be passed
+                using the ``taxon_namespace`` argument.
+            - ``mapping_dict``
+                A dictionary with |Taxon| objects from the domain taxa as
+                keys, and the corresponding |Taxon| object from the range
+                taxa as values.
+        """
+        basemodel.DataObject.__init__(self, label=kwargs.pop("label", None))
+        self.forward = {}
+        self.reverse = {}
+        if "mapping_fn" in kwargs:
+            if "domain_taxon_namespace" not in kwargs:
+                raise TypeError("Must specify 'domain_taxon_namespace'")
+            self.apply_mapping_fn(kwargs["mapping_fn"],
+                    domain_taxon_namespace=kwargs["domain_taxon_namespace"],
+                    range_taxon_namespace=kwargs.get("range_taxon_namespace", None))
+        elif "mapping_attr_name" in kwargs:
+            if "domain_taxon_namespace" not in kwargs:
+                raise TypeError("Must specify 'domain_taxon_namespace'")
+            self.apply_mapping_attr_name(kwargs["mapping_attr_name"],
+                    domain_taxon_namespace=kwargs["domain_taxon_namespace"],
+                    range_taxon_namespace=kwargs.get("range_taxon_namespace", None))
+        elif "mapping_dict" in kwargs:
+            self.apply_mapping_dict(kwargs["mapping_dict"],
+                    domain_taxon_namespace=kwargs.get("domain_taxon_namespace", None),
+                    range_taxon_namespace=kwargs.get("range_taxon_namespace", None))
+        else:
+            raise TypeError("Must specify at least one of: 'mapping_fn', 'mapping_attr_name', or 'mapping_dict'")
+
+    def __len__(self):
+        """
+        Number of subsets.
+        """
+        return len(self.forward)
+
+    def __iter__(self):
+        """
+        Iterate over subsets.
+        """
+        for k in self.forward:
+            yield k
+
+    def items(self):
+        return self.forward.items()
+
+    def keys(self):
+        return self.forward.keys()
+
+    def __getitem__(self, taxon):
+        """
+        Get mapping for specified taxon.
+        """
+        return self.forward[taxon]
+
+    def _get_domain_taxon_namespace(self):
+        return self._domain_taxon_namespace
+
+    def _set_domain_taxon_namespace(self, taxa):
+        if taxa and not isinstance(taxa, TaxonNamespace):
+            self._domain_taxon_namespace = TaxonNamespace(taxa)
+        else:
+            self._domain_taxon_namespace = taxa
+
+    domain_taxon_namespace = property(_get_domain_taxon_namespace, _set_domain_taxon_namespace)
+
+    def _get_range_taxon_namespace(self):
+        return self._range_taxon_namespace
+
+    def _set_range_taxon_namespace(self, taxa):
+        if taxa and not isinstance(taxa, TaxonNamespace):
+            self._range_taxon_namespace = TaxonNamespace(taxa)
+        else:
+            self._range_taxon_namespace = taxa
+
+    range_taxon_namespace = property(_get_range_taxon_namespace, _set_range_taxon_namespace)
+
+    def apply_mapping_fn(self, mfunc, domain_taxon_namespace, range_taxon_namespace=None):
+        """
+        Constructs forward and reverse mapping dictionaries based on ``mfunc``,
+        which should take a |Taxon| object in ``domain_taxon_namespace`` as an argument
+        and return another |Taxon| object.
+        """
+        self.forward = {}
+        self.reverse = {}
+        self.domain_taxon_namespace = domain_taxon_namespace
+        if range_taxon_namespace is None:
+            self.range_taxon_namespace = TaxonNamespace()
+        else:
+            self.range_taxon_namespace = range_taxon_namespace
+        for dt in self.domain_taxon_namespace:
+            rt = mfunc(dt)
+            if rt not in self.range_taxon_namespace:
+                self.range_taxon_namespace.add_taxon(rt)
+            self.forward[dt] = rt
+            try:
+                self.reverse[rt].add(dt)
+            except KeyError:
+                self.reverse[rt] = set([dt])
+
+    def apply_mapping_attr_name(self, attr_name, domain_taxon_namespace, range_taxon_namespace=None):
+        """
+        Constructs mapping based on attribute ``attr_name`` of each
+        |Taxon| object in ``domain_taxon_namespace``.
+        """
+        return self.apply_mapping_fn(lambda x: getattr(x, attr_name), domain_taxon_namespace=domain_taxon_namespace, range_taxon_namespace=range_taxon_namespace)
+
+    def apply_mapping_dict(self, mdict, domain_taxon_namespace=None, range_taxon_namespace=None):
+        """
+        Constructs mapping based on dictionary ``mdict``, which should have
+        domain taxa as keys and range taxa as values.
+        """
+        if domain_taxon_namespace is None:
+            domain_taxon_namespace = TaxonNamespace(mdict.keys())
+        return self.apply_mapping_fn(lambda x: mdict[x], domain_taxon_namespace=domain_taxon_namespace, range_taxon_namespace=range_taxon_namespace)
+
+    def mesquite_association_rows(self):
+        from dendropy.dataio import nexusprocessing
+        rows = []
+        for rt in self.reverse:
+            x1 = nexusprocessing.escape_nexus_token(rt.label)
+            dt_labels = [dt.label for dt in self.reverse[rt]]
+            dt_labels.sort()
+            x2 = " ".join([nexusprocessing.escape_nexus_token(d) for d in dt_labels])
+            rows.append("        %s / %s" % (x1, x2))
+        return ",\n".join(rows)
+
+    def write_mesquite_association_block(self, out, domain_taxon_namespace_title=None, range_taxon_namespace_title=None):
+        """
+        For debugging purposes ...
+        """
+        def _compose_title(b):
+            if b.label:
+                return b.label
+            else:
+                return "d{}".format(id(b))
+        from dendropy.dataio import nexusprocessing
+        out.write("BEGIN TaxaAssociation;\n")
+        title = _compose_title(self)
+        out.write("    TITLE %s;\n"  % nexusprocessing.escape_nexus_token(title))
+        if domain_taxon_namespace_title is None:
+            domain_taxon_namespace_title = _compose_title(self.domain_taxon_namespace)
+        if range_taxon_namespace_title is None:
+            range_taxon_namespace_title = _compose_title(self.range_taxon_namespace)
+        out.write("    TAXA %s, %s;\n" % (
+            nexusprocessing.escape_nexus_token(range_taxon_namespace_title),
+            nexusprocessing.escape_nexus_token(domain_taxon_namespace_title)
+            ))
+        out.write("    ASSOCIATES\n")
+        out.write(self.mesquite_association_rows() + "\n")
+        out.write("    ;\n")
+        out.write("END;\n")
diff --git a/dendropy/datamodel/treecollectionmodel.py b/dendropy/datamodel/treecollectionmodel.py
new file mode 100644
index 0000000..dc57cc9
--- /dev/null
+++ b/dendropy/datamodel/treecollectionmodel.py
@@ -0,0 +1,2989 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+This module handles the core definition of classes that model collections of
+trees.
+"""
+
+import collections
+import math
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+import copy
+import sys
+from dendropy.utility import container
+from dendropy.utility import error
+from dendropy.utility import bitprocessing
+from dendropy.utility import deprecate
+from dendropy.utility import constants
+from dendropy.calculate import statistics
+from dendropy.datamodel import basemodel
+from dendropy.datamodel import taxonmodel
+from dendropy.datamodel import treemodel
+from dendropy import dataio
+
+##############################################################################
+### TreeList
+
+class TreeList(
+        taxonmodel.TaxonNamespaceAssociated,
+        basemodel.Annotable,
+        basemodel.Deserializable,
+        basemodel.MultiReadable,
+        basemodel.Serializable,
+        basemodel.DataObject):
+    """
+    A collection of |Tree| objects, all referencing the same "universe" of
+    operational taxonomic unit concepts through the same |TaxonNamespace|
+    object reference.
+    """
+
+    def _parse_and_create_from_stream(cls,
+            stream,
+            schema,
+            collection_offset=None,
+            tree_offset=None,
+            **kwargs):
+        """
+        Constructs a new |TreeList| object and populates it with trees from
+        file-like object ``stream``.
+
+        Notes
+        -----
+        *All* operational taxonomic unit concepts in the data source will be included
+        in the |TaxonNamespace| object associated with the new
+        |TreeList| object and its contained |Tree| objects, even those
+        not associated with trees or the particular trees being retrieved.
+
+        Parameters
+        ----------
+
+        stream : file or file-like object
+            Source of data.
+
+        schema : string
+            Identifier of format of data in ``stream``
+
+        collection_offset : integer or None
+            0-based index indicating collection of trees to parse. If `None`,
+            then all tree collections are retrieved, with each distinct
+            collection parsed into a separate |TreeList| object. If the
+            tree collection offset index is equal or greater than the number of
+            tree collections in the data source, then IndexError is raised.
+            Negative offsets work like negative list indexes; e.g., a
+            ``collection_offset`` of -1 means to read the last collection of
+            trees in the data source. Data formats that do not support the
+            concept of distinct tree collections (e.g. NEWICK) are considered
+            single-collection data sources (i.e., the only acceptable
+            ``collection_offset`` values are -1 or 0).
+
+        tree_offset : integer or None
+            0-based index indicating particular tree within a particular
+            collection of trees at which to begin reading.  If not specified or
+            `None` (default), then all trees are parsed.  Otherwise, must be an
+            integer value up to the length of the collection minus 1.  A positive
+            offset indicates the number of trees in the collection to skip;
+            e.g. a ``tree_offset`` of 20 means to skip the first 20 trees in the
+            collection.  Negative offsets work like negative list indexes;
+            e.g., a ``tree_offset`` value of -10 means to retrieve the last 10
+            trees in the collection.  If the tree offset index is equal or
+            greater than the number of trees in the collection, then IndexError
+            is raised. Requires that a particular tree collection has been
+            identified using the ``tree_collection_offset`` parameter: if
+            ``tree_collection_offset`` is not specified, a TypeError is raised.
+
+        \*\*kwargs : keyword arguments
+            Arguments to customize parsing, instantiation, processing, and
+            accession of |Tree| objects read from the data source, including
+            schema- or format-specific handling.
+
+            The following optional keyword arguments are recognized and handled
+            by this function:
+
+                * ``label`` Specifies the label or description of the new
+                  |TreeList|.
+                * ``taxon_namespace`` specifies the |TaxonNamespace|
+                   object to be attached to the new |TreeList| object.
+                   Note that *all* operational taxonomic unit concepts in the
+                   data source will be accessioned into the specified
+                   |TaxonNamespace| instance. This includes the
+                   operational taxonomic unit definitions associated with all
+                   tree collections and character matrices in the data source.
+                * ``tree_list`` : **SPECIAL** If passed a |TreeList| using
+                  this keyword, then this instance is populated and returned
+                  (instead of a new instance being created).
+
+            All other keyword arguments are passed directly to |TreeList|.read()`.
+            Other keyword arguments may be available, depending on the implementation
+            of the reader specialized to handle ``schema`` formats.
+
+        Notes
+        -----
+        Note that in most cases, even if ``collection_offset`` and ``tree_offset``
+        are specified to restrict the trees returned, the *entire* data source
+        is still parsed and processed. So this is not more efficient than
+        reading all the trees and then manually-extracting them later; just
+        more convenient. If you need just a single subset of trees from a data
+        source, there is no gain in efficiency. If you need multiple trees or
+        subsets of trees from the same data source, it would be much more
+        efficient to read the entire data source, and extract trees as needed.
+
+        Returns
+        -------
+        A |TreeList| object.
+
+        """
+        # these must be pulled before passing the kwargs
+        # down to the reader
+        tree_list = kwargs.pop("tree_list", None)
+        taxon_namespace = taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs, None)
+        label = kwargs.pop("label", None)
+
+        # get the reader
+        reader = dataio.get_reader(schema, **kwargs)
+
+        # Accommodate an existing TreeList object being passed
+        if tree_list is None:
+            tree_list = cls(label=label, taxon_namespace=taxon_namespace)
+
+        # A tree offset without a collection offset implies the first
+        # (offset = 0) collection.
+        if collection_offset is None and tree_offset is not None:
+            collection_offset = 0
+        if collection_offset is None:
+            # if tree_offset is not None:
+            #     raise TypeError("Cannot specify ``tree_offset`` without specifying ``collection_offset``")
+            # coerce all tree products into this list
+            reader.read_tree_lists(
+                        stream=stream,
+                        taxon_namespace_factory=tree_list._taxon_namespace_pseudofactory,
+                        tree_list_factory=tree_list._tree_list_pseudofactory,
+                        global_annotations_target=None)
+        else:
+            # Parse each collection into its own TreeList, then copy trees
+            # from the selected collection into ``tree_list``.
+            tree_lists = reader.read_tree_lists(
+                        stream=stream,
+                        taxon_namespace_factory=tree_list._taxon_namespace_pseudofactory,
+                        tree_list_factory=tree_list.__class__,
+                        global_annotations_target=None)
+            # if collection_offset < 0:
+            #     raise IndexError("Collection offset out of range: {} (minimum valid tree offset = 0)".format(collection_offset))
+            if collection_offset >= len(tree_lists):
+                raise IndexError("Collection offset out of range: {} (number of collections = {}, maximum valid collection offset = {})".format(collection_offset, len(tree_lists), len(tree_lists)-1))
+            target_tree_list = tree_lists[collection_offset]
+            tree_list.copy_annotations_from(target_tree_list)
+            if tree_offset is not None:
+                # if tree_offset < 0:
+                #     raise IndexError("Tree offset out of range: {} (minimum offset = 0)".format(tree_offset))
+                if tree_offset >= len(target_tree_list):
+                    raise IndexError("Tree offset out of range: {} (number of trees in source = {}, maximum valid tree offset = {})".format(tree_offset, len(target_tree_list), len(target_tree_list)-1))
+                for tree in target_tree_list[tree_offset:]:
+                    tree_list._trees.append(tree)
+            else:
+                for tree in target_tree_list:
+                    tree_list._trees.append(tree)
+        return tree_list
+        # taxon_namespace = taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs, None)
+        # label = kwargs.pop("label", None)
+        # tree_list = cls(label=label,
+        #         taxon_namespace=taxon_namespace)
+        # tree_list.read(
+        #         stream=stream,
+        #         schema=schema,
+        #         collection_offset=collection_offset,
+        #         tree_offset=tree_offset,
+        #         **kwargs)
+        # return tree_list
+    _parse_and_create_from_stream = classmethod(_parse_and_create_from_stream)
+
+    @classmethod
+    def get(cls, **kwargs):
+        """
+        Instantiate and return a *new* |TreeList| object from a data source.
+
+        **Mandatory Source-Specification Keyword Argument (Exactly One Required):**
+
+            - **file** (*file*) -- File or file-like object of data opened for reading.
+            - **path** (*str*) -- Path to file of data.
+            - **url** (*str*) -- URL of data.
+            - **data** (*str*) -- Data given directly.
+
+        **Mandatory Schema-Specification Keyword Argument:**
+
+            - **schema** (*str*) -- Identifier of format of data given by the
+              "``file``", "``path``", "``data``", or "``url``" argument
+              specified above: ":doc:`newick </schemas/newick>`", ":doc:`nexus
+              </schemas/nexus>`", or ":doc:`nexml </schemas/nexml>`". See
+              "|Schemas|" for more details.
+
+        **Optional General Keyword Arguments:**
+
+            - **label** (*str*) -- Name or identifier to be assigned to the new
+              object; if not given, will be assigned the one specified in the
+              data source, or `None` otherwise.
+            - **taxon_namespace** (|TaxonNamespace|) -- The |TaxonNamespace|
+              instance to use to :doc:`manage the taxon names </primer/taxa>`.
+              If not specified, a new one will be created.
+            - **collection_offset** (*int*) -- 0-based index of tree block or
+              collection in source to be parsed. If not specified then the
+              first collection (offset = 0) is assumed.
+            - **tree_offset** (*int*) -- 0-based index of first tree within the
+              collection specified by ``collection_offset`` to be parsed (i.e.,
+              skipping the first ``tree_offset`` trees). If not
+              specified, then the first tree (offset = 0) is assumed (i.e., no
+              trees within the specified collection will be skipped). Use this
+              to specify, e.g. a burn-in.
+            - **ignore_unrecognized_keyword_arguments** (*bool*) -- If `True`,
+              then unsupported or unrecognized keyword arguments will not
+              result in an error. Default is `False`: unsupported keyword
+              arguments will result in an error.
+
+        **Optional Schema-Specific Keyword Arguments:**
+
+            These provide control over how the data is interpreted and
+            processed, and supported argument names and values depend on
+            the schema as specified by the value passed as the "``schema``"
+            argument. See "|Schemas|" for more details.
+
+        **Examples:**
+
+        ::
+
+            tlst1 = dendropy.TreeList.get(
+                    file=open('treefile.tre', 'rU'),
+                    schema="newick")
+            tlst2 = dendropy.TreeList.get(
+                    path='sometrees.nexus',
+                    schema="nexus",
+                    collection_offset=2,
+                    tree_offset=100)
+            tlst3 = dendropy.TreeList.get(
+                    data="((A,B),(C,D));((A,C),(B,D));",
+                    schema="newick")
+            tlst4 = dendropy.TreeList.get(
+                    url="http://api.opentreeoflife.org/v2/study/pg_1144/tree/tree2324.nex",
+                    schema="nexus")
+
+
+        """
+        # Source/schema resolution and parsing are handled by the shared
+        # Deserializable machinery.
+        return cls._get_from(**kwargs)
+
+    DEFAULT_TREE_TYPE = treemodel.Tree
+
+    def tree_factory(cls, *args, **kwargs):
+        """
+        Creates and returns a |Tree| of a type that this list understands how to
+        manage.
+
+        Deriving classes can override this to provide for custom Tree-type
+        object lists. You can simple override the class-level variable
+        `DEFAULT_TREE_TYPE` in your derived class if the constructor signature
+        of the alternate tree type is the same as |Tree|.
+        If you want to have a TreeList *instance* that generates
+        custom trees (i.e., as opposed to a TreeList-ish *class* of instances),
+        set the ``tree_type`` attribute of the TreeList instance.
+
+        Parameters
+        ----------
+        \*args : positional arguments
+            Passed directly to constructor of |Tree|.
+
+        \*\*kwargs : keyword arguments
+            Passed directly to constructor of |Tree|.
+
+        Returns
+        -------
+        A |Tree| object.
+
+        """
+        tree = cls.DEFAULT_TREE_TYPE(*args, **kwargs)
+        return tree
+    tree_factory = classmethod(tree_factory)
+
+    ###########################################################################
+    ### Lifecycle and Identity
+
+    def __init__(self, *args, **kwargs):
+        """
+        Constructs a new |TreeList| object, populating it with any iterable
+        container with Tree object members passed as unnamed argument, or from
+        a data source if ``stream`` and ``schema`` are passed.
+
+        If passed an iterable container, the objects in that container must be
+        of type |Tree| (or derived). If the container is of type |TreeList|,
+        then, because each |Tree| object must have the same |TaxonNamespace|
+        reference as the containing |TreeList|, the trees in the container
+        passed as an initialization argument will be **deep**-copied (except
+        for associated |TaxonNamespace| and |Taxon| objects, which will
+        be shallow-copied). If the container is any other type of
+        iterable, then the |Tree| objects will be **shallow**-copied.
+
+        |TreeList| objects can directly thus be instantiated in the
+        following ways::
+
+            # /usr/bin/env python
+
+            import StringIO
+            from dendropy import TaxonNamespace, Tree, TreeList
+
+            # instantiate an empty tree list
+            tlst1 = TreeList()
+
+            # TreeList objects can be instantiated from an external data source
+            # using the 'get()' factory class method
+
+            tlst2 = TreeList.get(file=open('treefile.tre', 'rU'), schema="newick")
+            tlst3 = TreeList.get(path='sometrees.nexus', schema="nexus")
+            tlst4 = TreeList.get(data="((A,B),(C,D));((A,C),(B,D));", schema="newick")
+
+            # can also call `read()` on a TreeList object; each read adds
+            # (appends) the tree(s) found to the TreeList
+            tlst5 = TreeList()
+            tlst5.read(file=open('boot1.tre', 'rU'), schema="newick")
+            tlst5.read(path="boot3.tre", schema="newick")
+            tlst5.read(data="((A,B),(C,D));((A,C),(B,D));", schema="newick")
+
+            # populated from list of Tree objects
+            tlist6_1 = Tree.get(
+                    data="((A,B),(C,D))",
+                    schema="newick")
+            tlist6_2 = Tree.get(
+                    data="((A,C),(B,D))",
+                    schema="newick")
+            tlist6 = TreeList([tlist6_1, tlist6_2])
+
+            # passing keywords to underlying tree parser
+            tlst8 = TreeList.get(
+                             data="((A,B),(C,D));((A,C),(B,D));",
+                             schema="newick",
+                             taxon_namespace=tlst3.taxon_namespace,
+                             rooting="force-rooted",
+                             extract_comment_metadata=True,
+                             store_tree_weights=False,
+                             preserve_underscores=True)
+
+            # Subsets of trees can be read. Note that in most cases, the entire
+            # data source is parsed, so this is not more efficient than reading
+            # all the trees and then manually-extracting them later; just more
+            # convenient
+
+            # skip the *first* 100 trees in the *first* (offset=0) collection of trees
+            trees = TreeList.get(
+                        path="mcmc.tre",
+                        schema="newick",
+                        collection_offset=0,
+                        tree_offset=100)
+
+            # get the *last* 10 trees in the *second* (offset=1) collection of trees
+            trees = TreeList.get(
+                        path="mcmc.tre",
+                        schema="newick",
+                        collection_offset=1,
+                        tree_offset=-10)
+
+            # get the last 10 trees in the second-to-last collection of trees
+            trees = TreeList.get(
+                        path="mcmc.tre",
+                        schema="newick",
+                        collection_offset=-2,
+                        tree_offset=100)
+
+            # Slices give shallow-copy: trees are references
+            tlst4copy0a = tlst4[:]
+            assert tlst4copy0a[0] is tlst4[0]
+            tlst4copy0b = tlst4[:4]
+            assert tlst4copy0b[0] is tlst4[0]
+
+            # 'Taxon-namespace-scoped' copy:
+            # I.e., Deep-copied objects but taxa and taxon namespace
+            # are copied as references
+            tlst4copy1a = TreeList(tlst4)
+            tlst4copy1b = TreeList([Tree(t) for t in tlst4])
+            assert tlst4copy1a[0] is not tlst4[0] # True
+            assert tlst4copy1a.taxon_namespace is tlst4.taxon_namespace # True
+            assert tlst4copy1b[0] is not tlst4[0] # True
+            assert tlst4copy1b.taxon_namespace is tlst4.taxon_namespace # True
+
+
+        """
+        if len(args) > 1:
+            # only allow 1 positional argument
+            raise error.TooManyArgumentsError(func_name=self.__class__.__name__, max_args=1, args=args)
+        elif len(args) == 1 and isinstance(args[0], TreeList):
+            # Copy-construction from another TreeList: taxon-namespace-scoped
+            # deep copy (see _clone_from).
+            self._clone_from(args[0], kwargs)
+        else:
+            basemodel.DataObject.__init__(self, label=kwargs.pop("label", None))
+            taxonmodel.TaxonNamespaceAssociated.__init__(self,
+                    taxon_namespace=taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs, None))
+            # Per-instance tree type allows custom Tree subclasses.
+            self.tree_type = kwargs.pop("tree_type", self.__class__.DEFAULT_TREE_TYPE)
+            self._trees = []
+            self.comments = []
+            if len(args) == 1:
+                # Shallow-copy trees from any other iterable, validating types.
+                for aidx, a in enumerate(args[0]):
+                    if not isinstance(a, self.tree_type):
+                        raise ValueError("Cannot add object not of 'Tree' type to 'TreeList'")
+                    self.append(a)
+        if kwargs:
+            raise TypeError("Unrecognized or unsupported arguments: {}".format(kwargs))
+
+    def __hash__(self):
+        return id(self)
+
+    def __eq__(self, other):
+        return (
+            isinstance(other, TreeList)
+            and (self.taxon_namespace is other.taxon_namespace)
+            and (self._trees == other._trees)
+        )
+
+    def _clone_from(self, tree_list, kwargs_dict):
+        """
+        Replaces this instance's state with a taxon-namespace-scoped deep
+        copy of ``tree_list``: trees are deep-copied, but Taxon and
+        TaxonNamespace objects are mapped (not cloned) via the deepcopy
+        memo dictionary.
+        """
+        memo = {}
+        # memo[id(tree)] = self
+        # Resolve the target namespace: an explicit ``taxon_namespace``
+        # keyword overrides the source list's namespace.
+        taxon_namespace = taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs_dict, tree_list.taxon_namespace)
+        memo[id(tree_list.taxon_namespace)] = taxon_namespace
+        if taxon_namespace is not tree_list.taxon_namespace:
+            # Migrating namespaces: map each source taxon to a same-labeled
+            # taxon in the target namespace (creating it if needed).
+            for t1 in tree_list.taxon_namespace:
+                t2 = taxon_namespace.require_taxon(label=t1.label)
+                memo[id(t1)] = t2
+        else:
+            # Same namespace: seed the memo so deepcopy shares the taxa.
+            for t1 in tree_list.taxon_namespace:
+                memo[id(t1)] = t1
+        t = copy.deepcopy(tree_list, memo)
+        # Adopt the deep copy's state wholesale.
+        self.__dict__ = t.__dict__
+        self.label = kwargs_dict.pop("label", tree_list.label)
+        return self
+
+    def __copy__(self):
+        other = TreeList(label=self.label, taxon_namespace=self.taxon_namespace)
+        other._trees = list(self._trees)
+        memo = {}
+        memo[id(self)] = other
+        other.deep_copy_annotations_from(self, memo)
+        return other
+
+    def taxon_namespace_scoped_copy(self, memo=None):
+        if memo is None:
+            memo = {}
+        # this populates ``memo`` with references to the
+        # the TaxonNamespace and Taxon objects
+        self.taxon_namespace.populate_memo_for_taxon_namespace_scoped_copy(memo)
+        return self.__deepcopy__(memo=memo)
+
+    def __deepcopy__(self, memo=None):
+        return basemodel.Annotable.__deepcopy__(self, memo=memo)
+
+    ###########################################################################
+    ### Representation
+
+    def __str__(self):
+        return "<TreeList {} '{}': [{}]>".format(hex(id(self)), self.label, ", ".join(repr(i) for i in self._trees))
+
+    ###########################################################################
+    ### Data I/O
+
+    def _taxon_namespace_pseudofactory(self, **kwargs):
+        """
+        Dummy factory to coerce all |TaxonNamespace| objects required when
+        parsing a data source to reference ``self.taxon_namespace``.
+        """
+        if "label" in kwargs and kwargs["label"] is not None and self.taxon_namespace.label is None:
+            self.taxon_namespace.label = kwargs["label"]
+        return self.taxon_namespace
+
+    def _tree_list_pseudofactory(self, **kwargs):
+        """
+        Dummy factory to coerce all |TreeList| objects required when
+        parsing a data source to reference ``self``.
+        """
+        if "label" in kwargs and kwargs["label"] is not None and self.label is None:
+            self.label = kwargs["label"]
+        return self
+
+    def _parse_and_add_from_stream(self,
+            stream,
+            schema,
+            collection_offset=None,
+            tree_offset=None,
+            **kwargs):
+        """
+        Parses |Tree| objects from data source and adds to this collection.
+
+        Notes
+        -----
+        *All* operational taxonomic unit concepts in the data source will be included
+        in the |TaxonNamespace| object associated with the new
+        |TreeList| object and its contained |Tree| objects, even those
+        not associated with trees or the particular trees being retrieved.
+
+        Parameters
+        ----------
+
+        stream : file or file-like object
+            Source of data.
+
+        schema : string
+            Identifier of format of data in ``stream``.
+
+        collection_offset : integer or None
+            0-based index indicating collection of trees to parse. If `None`,
+            then all tree collections are retrieved, with each distinct
+            collection parsed into a separate |TreeList| object. If the
+            tree colleciton offset index is equal or greater than the number of
+            tree collections in the data source, then IndexError is raised.
+            Negative offsets work like negative list indexes; e.g., a
+            ``collection_offset`` of -1 means to read the last collection of
+            trees in the data source. For data formats that do not support the
+            concept of distinct tree collections (e.g. NEWICK) are considered
+            single-collection data source (i.e, the only acceptable
+            ``collection_offset`` values are -1 or 0).
+
+        tree_offset : integer or None
+            0-based index indicating particular tree within a particular
+            collection of trees at which to begin reading.  If not specified or
+            `None` (default), then all trees are parsed.  Otherwise, must be an
+            integer value up the length of the collection minus 1.  A positive
+            offset indicates the number of trees in the collection to skip;
+            e.g. a ``tree_offset`` of 20 means to skip the first 20 trees in the
+            collection.  Negative offsets work like negative list indexes;
+            e.g., a ``tree_offset`` value of -10 means to retrieve the last 10
+            trees in the collection.  If the tree offset index is equal or
+            greater than the number of trees in the collection, then IndexError
+            is raised. Requires that a particular tree collection has been
+            identified using the ``tree_collection_offset`` parameter: if
+            ``tree_collection_offset`` is not specified, a TypeError is raised.
+
+        \*\*kwargs : keyword arguments
+
+            Arguments to customize parsing, instantiation, processing, and
+            accession of |Tree| objects read from the data source, including
+            schema- or format-specific handling. These will be passed to the
+            underlying schema-specific reader for handling.
+
+            General (schema-agnostic) keyword arguments are:
+
+                * ``rooted`` specifies the default rooting interpretation of the tree.
+                * ``edge_length_type`` specifies the type of the edge lengths (int or
+                  float; defaults to 'float')
+
+            Other keyword arguments are available depending on the schema. See
+            specific schema handlers (e.g., `NewickReader`, `NexusReader`,
+            `NexmlReader`) for more details.
+
+        Notes
+        -----
+        Note that in most cases, even if ``collection_offset`` and ``tree_offset``
+        are specified to restrict the trees read, the *entire* data source
+        is still parsed and processed. So this is not more efficient than
+        reading all the trees and then manually-extracting them later; just
+        more convenient. If you need just a single subset of trees from a data
+        source, there is no gain in efficiency. If you need multiple trees or
+        subsets of trees from the same data source, it would be much more
+        efficient to read the entire data source, and extract trees as needed.
+
+        Returns
+        -------
+        n : ``int``
+            The number of |Tree| objects read.
+
+        """
+        if "taxon_namespace" in kwargs and kwargs['taxon_namespace'] is not self.taxon_namespace:
+            raise TypeError("Cannot change ``taxon_namespace`` when reading into an existing TreeList")
+        kwargs["taxon_namespace"] = self.taxon_namespace
+        kwargs["tree_list"] = self
+        cur_size = len(self._trees)
+        TreeList._parse_and_create_from_stream(
+                stream=stream,
+                schema=schema,
+                collection_offset=collection_offset,
+                tree_offset=tree_offset,
+                **kwargs)
+        new_size = len(self._trees)
+        return new_size - cur_size
+
+    def read(self, **kwargs):
+        """
+        Add |Tree| objects to existing |TreeList| from data source providing
+        one or more collections of trees.
+
+        **Mandatory Source-Specification Keyword Argument (Exactly One Required):**
+
+            - **file** (*file*) -- File or file-like object of data opened for reading.
+            - **path** (*str*) -- Path to file of data.
+            - **url** (*str*) -- URL of data.
+            - **data** (*str*) -- Data given directly.
+
+        **Mandatory Schema-Specification Keyword Argument:**
+
+            - **schema** (*str*) -- Identifier of format of data given by the
+              "``file``", "``path``", "``data``", or "``url``" argument
+              specified above: ":doc:`newick </schemas/newick>`", ":doc:`nexus
+              </schemas/nexus>`", or ":doc:`nexml </schemas/nexml>`". See
+              "|Schemas|" for more details.
+
+        **Optional General Keyword Arguments:**
+
+            - **collection_offset** (*int*) -- 0-based index of tree block or
+              collection in source to be parsed. If not specified then the
+              first collection (offset = 0) is assumed.
+            - **tree_offset** (*int*) -- 0-based index of first tree within the
+              collection specified by ``collection_offset`` to be parsed (i.e.,
+              skipping the first ``tree_offset`` trees). If not
+              specified, then the first tree (offset = 0) is assumed (i.e., no
+              trees within the specified collection will be skipped). Use this
+              to specify, e.g. a burn-in.
+            - **ignore_unrecognized_keyword_arguments** (*bool*) -- If `True`,
+              then unsupported or unrecognized keyword arguments will not
+              result in an error. Default is `False`: unsupported keyword
+              arguments will result in an error.
+
+        **Optional Schema-Specific Keyword Arguments:**
+
+            These provide control over how the data is interpreted and
+            processed, and supported argument names and values depend on
+            the schema as specified by the value passed as the "``schema``"
+            argument. See "|Schemas|" for more details.
+
+        **Examples:**
+
+        ::
+
+            tlist = dendropy.TreeList()
+            tlist.read(
+                    file=open('treefile.tre', 'rU'),
+                    schema="newick",
+                    tree_offset=100)
+            tlist.read(
+                    path='sometrees.nexus',
+                    schema="nexus",
+                    collection_offset=2,
+                    tree_offset=100)
+            tlist.read(
+                    data="((A,B),(C,D));((A,C),(B,D));",
+                    schema="newick")
+            tlist.read(
+                    url="http://api.opentreeoflife.org/v2/study/pg_1144/tree/tree2324.nex",
+                    schema="nexus")
+
+        """
+        return basemodel.MultiReadable._read_from(self, **kwargs)
+
+    def _format_and_write_to_stream(self, stream, schema, **kwargs):
+        """
+        Writes out ``self`` in ``schema`` format to a destination given by
+        file-like object ``stream``.
+
+        Parameters
+        ----------
+        stream : file or file-like object
+            Destination for data.
+        schema : string
+            Must be a recognized and tree file schema, such as "nexus",
+            "newick", etc, for which a specialized tree list writer is
+            available. If this is not implemented for the schema specified, then
+            a UnsupportedSchemaError is raised.
+
+        \*\*kwargs : keyword arguments, optional
+            Keyword arguments will be passed directly to the writer for the
+            specified schema. See documentation for details on keyword
+            arguments supported by writers of various schemas.
+
+        """
+        writer = dataio.get_writer(schema, **kwargs)
+        writer.write_tree_list(self, stream)
+
+    ###########################################################################
+    ### List Interface
+
+    def _import_tree_to_taxon_namespace(self,
+            tree,
+            taxon_import_strategy="migrate",
+            **kwargs):
+        if tree.taxon_namespace is not self.taxon_namespace:
+            if taxon_import_strategy == "migrate":
+                tree.migrate_taxon_namespace(taxon_namespace=self.taxon_namespace,
+                        **kwargs)
+            elif taxon_import_strategy == "add":
+                tree._taxon_namespace = self.taxon_namespace
+                tree.update_taxon_namespace()
+            else:
+                raise ValueError("Unrecognized taxon import strategy: '{}'".format(taxon_import_strategy))
+        # assert tree.taxon_namespace is self.taxon_namespace
+        return tree
+
+    def insert(self,
+            index,
+            tree,
+            taxon_import_strategy="migrate",
+            **kwargs):
+        """
+        Inserts a |Tree| object, ``tree``, into the collection before
+        ``index``.
+
+        The |TaxonNamespace| reference of ``tree`` will be set to that of
+        ``self``.  Any |Taxon| objects associated with nodes in ``tree``
+        that are not already in ``self.taxon_namespace`` will be handled
+        according to ``taxon_import_strategy``:
+
+            - 'migrate'
+                |Taxon| objects associated with ``tree`` that are not already
+                in ``self.taxon_nameaspace`` will be remapped based on their
+                labels, with new :class|Taxon| objects being reconstructed if
+                none with matching labels are found. Specifically,
+                :meth:`dendropy.datamodel.treemodel.Tree.migrate_taxon_namespace()`
+                will be called on ``tree``, where ``kwargs`` is as passed to
+                this function.
+            - 'add'
+                |Taxon| objects associated with ``tree`` that are not already
+                in ``self.taxon_namespace`` will be added. Note that this might
+                result in |Taxon| objects with duplicate labels as no
+                attempt at mapping to existing |Taxon| objects based on
+                label-matching is done.
+
+        Parameters
+        ----------
+        index : integer
+            Position before which to insert ``tree``.
+        tree : A |Tree| instance
+            The |Tree| object to be added.
+        taxon_import_strategy : string
+            If ``tree`` is associated with a different |TaxonNamespace|,
+            this argument determines how new |Taxon| objects in ``tree``
+            are handled: 'migrate' or 'add'. See above for details.
+        \*\*kwargs : keyword arguments
+            These arguments will be passed directly to
+            'migrate_taxon_namespace()' method call on ``tree``.
+
+        See Also
+        --------
+
+        :meth:`Tree.migrate_taxon_namespace`
+
+        """
+        self._import_tree_to_taxon_namespace(
+                tree=tree,
+                taxon_import_strategy=taxon_import_strategy,
+                **kwargs)
+        self._trees.insert(index, tree)
+
+    def append(self,
+            tree,
+            taxon_import_strategy="migrate",
+            **kwargs):
+        """
+        Adds a |Tree| object, ``tree``, to the collection.
+
+        The |TaxonNamespace| reference of ``tree`` will be set to that of
+        ``self``.  Any |Taxon| objects associated with nodes in ``tree``
+        that are not already in ``self.taxon_namespace`` will be handled
+        according to ``taxon_import_strategy``:
+
+            - 'migrate'
+                |Taxon| objects associated with ``tree`` that are not already
+                in ``self.taxon_nameaspace`` will be remapped based on their
+                labels, with new :class|Taxon| objects being reconstructed if
+                none with matching labels are found. Specifically,
+                :meth:`dendropy.datamodel.treemodel.Tree.migrate_taxon_namespace()`
+                will be called on ``tree``, where ``kwargs`` is as passed to this
+                function.
+            - 'add'
+                |Taxon| objects associated with ``tree`` that are not already
+                in ``self.taxon_namespace`` will be added. Note that this might
+                result in |Taxon| objects with duplicate labels as no
+                attempt at mapping to existing |Taxon| objects based on
+                label-matching is done.
+
+        Parameters
+        ----------
+        tree : A |Tree| instance
+            The |Tree| object to be added.
+        taxon_import_strategy : string
+            If ``tree`` is associated with a different |TaxonNamespace|,
+            this argument determines how new |Taxon| objects in ``tree``
+            are handled: 'migrate' or 'add'. See above for details.
+        \*\*kwargs : keyword arguments
+            These arguments will be passed directly to
+            'migrate_taxon_namespace()' method call on ``tree``.
+
+        See Also
+        --------
+
+        :meth:`Tree.migrate_taxon_namespace`
+
+        """
+        self._import_tree_to_taxon_namespace(
+                tree=tree,
+                taxon_import_strategy=taxon_import_strategy,
+                **kwargs)
+        self._trees.append(tree)
+
+    def extend(self, other):
+        """
+        In-place addition of |Tree| objects in ``other`` to ``self``.
+
+        If ``other`` is a |TreeList|, then the trees are *copied*
+        and migrated into ``self.taxon_namespace``; otherwise, the original
+        objects are migrated into ``self.taxon_namespace`` and added directly.
+
+        Parameters
+        ----------
+        other : iterable of |Tree| objects
+
+        Returns
+        -------
+        ``self`` : |TreeList|
+        """
+        if isinstance(other, TreeList):
+            for t0 in other:
+                t1 = self.tree_type(t0, taxon_namespace=self.taxon_namespace)
+                self._trees.append(t1)
+        else:
+            for t0 in other:
+                self.append(t0)
+        return self
+
+    def __iadd__(self, other):
+        """
+        In-place addition of |Tree| objects in ``other`` to ``self``.
+
+        If ``other`` is a |TreeList|, then the trees are *copied*
+        and migrated into ``self.taxon_namespace``; otherwise, the original
+        objects are migrated into ``self.taxon_namespace`` and added directly.
+
+        Parameters
+        ----------
+        other : iterable of |Tree| objects
+
+        Returns
+        -------
+        ``self`` : |TreeList|
+        """
+        return self.extend(other)
+
+    def __add__(self, other):
+        """
+        Creates and returns new |TreeList| with clones of all trees in ``self``
+        as well as all |Tree| objects in ``other``.  If ``other`` is a
+        |TreeList|, then the trees are *cloned* and migrated into
+        ``self.taxon_namespace``; otherwise, the original objects are migrated into
+        ``self.taxon_namespace`` and added directly.
+
+        Parameters
+        ----------
+        other : iterable of |Tree| objects
+
+        Returns
+        -------
+        tlist : |TreeList| object
+            |TreeList| object containing clones of |Tree| objects
+            in ``self`` and ``other``.
+        """
+        tlist = TreeList(taxon_namespace=self.taxon_namespace)
+        tlist += self
+        tlist += other
+        return tlist
+
+    def __contains__(self, tree):
+        return tree in self._trees
+
+    def __delitem__(self, tree):
+        del self._trees[tree]
+
+    def __iter__(self):
+        return iter(self._trees)
+
+    def __reversed__(self):
+        return reversed(self._trees)
+
+    def __len__(self):
+        return len(self._trees)
+
+    def __getitem__(self, index):
+        """
+        If ``index`` is an integer, then |Tree| object at position ``index``
+        is returned. If ``index`` is a slice, then a |TreeList| is returned
+        with references (i.e., not copies or clones, but the actual original
+        instances themselves) to |Tree| objects in the positions given
+        by the slice. The |TaxonNamespace| is the same as ``self``.
+
+        Parameters
+        ----------
+        index : integer or slice
+            Index or slice.
+
+        Returns
+        -------
+        t : |Tree| object or |TreeList| object
+
+        """
+        if isinstance(index, slice):
+            r = self._trees[index]
+            return TreeList(r,
+                    taxon_namespace=self.taxon_namespace)
+        else:
+            return self._trees[index]
+
+    def __setitem__(self, index, value):
+        if isinstance(index, slice):
+            if isinstance(value, TreeList):
+                tt = []
+                for t0 in value:
+                    t1 = self.tree_type(t0,
+                            taxon_namespace=self.taxon_namespace)
+                    tt.append(t1)
+                value = tt
+            else:
+                for t in value:
+                    self._import_tree_to_taxon_namespace(t)
+            self._trees[index] = value
+        else:
+            self._trees[index] = self._import_tree_to_taxon_namespace(value)
+
+    def clear(self):
+        # list.clear() only with 3.4 or so ...
+        self._trees = []
+
+    def index(self, tree):
+        return self._trees.index(tree)
+
+    def pop(self, index=-1):
+        return self._trees.pop(index)
+
+    def remove(self, tree):
+        self._trees.remove(tree)
+
+    def reverse(self):
+        self._trees.reverse()
+
+    def sort(self, key=None, reverse=False):
+        self._trees.sort(key=key, reverse=reverse)
+
+    def new_tree(self, *args, **kwargs):
+        tns = taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs, self.taxon_namespace)
+        if tns is not self.taxon_namespace:
+            raise TypeError("Cannot create new Tree with different TaxonNamespace")
+        kwargs["taxon_namespace"] = self.taxon_namespace
+        if self.tree_type is not None:
+            tree = self.tree_type(*args, **kwargs)
+        else:
+            tree = self.tree_factory(*args, **kwargs)
+        self._trees.append(tree)
+        return tree
+
    ###########################################################################
    ### Taxon Handling
+
+    def reconstruct_taxon_namespace(self,
+            unify_taxa_by_label=True,
+            taxon_mapping_memo=None):
+        if taxon_mapping_memo is None:
+            taxon_mapping_memo = {}
+        for tree in self._trees:
+            tree._taxon_namespace = self.taxon_namespace
+            tree.reconstruct_taxon_namespace(
+                unify_taxa_by_label=unify_taxa_by_label,
+                taxon_mapping_memo=taxon_mapping_memo,
+            )
+
+    def update_taxon_namespace(self):
+        for tree in self._trees:
+            tree._taxon_namespace = self.taxon_namespace
+            tree.update_taxon_namespace()
+
+    def poll_taxa(self, taxa=None):
+        """
+        Returns a set populated with all of |Taxon| instances associated
+        with ``self``.
+
+        Parameters
+        ----------
+        taxa : set()
+            Set to populate. If not specified, a new one will be created.
+
+        Returns
+        -------
+        taxa : set[|Taxon|]
+            Set of taxa associated with ``self``.
+        """
+        if taxa is None:
+            taxa = set()
+        for tree in self:
+            tree.poll_taxa(taxa)
+        return taxa
+
+    def reindex_subcomponent_taxa():
+        raise NotImplementedError()
+
    ###########################################################################
    ### Special Calculations and Operations on Entire Collection
+
+    def _get_tree_array(self,
+            kwargs_dict,
+            ):
+        """
+        Return TreeArray containing information of trees currently
+        in self. Processes ``kwargs_dict`` intelligently: removing
+        and passing on keyword arguments pertaining to TreeArray
+        construction, and leaving everything else.
+        """
+        # TODO: maybe ignore_node_ages defaults to `False` but ``ultrametricity_precision`` defaults to 0?
+        ta = TreeArray.from_tree_list(
+                trees=self,
+                # taxon_namespace=self.taxon_namespace,
+                is_rooted_trees=kwargs_dict.pop("is_rooted_trees", None),
+                ignore_edge_lengths=kwargs_dict.pop("ignore_edge_lengths", False),
+                ignore_node_ages=kwargs_dict.pop("ignore_node_ages", True),
+                use_tree_weights=kwargs_dict.pop("use_tree_weights", True),
+                ultrametricity_precision=kwargs_dict.pop("ultrametricity_precision", constants.DEFAULT_ULTRAMETRICITY_PRECISION),
+                is_bipartitions_updated=kwargs_dict.pop("is_bipartitions_updated", False)
+                )
+        return ta
+
+    def split_distribution(self,
+            is_bipartitions_updated=False,
+            default_edge_length_value=None,
+            **kwargs):
+        """
+        Return `SplitDistribution` collecting information on splits in
+        contained trees. Keyword arguments get passed directly to
+        `SplitDistribution` constructor.
+        """
+        assert "taxon_namespace" not in kwargs or kwargs["taxon_namespace"] is self.taxon_namespace
+        kwargs["taxon_namespace"] = self.taxon_namespace
+        sd = SplitDistribution(**kwargs)
+        for tree in self:
+            sd.count_splits_on_tree(
+                    tree=tree,
+                    is_bipartitions_updated=is_bipartitions_updated,
+                    default_edge_length_value=default_edge_length_value)
+        return sd
+
+    def as_tree_array(self, **kwargs):
+        """
+        Return |TreeArray| collecting information on splits in contained
+        trees. Keyword arguments get passed directly to |TreeArray|
+        constructor.
+        """
+        ta = TreeArray.from_tree_list(
+                trees=self,
+                **kwargs)
+        return ta
+
+    def consensus(self,
+            min_freq=constants.GREATER_THAN_HALF,
+            is_bipartitions_updated=False,
+            summarize_splits=True,
+            **kwargs):
+        """
+        Returns a consensus tree of all trees in self, with minumum frequency
+        of bipartition to be added to the consensus tree given by ``min_freq``.
+        """
+        ta = self._get_tree_array(kwargs)
+        return ta.consensus_tree(min_freq=min_freq,
+                summarize_splits=summarize_splits,
+                **kwargs)
+
+    def maximum_product_of_split_support_tree(
+            self,
+            include_external_splits=False,
+            score_attr="log_product_of_split_support"):
+        """
+        Return the tree with that maximizes the product of split supports, also
+        known as the "Maximum Clade Credibility Tree" or MCCT.
+
+        Parameters
+        ----------
+        include_external_splits : bool
+            If `True`, then non-internal split posteriors will be included in
+            the score. Defaults to `False`: these are skipped. This should only
+            make a difference when dealing with splits collected from trees of
+            different leaf sets.
+
+        Returns
+        -------
+        mcct_tree : Tree
+            Tree that maximizes the product of split supports.
+        """
+        ta = self._get_tree_array({})
+        scores, max_score_tree_idx = ta.calculate_log_product_of_split_supports(
+                include_external_splits=include_external_splits,
+                )
+        tree = self[max_score_tree_idx]
+        if score_attr is not None:
+            setattr(tree, score_attr, scores[max_score_tree_idx])
+        return tree
+
+    def maximum_sum_of_split_support_tree(
+            self,
+            include_external_splits=False,
+            score_attr="sum_of_split_support"):
+        """
+        Return the tree with that maximizes the *sum* of split supports.
+
+        Parameters
+        ----------
+        include_external_splits : bool
+            If `True`, then non-internal split posteriors will be included in
+            the score. Defaults to `False`: these are skipped. This should only
+            make a difference when dealing with splits collected from trees of
+            different leaf sets.
+
+        Returns
+        -------
+        mcct_tree : Tree
+            Tree that maximizes the sum of split supports.
+        """
+        ta = self._get_tree_array({})
+        scores, max_score_tree_idx = ta.calculate_sum_of_split_supports(
+                include_external_splits=include_external_splits,
+                )
+        tree = self[max_score_tree_idx]
+        if score_attr is not None:
+            setattr(tree, score_attr, scores[max_score_tree_idx])
+        return tree
+
    def frequency_of_bipartition(self, **kwargs):
        """
        Given a bipartition specified as:

            - a |Bipartition| instance given the keyword 'bipartition'
            - a split bitmask given the keyword 'split_bitmask'
            - a list of |Taxon| objects given with the keyword ``taxa``
            - a list of taxon labels given with the keyword ``labels``

        this function returns the proportion of trees in self
        in which the split is found.

        Raises
        ------
        IndexError
            If ``taxa`` or ``labels`` are given but not all of them could be
            mapped to bits in the namespace bitmask.
        """
        # Resolve the requested bipartition into a split bitmask.
        split = None
        if "split_bitmask" in kwargs:
            split = kwargs["split_bitmask"]
        elif "bipartition" in kwargs:
            split = kwargs["bipartition"].split_bitmask
        elif "taxa" in kwargs or "labels" in kwargs:
            split = self.taxon_namespace.taxa_bitmask(**kwargs)
            # Verify that every requested taxon/label contributed a bit:
            # the number of set bits must equal the number of inputs.
            if "taxa" in kwargs:
                k = len(kwargs["taxa"])
            else:
                k = len(kwargs["labels"])
            if bitprocessing.num_set_bits(split) != k:
                raise IndexError('Not all taxa could be mapped to bipartition (%s): %s' \
                    % (self.taxon_namespace.bitmask_as_bitstring(split), k))
        # NOTE(review): if none of the recognized keywords is supplied,
        # ``split`` stays None and the loop below counts trees containing a
        # None split (i.e., always 0 matches) -- confirm this is intended.
        found = 0
        total = 0
        is_bipartitions_updated = kwargs.get("is_bipartitions_updated", False)
        for tree in self:
            # NOTE(review): ``tree.bipartitions`` looks suspect -- the rest of
            # this module tests ``tree.bipartition_encoding``; confirm that
            # the attribute exists on Tree before relying on this
            # short-circuit.
            if not is_bipartitions_updated or not tree.bipartitions:
                tree.encode_bipartitions()
            bipartition_encoding = set(b.split_bitmask for b in tree.bipartition_encoding)
            total += 1
            if split in bipartition_encoding:
                found += 1
        try:
            return float(found)/total
        except ZeroDivisionError:
            # No trees in the collection: report a frequency of 0.
            return 0
+
+    def frequency_of_split(self, **kwargs):
+        """
+        DEPRECATED: use 'frequency_of_bipartition()' instead.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: Instead of 'frequency_of_split()' use 'frequency_of_bipartition()'",
+                stacklevel=4,
+                )
+        return self.frequency_of_bipartition(**kwargs)
+
+###############################################################################
+### SplitDistribution
+
class SplitDistribution(taxonmodel.TaxonNamespaceAssociated):
    """
    Collects information regarding splits over multiple trees: weighted
    counts and, optionally, per-split edge lengths and node ages.
    """

    # Field names used when reporting summary statistics of per-split values
    # (e.g., edge lengths and node ages).
    SUMMARY_STATS_FIELDNAMES = ('mean', 'median', 'sd', 'hpd95', 'quant_5_95', 'range')
+
+
+    def __init__(self,
+            taxon_namespace=None,
+            ignore_edge_lengths=False,
+            ignore_node_ages=True,
+            use_tree_weights=True,
+            ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION):
+
+        # Taxon Namespace
+        taxonmodel.TaxonNamespaceAssociated.__init__(self,
+                taxon_namespace=taxon_namespace)
+
+        # configuration
+        self.ignore_edge_lengths = ignore_edge_lengths
+        self.ignore_node_ages = ignore_node_ages
+        self.use_tree_weights = use_tree_weights
+        self.ultrametricity_precision = ultrametricity_precision
+
+        # storage
+        self.total_trees_counted = 0
+        self.sum_of_tree_weights = 0.0
+        self.tree_rooting_types_counted = set()
+        self.split_counts = collections.defaultdict(float)
+        self.split_edge_lengths = collections.defaultdict(list)
+        self.split_node_ages = collections.defaultdict(list)
+
+        # secondary/derived/generated/collected data
+        self._is_rooted = False
+        self._split_freqs = None
+        self._trees_counted_for_freqs = 0
+        self._split_edge_length_summaries = None
+        self._split_node_age_summaries = None
+        self._trees_counted_for_summaries = 0
+
+        # services
+        self.tree_decorator = None
+
+    ###########################################################################
+    ### Utility
+
+    def normalize_bitmask(self, bitmask):
+        """
+        "Normalizes" split, by ensuring that the least-significant bit is
+        always 1 (used on unrooted trees to establish split identity
+        independent of rotation).
+
+        Parameters
+        ----------
+        bitmask : integer
+            Split bitmask hash to be normalized.
+
+        Returns
+        -------
+        h : integer
+            Normalized split bitmask.
+        """
+        return treemodel.Bipartition.normalize_bitmask(
+                bitmask=bitmask,
+                fill_bitmask=self.taxon_namespace.all_taxa_bitmask(),
+                lowest_relevant_bit=1)
+
+    ###########################################################################
+    ### Configuration
+
    def _is_rooted_deprecation_warning(self):
        # Shared deprecation notice for the legacy rooting-state accessors
        # below.
        deprecate.dendropy_deprecation_warning(
                message="Deprecated since DendroPy 4: 'SplitDistribution.is_rooted' and 'SplitDistribution.is_unrooted' are no longer valid attributes; rooting state tracking and management is now the responsibility of client code.",
                stacklevel=4,
                )
    def _get_is_rooted(self):
        # DEPRECATED getter: warn, then report the internal flag.
        self._is_rooted_deprecation_warning()
        return self._is_rooted
    def _set_is_rooted(self, val):
        # DEPRECATED setter: warn, then set the internal flag.
        self._is_rooted_deprecation_warning()
        self._is_rooted = val
    is_rooted = property(_get_is_rooted, _set_is_rooted)
    def _get_is_unrooted(self):
        # DEPRECATED getter: warn, then report the negated internal flag.
        self._is_rooted_deprecation_warning()
        return not self._is_rooted
    def _set_is_unrooted(self, val):
        # DEPRECATED setter: warn, then store the negation of the value.
        self._is_rooted_deprecation_warning()
        self._is_rooted = not val
    is_unrooted = property(_get_is_unrooted, _set_is_unrooted)
+
+    ###########################################################################
+    ### Split Counting and Book-Keeping
+
+    def add_split_count(self, split, count=1):
+        self.split_counts[split] += count
+
    def count_splits_on_tree(self,
            tree,
            is_bipartitions_updated=False,
            default_edge_length_value=None):
        """
        Counts splits in this tree and add to totals. ``tree`` must be decorated
        with splits, and no attempt is made to normalize taxa.

        Parameters
        ----------
        tree : a |Tree| object.
            The tree on which to count the splits. Must share
            ``self.taxon_namespace``.
        is_bipartitions_updated : bool
            If `False` [default], then the tree will have its splits encoded or
            updated. Otherwise, if `True`, then the tree is assumed to have its
            splits already encoded and updated.
        default_edge_length_value : numeric or `None`
            Value recorded for an edge whose ``length`` is `None` (only used
            when edge lengths are being tracked).

        Returns
        --------
        s : iterable of splits
            A list of split bitmasks from ``tree``.
        e :
            A list of edge length values from ``tree``.
        a :
            A list of node age values from ``tree``.
        """
        assert tree.taxon_namespace is self.taxon_namespace
        self.total_trees_counted += 1
        # Node ages are only meaningful (and only computed) when they are
        # being tracked.
        if not self.ignore_node_ages:
            tree.calc_node_ages(ultrametricity_precision=self.ultrametricity_precision)
        # Each tree contributes its weight (or 1.0 when weights are absent or
        # disabled) to every split count it contains.
        if tree.weight is not None and self.use_tree_weights:
            weight_to_use = float(tree.weight)
        else:
            weight_to_use = 1.0
        self.sum_of_tree_weights += weight_to_use
        # Track whether rooted and/or unrooted trees have been counted.
        if tree.is_rooted:
            self.tree_rooting_types_counted.add(True)
        else:
            self.tree_rooting_types_counted.add(False)
        if not is_bipartitions_updated:
            tree.encode_bipartitions()
        # Per-tree result lists returned to the caller; the per-split
        # accumulators on ``self`` are updated alongside.
        splits = []
        edge_lengths = []
        node_ages = []
        for bipartition in tree.bipartition_encoding:
            split = bipartition.split_bitmask

            ## if edge is stored as an attribute, might be faster to:
            # edge = bipartition.edge
            edge = tree.bipartition_edge_map[bipartition]

            splits.append(split)
            self.split_counts[split] += weight_to_use
            if not self.ignore_edge_lengths:
                # Edges without a length are recorded as the caller-supplied
                # default (which may be None).
                sel = self.split_edge_lengths.setdefault(split,[])
                if edge.length is None:
                    elen = default_edge_length_value
                else:
                    elen = edge.length
                sel.append(elen)
                edge_lengths.append(elen)
            else:
                sel = None
            if not self.ignore_node_ages:
                sna = self.split_node_ages.setdefault(split, [])
                if edge.head_node is not None:
                    nage = edge.head_node.age
                else:
                    nage = None
                sna.append(nage)
                node_ages.append(nage)
            else:
                sna = None
        return splits, edge_lengths, node_ages
+
+    def splits_considered(self):
+        """
+        Returns 4 values:
+            total number of splits counted
+            total *weighted* number of unique splits counted
+            total number of non-trivial splits counted
+            total *weighted* number of unique non-trivial splits counted
+        """
+        if not self.split_counts:
+            return 0, 0, 0, 0
+        num_splits = 0
+        num_unique_splits = 0
+        num_nt_splits = 0
+        num_nt_unique_splits = 0
+        taxa_mask = self.taxon_namespace.all_taxa_bitmask()
+        for s in self.split_counts:
+            num_unique_splits += 1
+            num_splits += self.split_counts[s]
+            if not treemodel.Bipartition.is_trivial_bitmask(s, taxa_mask):
+                num_nt_unique_splits += 1
+                num_nt_splits += self.split_counts[s]
+        return num_splits, num_unique_splits, num_nt_splits, num_nt_unique_splits
+
+    def calc_freqs(self):
+        "Forces recalculation of frequencies."
+        self._split_freqs = {}
+        if self.total_trees_counted == 0:
+            for split in self.split_counts:
+                self._split_freqs[split] = 1.0
+        else:
+            normalization_weight = self.calc_normalization_weight()
+            for split in self.split_counts:
+                count = self.split_counts[split]
+                self._split_freqs[split] = float(self.split_counts[split]) / normalization_weight
+        self._trees_counted_for_freqs = self.total_trees_counted
+        self._split_edge_length_summaries = None
+        self._split_node_age_summaries = None
+        return self._split_freqs
+
+    def calc_normalization_weight(self):
+        if not self.sum_of_tree_weights:
+            return self.total_trees_counted
+        else:
+            return float(self.sum_of_tree_weights)
+
    def update(self, split_dist):
        """
        Merges the counts, edge lengths, and node ages accumulated by
        another split distribution into this one.

        Parameters
        ----------
        split_dist : SplitDistribution
            The distribution whose tallies are folded in. Assumed to share
            this distribution's taxon namespace (not checked here).
        """
        self.total_trees_counted += split_dist.total_trees_counted
        self.sum_of_tree_weights += split_dist.sum_of_tree_weights
        # Invalidate cached summaries so they are lazily recomputed.
        self._split_edge_length_summaries = None
        self._split_node_age_summaries = None
        self._trees_counted_for_summaries = 0
        self.tree_rooting_types_counted.update(split_dist.tree_rooting_types_counted)
        for split in split_dist.split_counts:
            # NOTE(review): the "+=" updates below presume the per-split
            # containers supply addable defaults (e.g. defaultdicts of
            # numbers/lists) even for splits not yet seen in ``self`` —
            # confirm against the class initializer, which is outside this view.
            self.split_counts[split] += split_dist.split_counts[split]
            self.split_edge_lengths[split] += split_dist.split_edge_lengths[split]
            self.split_node_ages[split] += split_dist.split_node_ages[split]
+
+    ###########################################################################
+    ### Basic Information Access
+
    def __len__(self):
        """Returns the number of distinct splits tracked by this distribution."""
        return len(self.split_counts)
+
+    def __iter__(self):
+        for s in self.split_counts:
+            yield s
+
+    def __getitem__(self, split_bitmask):
+        """
+        Returns freqency of split_bitmask.
+        """
+        return self._get_split_frequencies().get(split_bitmask, 0.0)
+
+    def _get_split_frequencies(self):
+        if self._split_freqs is None or self._trees_counted_for_freqs != self.total_trees_counted:
+            self.calc_freqs()
+        return self._split_freqs
+    split_frequencies = property(_get_split_frequencies)
+
+    def is_mixed_rootings_counted(self):
+        return ( (True in self.tree_rooting_types_counted)
+                and (False in self.tree_rooting_types_counted or None in self.tree_rooting_types_counted) )
+
+    def is_all_counted_trees_rooted(self):
+        return (True in self.tree_rooting_types_counted) and (len(self.tree_rooting_types_counted) == 1)
+
+    def is_all_counted_trees_strictly_unrooted(self):
+        return (False in self.tree_rooting_types_counted) and (len(self.tree_rooting_types_counted) == 1)
+
+    def is_all_counted_trees_treated_as_unrooted(self):
+        return True not in self.tree_rooting_types_counted
+
+    ###########################################################################
+    ### Summarization
+
+    def split_support_iter(self,
+            tree,
+            is_bipartitions_updated=False,
+            include_external_splits=False,
+            traversal_strategy="preorder",
+            node_support_attr_name=None,
+            edge_support_attr_name=None,
+            ):
+        """
+        Returns iterator over support values for the splits of a given tree,
+        where the support value is given by the proportional frequency of the
+        split in the current split distribution.
+
+        Parameters
+        ----------
+        tree : |Tree|
+            The |Tree| which will be scored.
+        is_bipartitions_updated : bool
+            If `False` [default], then the tree will have its splits encoded or
+            updated. Otherwise, if `True`, then the tree is assumed to have its
+            splits already encoded and updated.
+        include_external_splits : bool
+            If `True`, then non-internal split posteriors will be included.
+            If `False`, then these are skipped. This should only make a
+            difference when dealing with splits collected from trees of
+            different leaf sets.
+        traversal_strategy : str
+            One of: "preorder" or "postorder". Specfies order in which splits
+            are visited.
+
+        Returns
+        -------
+        s : list of floats
+            List of values for splits in the tree corresponding to the
+            proportional frequency that the split is found in the current
+            distribution.
+        """
+        if traversal_strategy == "preorder":
+            if include_external_splits:
+                iter_fn = tree.preorder_node_iter
+            else:
+                iter_fn = tree.preorder_internal_node_iter
+        elif traversal_strategy == "postorder":
+            if include_external_splits:
+                iter_fn = tree.postorder_node_iter
+            else:
+                iter_fn = tree.postorder_internal_node_iter
+        else:
+            raise ValueError("Traversal strategy not supported: '{}'".format(traversal_strategy))
+        if not is_bipartitions_updated:
+            tree.encode_bipartitions()
+        split_frequencies = self._get_split_frequencies()
+        for nd in iter_fn():
+            split = nd.edge.split_bitmask
+            support = split_frequencies.get(split, 0.0)
+            yield support
+
+    def calc_split_edge_length_summaries(self):
+        self._split_edge_length_summaries = {}
+        for split, elens in self.split_edge_lengths.items():
+            if not elens:
+                continue
+            try:
+                self._split_edge_length_summaries[split] = statistics.summarize(elens)
+            except ValueError:
+                pass
+        return self._split_edge_length_summaries
+
+    def calc_split_node_age_summaries(self):
+        self._split_node_age_summaries = {}
+        for split, ages in self.split_node_ages.items():
+            if not ages:
+                continue
+            try:
+                self._split_node_age_summaries[split] = statistics.summarize(ages)
+            except ValueError:
+                pass
+        return self._split_node_age_summaries
+
+    def _get_split_edge_length_summaries(self):
+        if self._split_edge_length_summaries is None \
+                or self._trees_counted_for_summaries != self.total_trees_counted:
+            self.calc_split_edge_length_summaries()
+        return self._split_edge_length_summaries
+    split_edge_length_summaries = property(_get_split_edge_length_summaries)
+
+    def _get_split_node_age_summaries(self):
+        if self._split_node_age_summaries is None \
+                or self._trees_counted_for_summaries != self.total_trees_counted:
+            self.calc_split_node_age_summaries()
+        return self._split_node_age_summaries
+    split_node_age_summaries = property(_get_split_node_age_summaries)
+
+    def log_product_of_split_support_on_tree(self,
+            tree,
+            is_bipartitions_updated=False,
+            include_external_splits=False,
+            ):
+        """
+        Calculates the (log) product of the support of the splits of the
+        tree, where the support is given by the proportional frequency of the
+        split in the current split distribution.
+
+        The tree that has the highest product of split support out of a sample
+        of trees corresponds to the "maximum credibility tree" for that sample.
+        This can also be referred to as the "maximum clade credibility tree",
+        though this latter term is sometimes use for the tree that has the
+        highest *sum* of split support (see
+        :meth:`SplitDistribution.sum_of_split_support_on_tree()`).
+
+        Parameters
+        ----------
+        tree : |Tree|
+            The tree for which the score should be calculated.
+        is_bipartitions_updated : bool
+            If `True`, then the splits are assumed to have already been encoded
+            and will not be updated on the trees.
+        include_external_splits : bool
+            If `True`, then non-internal split posteriors will be included in
+            the score. Defaults to `False`: these are skipped. This should only
+            make a difference when dealing with splits collected from trees of
+            different leaf sets.
+
+        Returns
+        -------
+        s : numeric
+            The log product of the support of the splits of the tree.
+        """
+        log_product_of_split_support = 0.0
+        for split_support in self.split_support_iter(
+                tree=tree,
+                is_bipartitions_updated=is_bipartitions_updated,
+                include_external_splits=include_external_splits,
+                traversal_strategy="preorder",
+                ):
+            if split_support:
+                log_product_of_split_support += math.log(split_support)
+        return log_product_of_split_support
+
+    def sum_of_split_support_on_tree(self,
+            tree,
+            is_bipartitions_updated=False,
+            include_external_splits=False,
+            ):
+        """
+        Calculates the sum of the support of the splits of the tree, where the
+        support is given by the proportional frequency of the split in the
+        current distribtion.
+
+        Parameters
+        ----------
+        tree : |Tree|
+            The tree for which the score should be calculated.
+        is_bipartitions_updated : bool
+            If `True`, then the splits are assumed to have already been encoded
+            and will not be updated on the trees.
+        include_external_splits : bool
+            If `True`, then non-internal split posteriors will be included in
+            the score. Defaults to `False`: these are skipped. This should only
+            make a difference when dealing with splits collected from trees of
+            different leaf sets.
+
+        Returns
+        -------
+        s : numeric
+            The sum of the support of the splits of the tree.
+        """
+        sum_of_split_support = 0.0
+        for split_support in self.split_support_iter(
+                tree=tree,
+                is_bipartitions_updated=is_bipartitions_updated,
+                include_external_splits=include_external_splits,
+                traversal_strategy="preorder",
+                ):
+            sum_of_split_support += split_support
+        return sum_of_split_support
+
+    def consensus_tree(self,
+            min_freq=constants.GREATER_THAN_HALF,
+            is_rooted=None,
+            summarize_splits=True,
+            **split_summarization_kwargs
+            ):
+        """
+        Returns a consensus tree from splits in ``self``.
+
+        Parameters
+        ----------
+
+        min_freq : real
+            The minimum frequency of a split in this distribution for it to be
+            added to the tree.
+
+        is_rooted : bool
+            Should tree be rooted or not? If *all* trees counted for splits are
+            explicitly rooted or unrooted, then this will default to `True` or
+            `False`, respectively. Otherwise it defaults to `None`.
+
+        \*\*split_summarization_kwargs : keyword arguments
+            These will be passed directly to the underlying
+            `SplitDistributionSummarizer` object. See
+            :meth:`SplitDistributionSummarizer.configure` for options.
+
+        Returns
+        -------
+        t : consensus tree
+
+        """
+        if is_rooted is None:
+            if self.is_all_counted_trees_rooted():
+                is_rooted = True
+            elif self.is_all_counted_trees_strictly_unrooted:
+                is_rooted = False
+        split_frequencies = self._get_split_frequencies()
+        to_try_to_add = []
+        _almost_one = lambda x: abs(x - 1.0) <= 0.0000001
+        for s in split_frequencies:
+            freq = split_frequencies[s]
+            if (min_freq is None) or (freq >= min_freq) or (_almost_one(min_freq) and _almost_one(freq)):
+                to_try_to_add.append((freq, s))
+        to_try_to_add.sort(reverse=True)
+        splits_for_tree = [i[1] for i in to_try_to_add]
+        con_tree = treemodel.Tree.from_split_bitmasks(
+                split_bitmasks=splits_for_tree,
+                taxon_namespace=self.taxon_namespace,
+                is_rooted=is_rooted)
+        if summarize_splits:
+            self.summarize_splits_on_tree(
+                tree=con_tree,
+                is_bipartitions_updated=False,
+                **split_summarization_kwargs
+                )
+        return con_tree
+
+    def summarize_splits_on_tree(self,
+            tree,
+            is_bipartitions_updated=False,
+            **split_summarization_kwargs
+            ):
+        """
+        Summarizes support of splits/edges/node on tree.
+
+        Parameters
+        ----------
+
+        tree: |Tree| instance
+            Tree to be decorated with support values.
+
+        is_bipartitions_updated: bool
+            If `True`, then bipartitions will not be recalculated.
+
+        \*\*split_summarization_kwargs : keyword arguments
+            These will be passed directly to the underlying
+            `SplitDistributionSummarizer` object. See
+            :meth:`SplitDistributionSummarizer.configure` for options.
+
+        """
+        if self.taxon_namespace is not tree.taxon_namespace:
+            raise error.TaxonNamespaceIdentityError(self, tree)
+        if self.tree_decorator is None:
+            self.tree_decorator = SplitDistributionSummarizer()
+        self.tree_decorator.configure(**split_summarization_kwargs)
+        self.tree_decorator.summarize_splits_on_tree(
+                split_distribution=self,
+                tree=tree,
+                is_bipartitions_updated=is_bipartitions_updated)
+        return tree
+
+    ###########################################################################
+    ### legacy
+
+    def _get_taxon_set(self):
+        from dendropy import taxonmodel
+        taxon_model.taxon_set_deprecation_warning()
+        return self.taxon_namespace
+
+    def _set_taxon_set(self, v):
+        from dendropy import taxonmodel
+        taxon_model.taxon_set_deprecation_warning()
+        self.taxon_namespace = v
+
+    def _del_taxon_set(self):
+        from dendropy import taxonmodel
+        taxon_model.taxon_set_deprecation_warning()
+
+    taxon_set = property(_get_taxon_set, _set_taxon_set, _del_taxon_set)
+
+###############################################################################
+### SplitDistributionSummarizer
+
class SplitDistributionSummarizer(object):
    """
    Decorates trees with summary information (split support, node-age and
    edge-length summaries) drawn from a split distribution.
    """

    def __init__(self, **kwargs):
        """
        See :meth:`SplitDistributionSummarizer.configure` for configuration
        options.
        """
        self.configure(**kwargs)

    def configure(self, **kwargs):
        """
        Configure rendition/mark-up.

        Parameters
        ----------

        set_edge_lengths : string
            For each edge, set the length based on:

                - "support": use support values split corresponding to edge
                - "mean-length": mean of edge lengths for split
                - "median-length": median of edge lengths for split
                - "mean-age": such that split age is equal to mean of ages
                - "median-age": such that split age is equal to median of ages
                - `None`: do not set edge lengths

        add_support_as_node_attribute: bool
            Adds each node's support value as an attribute of the node,
            "``support``".

        add_support_as_node_annotation: bool
            Adds support as a metadata annotation, "``support``". If
            ``add_support_as_node_attribute`` is `True`, then the value will be
            dynamically-bound to the value of the node's "``support``" attribute.

        set_support_as_node_label : bool
            Sets the ``label`` attribute of each node to the support value.

        add_node_age_summaries_as_node_attributes: bool
            Summarizes the distribution of the ages of each node in the
            following attributes:

                - ``age_mean``
                - ``age_median``
                - ``age_sd``
                - ``age_hpd95``
                - ``age_range``

        add_node_age_summaries_as_node_annotations: bool
            Summarizes the distribution of the ages of each node in the
            following metadata annotations:

                - ``age_mean``
                - ``age_median``
                - ``age_sd``
                - ``age_hpd95``
                - ``age_range``

            If ``add_node_age_summaries_as_node_attributes`` is `True`, then the
            values will be dynamically-bound to the corresponding node
            attributes.

        add_edge_length_summaries_as_edge_attributes: bool
            Summarizes the distribution of the lengths of each edge in the
            following attributes:

                - ``length_mean``
                - ``length_median``
                - ``length_sd``
                - ``length_hpd95``
                - ``length_range``

        add_edge_length_summaries_as_edge_annotations: bool
            Summarizes the distribution of the lengths of each edge in the
            following metadata annotations:

                - ``length_mean``
                - ``length_median``
                - ``length_sd``
                - ``length_hpd95``
                - ``length_range``

            If ``add_edge_length_summaries_as_edge_attributes`` is `True`, then the
            values will be dynamically-bound to the corresponding edge
            attributes.

        support_label_decimals: int
            Number of decimal places to express when rendering the support
            value as a string for the node label.

        support_as_percentages: bool
            Whether or not to express the support value as percentages (default
            is probability or proportion).

        minimum_edge_length : numeric
            All edge lengths calculated to have a value less than this will be
            set to this.

        error_on_negative_edge_lengths : bool
            If `True`, an inferred edge length that is less than 0 will result
            in a ValueError.

        no_data_value : str or float
            Value to substitute in for node ages, edge lengths and associated
            summary statistics if a split is not found. Defaults to 0.0.

        """
        self.set_edge_lengths = kwargs.pop("set_edge_lengths", None)
        self.add_support_as_node_attribute = kwargs.pop("add_support_as_node_attribute", True)
        self.add_support_as_node_annotation = kwargs.pop("add_support_as_node_annotation", True)
        self.set_support_as_node_label = kwargs.pop("set_support_as_node_label", None)
        self.add_node_age_summaries_as_node_attributes = kwargs.pop("add_node_age_summaries_as_node_attributes", True)
        self.add_node_age_summaries_as_node_annotations = kwargs.pop("add_node_age_summaries_as_node_annotations", True)
        self.add_edge_length_summaries_as_edge_attributes = kwargs.pop("add_edge_length_summaries_as_edge_attributes", True)
        self.add_edge_length_summaries_as_edge_annotations = kwargs.pop("add_edge_length_summaries_as_edge_annotations", True)
        self.support_label_decimals = kwargs.pop("support_label_decimals", 4)
        self.support_as_percentages = kwargs.pop("support_as_percentages", False)
        self.support_label_compose_fn = kwargs.pop("support_label_compose_fn", None)
        self.primary_fieldnames = ["support",]
        self.summary_stats_fieldnames = SplitDistribution.SUMMARY_STATS_FIELDNAMES
        self.node_age_summaries_fieldnames = list("age_{}".format(f) for f in self.summary_stats_fieldnames)
        self.edge_length_summaries_fieldnames = list("length_{}".format(f) for f in self.summary_stats_fieldnames)
        self.fieldnames = self.primary_fieldnames + self.node_age_summaries_fieldnames + self.edge_length_summaries_fieldnames
        # For each field, allow per-field overrides of the attribute name,
        # the annotation name, and whether the annotation is dynamically
        # bound to the attribute.
        for fieldname in self.fieldnames:
            setattr(self, "{}_attr_name".format(fieldname), kwargs.pop("{}_attr_name".format(fieldname), fieldname))
            setattr(self, "{}_annotation_name".format(fieldname), kwargs.pop("{}_annotation_name".format(fieldname), fieldname))
            setattr(self, "is_{}_annotation_dynamic".format(fieldname), kwargs.pop("is_{}_annotation_dynamic".format(fieldname), True))
        self.minimum_edge_length = kwargs.pop("minimum_edge_length", None)
        self.error_on_negative_edge_lengths = kwargs.pop("error_on_negative_edge_lengths", False)
        self.no_data_value = kwargs.pop("no_data_value", 0.0)
        if kwargs:
            # Bug fix: the TypeError was previously constructed but never
            # raised, silently accepting unrecognized arguments.
            raise TypeError("Unrecognized or unsupported arguments: {}".format(kwargs))

    def _decorate(self,
            target,
            fieldname,
            value,
            set_attribute,
            set_annotation,
            ):
        """
        Sets ``value`` on ``target`` (a node or edge) as an attribute and/or
        metadata annotation, using the configured names for ``fieldname``.
        """
        attr_name = getattr(self, "{}_attr_name".format(fieldname))
        annotation_name = getattr(self, "{}_annotation_name".format(fieldname))
        if set_attribute:
            setattr(target, attr_name, value)
            if set_annotation:
                # Replace any stale annotation of the same name.
                target.annotations.drop(name=annotation_name)
                if getattr(self, "is_{}_annotation_dynamic".format(fieldname)):
                    target.annotations.add_bound_attribute(
                        attr_name=attr_name,
                        annotation_name=annotation_name,
                        )
                else:
                    target.annotations.add_new(
                            name=annotation_name,
                            value=value,
                            )
        elif set_annotation:
            target.annotations.drop(name=annotation_name)
            target.annotations.add_new(
                    name=annotation_name,
                    value=value,
                    )

    def summarize_splits_on_tree(self,
            split_distribution,
            tree,
            is_bipartitions_updated=False):
        """
        Decorates each node and edge of ``tree`` with support values and
        (optionally) node-age/edge-length summaries from
        ``split_distribution``, and sets edge lengths according to the
        configured ``set_edge_lengths`` policy.

        Parameters
        ----------
        split_distribution : SplitDistribution
            Source of split frequencies and summaries; must share the
            tree's taxon namespace.
        tree : |Tree|
            The tree to decorate (modified in place and returned).
        is_bipartitions_updated : bool
            If `True`, then bipartitions will not be recalculated.
        """
        if split_distribution.taxon_namespace is not tree.taxon_namespace:
            raise error.TaxonNamespaceIdentityError(split_distribution, tree)
        if not is_bipartitions_updated:
            tree.encode_bipartitions()
        if self.support_label_compose_fn is not None:
            support_label_fn = lambda freq: self.support_label_compose_fn(freq)
        else:
            support_label_fn = lambda freq: "{:.{places}f}".format(freq, places=self.support_label_decimals)
        node_age_summaries = split_distribution.split_node_age_summaries
        edge_length_summaries = split_distribution.split_edge_length_summaries
        split_freqs = split_distribution.split_frequencies
        assert len(self.node_age_summaries_fieldnames) == len(self.summary_stats_fieldnames)
        for node in tree:
            split_bitmask = node.edge.bipartition.split_bitmask
            split_support = split_freqs.get(split_bitmask, 0.0)
            if self.support_as_percentages:
                split_support = split_support * 100
            self._decorate(
                target=node,
                fieldname="support",
                value=split_support,
                set_attribute=self.add_support_as_node_attribute,
                set_annotation=self.add_support_as_node_annotation,
                )
            if self.set_support_as_node_label:
                node.label = support_label_fn(split_support)
            if (self.add_node_age_summaries_as_node_attributes or self.add_node_age_summaries_as_node_annotations) and node_age_summaries:
                for fieldname, stats_fieldname in zip(self.node_age_summaries_fieldnames, self.summary_stats_fieldnames):
                    if not node_age_summaries or split_bitmask not in node_age_summaries:
                        value = self.no_data_value
                    else:
                        value = node_age_summaries[split_bitmask].get(stats_fieldname, 0.0)
                    self._decorate(
                        target=node,
                        fieldname=fieldname,
                        value=value,
                        set_attribute=self.add_node_age_summaries_as_node_attributes,
                        set_annotation=self.add_node_age_summaries_as_node_annotations,
                        )
            if (self.add_edge_length_summaries_as_edge_attributes or self.add_edge_length_summaries_as_edge_annotations) and edge_length_summaries:
                for fieldname, stats_fieldname in zip(self.edge_length_summaries_fieldnames, self.summary_stats_fieldnames):
                    if not edge_length_summaries or split_bitmask not in edge_length_summaries:
                        value = self.no_data_value
                    else:
                        value = edge_length_summaries[split_bitmask].get(stats_fieldname, 0.0)
                    self._decorate(
                        target=node.edge,
                        fieldname=fieldname,
                        value=value,
                        set_attribute=self.add_edge_length_summaries_as_edge_attributes,
                        set_annotation=self.add_edge_length_summaries_as_edge_annotations,
                        )
            if self.set_edge_lengths is None:
                pass
            elif self.set_edge_lengths == "keep":
                pass
            elif self.set_edge_lengths == "support":
                node.edge.length = split_support
            elif self.set_edge_lengths == "clear":
                # Bug fix: previously assigned to the undefined local
                # name ``edge``, raising a NameError.
                node.edge.length = None
            elif self.set_edge_lengths in ("mean-age", "median-age"):
                if not node_age_summaries:
                    raise ValueError("Node ages not available")
                if self.set_edge_lengths == "mean-age":
                    try:
                        node.age = node_age_summaries[split_bitmask]["mean"]
                    except KeyError:
                        node.age = self.no_data_value
                elif self.set_edge_lengths == "median-age":
                    try:
                        node.age = node_age_summaries[split_bitmask]["median"]
                    except KeyError:
                        node.age = self.no_data_value
                else:
                    raise ValueError(self.set_edge_lengths)
            elif self.set_edge_lengths in ("mean-length", "median-length"):
                if not edge_length_summaries:
                    raise ValueError("Edge lengths not available")
                if self.set_edge_lengths == "mean-length":
                    try:
                        node.edge.length = edge_length_summaries[split_bitmask]["mean"]
                    except KeyError:
                        node.edge.length = self.no_data_value
                elif self.set_edge_lengths == "median-length":
                    try:
                        node.edge.length = edge_length_summaries[split_bitmask]["median"]
                    except KeyError:
                        node.edge.length = self.no_data_value
                else:
                    raise ValueError(self.set_edge_lengths)
                # Bug fix: previously compared/assigned the undefined local
                # name ``edge``, raising a NameError.
                if self.minimum_edge_length is not None and node.edge.length < self.minimum_edge_length:
                    node.edge.length = self.minimum_edge_length
            else:
                raise ValueError(self.set_edge_lengths)
        if self.set_edge_lengths in ("mean-age", "median-age"):
            # Ages were set per-node above; derive edge lengths from them.
            tree.set_edge_lengths_from_node_ages(
                    minimum_edge_length=self.minimum_edge_length,
                    error_on_negative_edge_lengths=self.error_on_negative_edge_lengths)
        elif self.set_edge_lengths not in ("keep", "clear", None) and self.minimum_edge_length is not None:
            for node in tree:
                if node.edge.length is None:
                    node.edge.length = self.minimum_edge_length
                elif node.edge.length < self.minimum_edge_length:
                    node.edge.length = self.minimum_edge_length
        return tree
+
+###############################################################################
+### TreeArray
+
+class TreeArray(
+        taxonmodel.TaxonNamespaceAssociated,
+        basemodel.MultiReadable,
+        ):
+    """
+    High-performance collection of tree structures.
+
+    Storage of minimal tree structural information as represented by toplogy
+    and edge lengths, minimizing memory and processing time.
+    This class stores trees as collections of splits and edge lengths. All
+    other information, such as labels, metadata annotations, etc. will be
+    discarded. A full |Tree| instance can be reconstructed as needed
+    from the structural information stored by this class, at the cost of
+    computation time.
+    """
+
    # Exception hierarchy for TreeArray operations. NOTE(review): judging by
    # the names, these signal that data being merged in conflicts with this
    # array's configuration (rooting, edge lengths, node ages, weights) —
    # their raise sites are outside this view; confirm before relying on this.
    class IncompatibleTreeArrayUpdate(Exception):
        # Base class for the incompatibility errors below.
        pass
    class IncompatibleRootingTreeArrayUpdate(IncompatibleTreeArrayUpdate):
        pass
    class IncompatibleEdgeLengthsTreeArrayUpdate(IncompatibleTreeArrayUpdate):
        pass
    class IncompatibleNodeAgesTreeArrayUpdate(IncompatibleTreeArrayUpdate):
        pass
    class IncompatibleTreeWeightsTreeArrayUpdate(IncompatibleTreeArrayUpdate):
        pass
+
+    ##############################################################################
+    ## Factory Function
+
+    @classmethod
+    def from_tree_list(cls,
+            trees,
+            is_rooted_trees=None,
+            ignore_edge_lengths=False,
+            ignore_node_ages=True,
+            use_tree_weights=True,
+            ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION,
+            is_bipartitions_updated=False,
+            ):
+        taxon_namespace = trees.taxon_namespace
+        ta = cls(
+            taxon_namespace=taxon_namespace,
+            is_rooted_trees=is_rooted_trees,
+            ignore_edge_lengths=ignore_edge_lengths,
+            ignore_node_ages=ignore_node_ages,
+            use_tree_weights=use_tree_weights,
+            ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION,
+            )
+        ta.add_trees(
+                trees=trees,
+                is_bipartitions_updated=is_bipartitions_updated)
+        return ta
+
+    ##############################################################################
+    ## Life-Cycle
+
+    def __init__(self,
+            taxon_namespace=None,
+            is_rooted_trees=None,
+            ignore_edge_lengths=False,
+            ignore_node_ages=True,
+            use_tree_weights=True,
+            ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION,
+            ):
+        """
+        Parameters
+        ----------
+        taxon_namespace : |TaxonNamespace|
+            The operational taxonomic unit concept namespace to manage taxon
+            references.
+        is_rooted_trees : bool
+            If not set, then it will be set based on the rooting state of the
+            first tree added. If `True`, then trying to add an unrooted tree
+            will result in an error. If `False`, then trying to add a rooted
+            tree will result in an error.
+        ignore_edge_lengths : bool
+            If `True`, then edge lengths of splits will not be stored. If
+            `False`, then edge lengths will be stored.
+        ignore_node_ages : bool
+            If `True`, then node ages of splits will not be stored. If
+            `False`, then node ages will be stored.
+        use_tree_weights : bool
+            If `False`, then tree weights will not be used to weight splits.
+        ultrametricity_precision : numeric
+            Passed through to the underlying `SplitDistribution`;
+            presumably the tolerance used when computing node ages on
+            ultrametric trees -- confirm against `SplitDistribution`.
+        """
+        taxonmodel.TaxonNamespaceAssociated.__init__(self,
+                taxon_namespace=taxon_namespace)
+
+        # Configuration
+        # None until the first tree added fixes the rooting state
+        # (see ``validate_rooting()``).
+        self._is_rooted_trees = is_rooted_trees
+        self.ignore_edge_lengths = ignore_edge_lengths
+        self.ignore_node_ages = ignore_node_ages
+        self.use_tree_weights = use_tree_weights
+        self.default_edge_length_value = 0 # edge.length of `None` gets this value
+        # Class used when reconstructing full trees (see ``restore_tree()``).
+        self.tree_type = treemodel.Tree
+
+        # Storage: parallel lists, one entry per accessioned tree.
+        self._tree_split_bitmasks = []
+        self._tree_edge_lengths = []
+        self._tree_leafset_bitmasks = []
+        self._tree_weights = []
+        # Aggregate distribution of splits across all accessioned trees.
+        self._split_distribution = SplitDistribution(
+                taxon_namespace=self.taxon_namespace,
+                ignore_edge_lengths=self.ignore_edge_lengths,
+                ignore_node_ages=self.ignore_node_ages,
+                ultrametricity_precision=ultrametricity_precision,
+                )
+
+    ##############################################################################
+    ## Book-Keeping
+
+    def _get_is_rooted_trees(self):
+        # Read-only: the rooting state is fixed once the first tree is
+        # accessioned (see ``validate_rooting()``).
+        return self._is_rooted_trees
+    is_rooted_trees = property(_get_is_rooted_trees)
+
+    def _get_split_distribution(self):
+        # Read-only access to the underlying SplitDistribution accumulator.
+        return self._split_distribution
+    split_distribution = property(_get_split_distribution)
+
+    def validate_rooting(self, rooting_of_other):
+        if self._is_rooted_trees is None:
+            self._is_rooted_trees = rooting_of_other
+        elif self._is_rooted_trees != rooting_of_other:
+            if self._is_rooted_trees:
+                ta = "rooted"
+                t = "unrooted"
+            else:
+                ta = "unrooted"
+                t = "rooted"
+            raise error.MixedRootingError("Cannot add {tree_rooting} tree to TreeArray with {tree_array_rooting} trees".format(
+                tree_rooting=t,
+                tree_array_rooting=ta))
+
+    ##############################################################################
+    ## Updating from Another TreeArray
+
+    def update(self, other):
+        if len(self) > 0:
+            # self.validate_rooting(other._is_rooted_trees)
+            if self._is_rooted_trees is not other._is_rooted_trees:
+                raise TreeArray.IncompatibleRootingTreeArrayUpdate("Updating from incompatible TreeArray: 'is_rooted_trees' should be '{}', but is instead '{}'".format(other._is_rooted_trees, self._is_rooted_trees, ))
+            if self.ignore_edge_lengths is not other.ignore_edge_lengths:
+                raise TreeArray.IncompatibleEdgeLengthsTreeArrayUpdate("Updating from incompatible TreeArray: 'ignore_edge_lengths' is not: {} ".format(other.ignore_edge_lengths, self.ignore_edge_lengths, ))
+            if self.ignore_node_ages is not other.ignore_node_ages:
+                raise TreeArray.IncompatibleNodeAgesTreeArrayUpdate("Updating from incompatible TreeArray: 'ignore_node_ages' should be '{}', but is instead '{}'".format(other.ignore_node_ages, self.ignore_node_ages))
+            if self.use_tree_weights is not other.use_tree_weights:
+                raise TreeArray.IncompatibleTreeWeightsTreeArrayUpdate("Updating from incompatible TreeArray: 'use_tree_weights' should be '{}', but is instead '{}'".format(other.use_tree_weights, self.use_tree_weights))
+        else:
+            self._is_rooted_trees = other._is_rooted_trees
+            self.ignore_edge_lengths = other.ignore_edge_lengths
+            self.ignore_node_ages = other.ignore_node_ages
+            self.use_tree_weights = other.use_tree_weights
+        self._tree_split_bitmasks.extend(other._tree_split_bitmasks)
+        self._tree_edge_lengths.extend(other._tree_edge_lengths)
+        self._tree_leafset_bitmasks.extend(other._tree_leafset_bitmasks)
+        self._tree_weights.extend(other._tree_weights)
+        self._split_distribution.update(other._split_distribution)
+
+    ##############################################################################
+    ## Fundamental Tree Accession
+
+    def add_tree(self,
+            tree,
+            is_bipartitions_updated=False,
+            index=None):
+        """
+        Adds the structure represented by a |Tree| instance to the
+        collection.
+
+        Parameters
+        ----------
+        tree : |Tree|
+            A |Tree| instance. This must have the same rooting state as
+            all the other trees accessioned into this collection as well as
+            that of ``self.is_rooted_trees``.
+        is_bipartitions_updated : bool
+            If `False` [default], then the tree will have its splits encoded or
+            updated. Otherwise, if `True`, then the tree is assumed to have its
+            splits already encoded and updated.
+        index : integer
+            Insert before index.
+
+        Returns
+        -------
+        index : int
+            The index of the accession.
+        s : iterable of splits
+            A list of split bitmasks from ``tree``.
+        e :
+            A list of edge length values from ``tree``.
+        """
+        if self.taxon_namespace is not tree.taxon_namespace:
+            raise error.TaxonNamespaceIdentityError(self, tree)
+        self.validate_rooting(tree.is_rooted)
+        # Count the tree's splits into the shared SplitDistribution; this
+        # also yields the per-tree split/edge-length/node-age vectors.
+        splits, edge_lengths, node_ages = self._split_distribution.count_splits_on_tree(
+                tree=tree,
+                is_bipartitions_updated=is_bipartitions_updated,
+                default_edge_length_value=self.default_edge_length_value)
+
+        # pre-process splits
+        splits = tuple(splits)
+
+        # pre-process edge lengths: store a placeholder-per-split vector
+        # when edge lengths are ignored, so storage lists stay parallel
+        if self.ignore_edge_lengths:
+            # edge_lengths = tuple( [None] * len(splits) )
+            edge_lengths = tuple( None for x in range(len(splits)) )
+        else:
+            assert len(splits) == len(edge_lengths), "Unequal vectors:\n    Splits: {}\n    Edges: {}\n".format(splits, edge_lengths)
+            edge_lengths = tuple(edge_lengths)
+
+        # pre-process weights: unweighted trees (or weighting disabled)
+        # count with weight 1.0
+        if tree.weight is not None and self.use_tree_weights:
+            weight_to_use = float(tree.weight)
+        else:
+            weight_to_use = 1.0
+
+        # accession info
+        if index is None:
+            # append to the end of each of the parallel storage lists
+            index = len(self._tree_split_bitmasks)
+            self._tree_split_bitmasks.append(splits)
+            self._tree_leafset_bitmasks.append(tree.seed_node.edge.bipartition.leafset_bitmask)
+            self._tree_edge_lengths.append(edge_lengths)
+            self._tree_weights.append(weight_to_use)
+        else:
+            # insert before ``index`` in each of the parallel storage lists
+            self._tree_split_bitmasks.insert(index, splits)
+            self._tree_leafset_bitmasks.insert(index,
+                    tree.seed_node.edge.bipartition.leafset_bitmask)
+            self._tree_edge_lengths.insert(index, edge_lengths)
+            self._tree_weights.insert(index, weight_to_use)
+        return index, splits, edge_lengths, weight_to_use
+
+
+    def add_trees(self, trees, is_bipartitions_updated=False):
+        """
+        Adds multiple structures represneted by an iterator over or iterable of
+        |Tree| instances to the collection.
+
+        Parameters
+        ----------
+        trees : iterator over or iterable of |Tree| instances
+            An iterator over or iterable of |Tree| instances. Thess must
+            have the same rooting state as all the other trees accessioned into
+            this collection as well as that of ``self.is_rooted_trees``.
+        is_bipartitions_updated : bool
+            If `False` [default], then the tree will have its splits encoded or
+            updated. Otherwise, if `True`, then the tree is assumed to have its
+            splits already encoded and updated.
+
+        """
+        for tree in trees:
+            self.add_tree(tree,
+                    is_bipartitions_updated=is_bipartitions_updated)
+
+    ##############################################################################
+    ## I/O
+
+    def read_from_files(self,
+            files,
+            schema,
+            **kwargs):
+        """
+        Adds multiple structures from one or more external file sources to the
+        collection.
+
+        Parameters
+        ----------
+        files : iterable of strings and/or file objects
+            A list or some other iterable of file paths or file-like objects
+            (string elements will be assumed to be paths to files, while all
+            other types of elements will be assumed to be file-like
+            objects opened for reading).
+        schema : string
+            The data format of the source. E.g., "nexus", "newick", "nexml".
+        \*\*kwargs : keyword arguments
+            These will be passed directly to the underlying schema-specific
+            reader implementation.
+        """
+        if "taxon_namespace" in kwargs:
+            if kwargs["taxon_namespace"] is not self.taxon_namespace:
+                raise ValueError("TaxonNamespace object passed as keyword argument is not the same as self's TaxonNamespace reference")
+            kwargs.pop("taxon_namespace")
+        target_tree_offset = kwargs.pop("tree_offset", 0)
+        tree_yielder = self.tree_type.yield_from_files(
+                files=files,
+                schema=schema,
+                taxon_namespace=self.taxon_namespace,
+                **kwargs)
+        current_source_index = None
+        current_tree_offset = None
+        for tree_idx, tree in enumerate(tree_yielder):
+            current_yielder_index = tree_yielder.current_file_index
+            if current_source_index != current_yielder_index:
+                current_source_index = current_yielder_index
+                current_tree_offset = 0
+            if current_tree_offset >= target_tree_offset:
+                self.add_tree(tree=tree, is_bipartitions_updated=False)
+            current_tree_offset += 1
+
+    def _parse_and_add_from_stream(self,
+            stream,
+            schema,
+            **kwargs):
+        cur_size = len(self._tree_split_bitmasks)
+        self.read_from_files(files=[stream], schema=schema, **kwargs)
+        new_size = len(self._tree_split_bitmasks)
+        return new_size - cur_size
+
+    def read(self, **kwargs):
+        """
+        Add |Tree| objects to existing |TreeList| from data source providing
+        one or more collections of trees.
+
+        **Mandatory Source-Specification Keyword Argument (Exactly One Required):**
+
+            - **file** (*file*) -- File or file-like object of data opened for reading.
+            - **path** (*str*) -- Path to file of data.
+            - **url** (*str*) -- URL of data.
+            - **data** (*str*) -- Data given directly.
+
+        **Mandatory Schema-Specification Keyword Argument:**
+
+            - **schema** (*str*) -- Identifier of format of data given by the
+              "``file``", "``path``", "``data``", or "``url``" argument
+              specified above: ":doc:`newick </schemas/newick>`", ":doc:`nexus
+              </schemas/nexus>`", or ":doc:`nexml </schemas/nexml>`". See
+              "|Schemas|" for more details.
+
+        **Optional General Keyword Arguments:**
+
+            - **collection_offset** (*int*) -- 0-based index of tree block or
+              collection in source to be parsed. If not specified then the
+              first collection (offset = 0) is assumed.
+            - **tree_offset** (*int*) -- 0-based index of first tree within the
+              collection specified by ``collection_offset`` to be parsed (i.e.,
+              skipping the first ``tree_offset`` trees). If not
+              specified, then the first tree (offset = 0) is assumed (i.e., no
+              trees within the specified collection will be skipped). Use this
+              to specify, e.g. a burn-in.
+            - **ignore_unrecognized_keyword_arguments** (*bool*) -- If `True`,
+              then unsupported or unrecognized keyword arguments will not
+              result in an error. Default is `False`: unsupported keyword
+              arguments will result in an error.
+
+        **Optional Schema-Specific Keyword Arguments:**
+
+            These provide control over how the data is interpreted and
+            processed, and supported argument names and values depend on
+            the schema as specified by the value passed as the "``schema``"
+            argument. See "|Schemas|" for more details.
+
+        **Examples:**
+
+        ::
+
+            tree_array = dendropy.TreeArray()
+            tree_array.read(
+                    file=open('treefile.tre', 'rU'),
+                    schema="newick",
+                    tree_offset=100)
+            tree_array.read(
+                    path='sometrees.nexus',
+                    schema="nexus",
+                    collection_offset=2,
+                    tree_offset=100)
+            tree_array.read(
+                    data="((A,B),(C,D));((A,C),(B,D));",
+                    schema="newick")
+            tree_array.read(
+                    url="http://api.opentreeoflife.org/v2/study/pg_1144/tree/tree2324.nex",
+                    schema="nexus")
+
+        """
+        # Source/schema keyword handling is delegated to the mixin, which
+        # ultimately calls back into ``_parse_and_add_from_stream()``.
+        return basemodel.MultiReadable._read_from(self, **kwargs)
+
+
+    # def read_from_stream(self, fileobj, schema, **kwargs):
+    #     """
+    #     Reads trees from a file. See :meth:|TreeList|.read_from_stream()`.
+    #     """
+    #     return self.read_from_files(
+    #             files=[fileobj],
+    #             schema=schema,
+    #             **kwargs)
+
+    # def read_from_path(self, filepath, schema, **kwargs):
+    #     """
+    #     Reads trees from a path. See :meth:|TreeList|.read_from_path()`.
+    #     """
+    #     return self.read_from_files(
+    #             files=[filepath],
+    #             schema=schema,
+    #             **kwargs)
+
+    # def read_from_string(self, src_str, schema, **kwargs):
+    #     """
+    #     Reads trees from a string. See :meth:|TreeList|.read_from_string()`.
+    #     """
+    #     return self.read_from_files(
+    #             files=[StringIO(src_str)],
+    #             schema=schema,
+    #             **kwargs)
+
+    ##############################################################################
+    ## Container (List) Interface
+
+    def append(tree, is_bipartitions_updated=False):
+        """
+        Adds a |Tree| instance to the collection before position given
+        by ``index``.
+
+        Parameters
+        ----------
+        tree : |Tree|
+            A |Tree| instance. This must have the same rooting state as
+            all the other trees accessioned into this collection as well as
+            that of ``self.is_rooted_trees``.
+        is_bipartitions_updated : bool
+            If `False` [default], then the tree will have its splits encoded or
+            updated. Otherwise, if `True`, then the tree is assumed to have its
+            splits already encoded and updated.
+
+        """
+        return self.add_tree(tree=tree,
+                is_bipartitions_updated=is_bipartitions_updated)
+
+    def insert(index, tree, is_bipartitions_updated=False):
+        """
+        Adds a |Tree| instance to the collection before position given
+        by ``index``.
+
+        Parameters
+        ----------
+        index : integer
+            Insert before index.
+        tree : |Tree|
+            A |Tree| instance. This must have the same rooting state as
+            all the other trees accessioned into this collection as well as
+            that of ``self.is_rooted_trees``.
+        is_bipartitions_updated : bool
+            If `False` [default], then the tree will have its splits encoded or
+            updated. Otherwise, if `True`, then the tree is assumed to have its
+            splits already encoded and updated.
+
+        Returns
+        -------
+        index : int
+            The index of the accession.
+        s : iterable of splits
+            A list of split bitmasks from ``tree``.
+        e :
+            A list of edge length values ``tree``.
+        """
+        return self.add_tree(tree=tree,
+                is_bipartitions_updated=is_bipartitions_updated,
+                index=index)
+
+    def extend(self, tree_array):
+        """
+        Accession of data from ``tree_array`` to self.
+
+        Parameters
+        ----------
+        tree_array : |TreeArray|
+            A |TreeArray| instance from which to add data.
+
+        """
+        assert self.taxon_namespace is tree_array.taxon_namespace
+        assert self._is_rooted_trees is tree_array._is_rooted_trees
+        assert self.ignore_edge_lengths is tree_array.ignore_edge_lengths
+        assert self.ignore_node_ages is tree_array.ignore_node_ages
+        assert self.use_tree_weights is tree_array.use_tree_weights
+        self._tree_split_bitmasks.extend(tree_array._tree_split_bitmasks)
+        self._tree_edge_lengths.extend(tree_array._tree_edge_lengths)
+        self._tree_weights.extend(other._tree_weights)
+        self._split_distribution.update(tree_array._split_distribution)
+        return self
+
+    def __iadd__(self, tree_array):
+        """
+        Accession of data from ``tree_array`` to self.
+
+        Parameters
+        ----------
+        tree_array : |TreeArray|
+            A |TreeArray| instance from which to add data.
+
+        """
+        return self.extend(tree_array)
+
+    def __add__(self, other):
+        """
+        Creates and returns new |TreeArray|.
+
+        Parameters
+        ----------
+        other : iterable of |Tree| objects
+
+        Returns
+        -------
+        tlist : |TreeArray| object
+            |TreeArray| object containing clones of |Tree| objects
+            in ``self`` and ``other``.
+        """
+        ta = TreeArray(
+                taxon_namespace=self.taxon_namespace,
+                is_rooted_trees=self._is_rooted_trees,
+                ignore_edge_lengths=self.ignore_edge_lengths,
+                ignore_node_ages=self.ignore_node_ages,
+                use_tree_weights=self.use_tree_weights,
+                ultrametricity_precision=self._split_distribution.ultrametricity_precision,
+                )
+        ta.default_edge_length_value = self.default_edge_length_value
+        ta.tree_type = self.tree_type
+        ta += self
+        ta += other
+        return ta
+
+    def __contains__(self, splits):
+        # expensive!!
+        return tuple(splits) in self._tree_split_bitmasks
+
+    def __delitem__(self, index):
+        # Deletion is not supported: removing a tree would require
+        # subtracting its splits, weight, etc. from the underlying split
+        # distribution, as sketched (incompletely) below.
+        raise NotImplementedError
+        # expensive!!
+        # tree_split_bitmasks = self._trees_splits[index]
+        ### TODO: remove this "tree" from underlying splits distribution
+        # for split in tree_split_bitmasks:
+        #   self._split_distribution.split_counts[split] -= 1
+        # etc.
+        # becomes complicated because tree weights need to be updated etc.
+        # del self._tree_split_bitmasks[index]
+        # del self._tree_edge_lengths[index]
+        # return
+
+    def __iter__(self):
+        """
+        Yields pairs of (split, edge_length) from the store.
+        """
+        for split, edge_length in zip(self._tree_split_bitmasks, self._tree_edge_lengths):
+            yield split, edge_length
+
+    def __reversed__(self):
+        # Reverse iteration over the store is not supported.
+        raise NotImplementedError
+
+    def __len__(self):
+        # Number of trees accessioned into the collection.
+        return len(self._tree_split_bitmasks)
+
+    def __getitem__(self, index):
+        # Not supported; use ``get_split_bitmask_and_edge_tuple()`` instead.
+        raise NotImplementedError
+        # """
+        # Returns a pair of tuples, ( (splits...), (lengths...) ), corresponding
+        # to the "tree" at ``index``.
+        # """
+        # return self._tree_split_bitmasks[index], self._tree_edge_lengths[index]
+
+    def __setitem__(self, index, value):
+        # In-place replacement of accessioned trees is not supported.
+        raise NotImplementedError
+
+    def clear(self):
+        # Not supported (the statements below are an unreachable sketch).
+        raise NotImplementedError
+        self._tree_split_bitmasks = []
+        self._tree_edge_lengths = []
+        self._tree_leafset_bitmasks = []
+        self._split_distribution.clear()
+
+    def index(self, splits):
+        # Not supported (the statement below is an unreachable sketch).
+        raise NotImplementedError
+        return self._tree_split_bitmasks.index(splits)
+
+    def pop(self, index=-1):
+        # Not supported: see ``__delitem__`` for why removal is hard.
+        raise NotImplementedError
+
+    def remove(self, tree):
+        # Not supported: see ``__delitem__`` for why removal is hard.
+        raise NotImplementedError
+
+    def reverse(self):
+        # Not supported.
+        raise NotImplementedError
+
+    def sort(self, key=None, reverse=False):
+        # Not supported.
+        raise NotImplementedError
+
+    ##############################################################################
+    ## Accessors/Settors
+
+    def get_split_bitmask_and_edge_tuple(self, index):
+        """
+        Returns a pair of tuples, ( (splits...), (lengths...) ), corresponding
+        to the "tree" at ``index``.
+        """
+        return self._tree_split_bitmasks[index], self._tree_edge_lengths[index]
+
+    ##############################################################################
+    ## Calculations
+
+    def calculate_log_product_of_split_supports(self,
+            include_external_splits=False,
+            ):
+        """
+        Calculates the log product of split support for each of the trees in
+        the collection.
+
+        Parameters
+        ----------
+        include_external_splits : bool
+            If `True`, then non-internal split posteriors will be included in
+            the score. Defaults to `False`: these are skipped. This should only
+            make a difference when dealing with splits collected from trees of
+            different leaf sets.
+
+        Returns
+        -------
+        s : tuple(list[numeric], integer)
+            Returns a tuple, with the first element being the list of scores
+            and the second being the index of the highest score. The element order
+            corresponds to the trees accessioned in the collection.
+        """
+        assert len(self._tree_leafset_bitmasks) == len(self._tree_split_bitmasks)
+        scores = []
+        max_score = None
+        max_score_tree_idx = None
+        split_frequencies = self._split_distribution.split_frequencies
+        for tree_idx, (tree_leafset_bitmask, split_bitmasks) in enumerate(zip(self._tree_leafset_bitmasks, self._tree_split_bitmasks)):
+            log_product_of_split_support = 0.0
+            for split_bitmask in split_bitmasks:
+                if (include_external_splits
+                        or split_bitmask == tree_leafset_bitmask # count root edge (following BEAST)
+                        or not treemodel.Bipartition.is_trivial_bitmask(split_bitmask, tree_leafset_bitmask)
+                        ):
+                    split_support = split_frequencies.get(split_bitmask, 0.0)
+                    if split_support:
+                        log_product_of_split_support += math.log(split_support)
+            if max_score is None or max_score < log_product_of_split_support:
+                max_score = log_product_of_split_support
+                max_score_tree_idx = tree_idx
+            scores.append(log_product_of_split_support)
+        return scores, max_score_tree_idx
+
+    def maximum_product_of_split_support_tree(self,
+            include_external_splits=False,
+            summarize_splits=True,
+            **split_summarization_kwargs
+            ):
+        """
+        Return the tree with that maximizes the product of split supports, also
+        known as the "Maximum Clade Credibility Tree" or MCCT.
+
+        Parameters
+        ----------
+        include_external_splits : bool
+            If `True`, then non-internal split posteriors will be included in
+            the score. Defaults to `False`: these are skipped. This should only
+            make a difference when dealing with splits collected from trees of
+            different leaf sets.
+
+        Returns
+        -------
+        mcct_tree : Tree
+            Tree that maximizes the product of split supports.
+        """
+        scores, max_score_tree_idx = self.calculate_log_product_of_split_supports(
+                include_external_splits=include_external_splits,
+                )
+        tree = self.restore_tree(
+                index=max_score_tree_idx,
+                **split_summarization_kwargs)
+        tree.log_product_of_split_support = scores[max_score_tree_idx]
+        if summarize_splits:
+            self._split_distribution.summarize_splits_on_tree(
+                tree=tree,
+                is_bipartitions_updated=True,
+                **split_summarization_kwargs
+                )
+        return tree
+
+    def calculate_sum_of_split_supports(self,
+            include_external_splits=False,
+            ):
+        """
+        Calculates the *sum* of split support for all trees in the
+        collection.
+
+        Parameters
+        ----------
+        include_external_splits : bool
+            If `True`, then non-internal split posteriors will be included in
+            the score. Defaults to `False`: these are skipped. This should only
+            make a difference when dealing with splits collected from trees of
+            different leaf sets.
+
+        Returns
+        -------
+        s : tuple(list[numeric], integer)
+            Returns a tuple, with the first element being the list of scores
+            and the second being the index of the highest score. The element order
+            corresponds to the trees accessioned in the collection.
+        """
+        assert len(self._tree_leafset_bitmasks) == len(self._tree_split_bitmasks)
+        scores = []
+        max_score = None
+        max_score_tree_idx = None
+        split_frequencies = self._split_distribution.split_frequencies
+        for tree_idx, (tree_leafset_bitmask, split_bitmasks) in enumerate(zip(self._tree_leafset_bitmasks, self._tree_split_bitmasks)):
+            sum_of_support = 0.0
+            for split_bitmask in split_bitmasks:
+                if (include_external_splits
+                        or split_bitmask == tree_leafset_bitmask # count root edge (following BEAST)
+                        or not treemodel.Bipartition.is_trivial_bitmask(split_bitmask, tree_leafset_bitmask)
+                        ):
+                    split_support = split_frequencies.get(split_bitmask, 0.0)
+                    sum_of_support += split_support
+            if max_score is None or max_score < sum_of_support:
+                max_score = sum_of_support
+                max_score_tree_idx = tree_idx
+            scores.append(sum_of_support)
+        return scores, max_score_tree_idx
+
+    def maximum_sum_of_split_support_tree(self,
+            include_external_splits=False,
+            summarize_splits=True,
+            **split_summarization_kwargs
+            ):
+        """
+        Return the tree with that maximizes the *sum* of split supports.
+
+        Parameters
+        ----------
+        include_external_splits : bool
+            If `True`, then non-internal split posteriors will be included in
+            the score. Defaults to `False`: these are skipped. This should only
+            make a difference when dealing with splits collected from trees of
+            different leaf sets.
+
+        Returns
+        -------
+        mst_tree : Tree
+            Tree that maximizes the sum of split supports.
+        """
+        scores, max_score_tree_idx = self.calculate_sum_of_split_supports(
+                include_external_splits=include_external_splits,
+                )
+        tree = self.restore_tree(
+                index=max_score_tree_idx,
+                **split_summarization_kwargs
+                )
+        tree.sum_of_split_support = scores[max_score_tree_idx]
+        if summarize_splits:
+            self._split_distribution.summarize_splits_on_tree(
+                tree=tree,
+                is_bipartitions_updated=True,
+                **split_summarization_kwargs
+                )
+        return tree
+
+    def consensus_tree(self,
+            min_freq=constants.GREATER_THAN_HALF,
+            summarize_splits=True,
+            **split_summarization_kwargs
+            ):
+        """
+        Returns a consensus tree from splits in ``self``.
+
+        Parameters
+        ----------
+
+        min_freq : real
+            The minimum frequency of a split in this distribution for it to be
+            added to the tree.
+
+        is_rooted : bool
+            Should tree be rooted or not? If *all* trees counted for splits are
+            explicitly rooted or unrooted, then this will default to `True` or
+            `False`, respectively. Otherwise it defaults to `None`.
+
+        \*\*split_summarization_kwargs : keyword arguments
+            These will be passed directly to the underlying
+            `SplitDistributionSummarizer` object. See
+            :meth:`SplitDistributionSummarizer.configure` for options.
+
+        Returns
+        -------
+        t : consensus tree
+
+        """
+        tree = self._split_distribution.consensus_tree(
+                min_freq=min_freq,
+                is_rooted=self.is_rooted_trees,
+                summarize_splits=summarize_splits,
+                **split_summarization_kwargs
+                )
+        # return self._split_distribution.consensus_tree(*args, **kwargs)
+        return tree
+
+    ##############################################################################
+    ## Mapping of Split Support
+
+    def summarize_splits_on_tree(self,
+            tree,
+            is_bipartitions_updated=False,
+            **kwargs):
+        if self.taxon_namespace is not tree.taxon_namespace:
+            raise error.TaxonNamespaceIdentityError(self, tree)
+        self._split_distribution.summarize_splits_on_tree(
+            tree=tree,
+            is_bipartitions_updated=is_bipartitions_updated,
+            **kwargs
+            )
+
+    ##############################################################################
+    ## Tree Reconstructions
+
+    def restore_tree(self,
+            index,
+            summarize_splits_on_tree=False,
+            **split_summarization_kwargs
+            ):
+        split_bitmasks = self._tree_split_bitmasks[index]
+        if self.ignore_edge_lengths:
+            split_edge_lengths = None
+        else:
+            assert len(self._tree_split_bitmasks) == len(self._tree_edge_lengths)
+            edge_lengths = self._tree_edge_lengths[index]
+            split_edge_lengths = dict(zip(split_bitmasks, edge_lengths))
+        tree = self.tree_type.from_split_bitmasks(
+                split_bitmasks=split_bitmasks,
+                taxon_namespace=self.taxon_namespace,
+                is_rooted=self._is_rooted_trees,
+                split_edge_lengths=split_edge_lengths,
+                )
+        # if update_bipartitions:
+        #     tree.encode_bipartitions()
+        if summarize_splits_on_tree:
+            split_summarization_kwargs["is_bipartitions_updated"] = True
+            self._split_distribution.summarize_splits_on_tree(
+                    tree=tree,
+                    **split_summarization_kwargs)
+        return tree
+
+    ##############################################################################
+    ## Topology Frequencies
+
+    def split_bitmask_set_frequencies(self):
+        """
+        Returns a dictionary with keys being sets of split bitmasks and values
+        being the frequency of occurrence of trees represented by those split
+        bitmask sets in the collection.
+        """
+        split_bitmask_set_count_map = collections.Counter()
+        assert len(self._tree_split_bitmasks) == len(self._tree_weights)
+        for split_bitmask_set, weight in zip(self._tree_split_bitmasks, self._tree_weights):
+            split_bitmask_set_count_map[frozenset(split_bitmask_set)] += (1.0 * weight)
+        split_bitmask_set_freqs = {}
+        normalization_weight = self._split_distribution.calc_normalization_weight()
+        # print("===> {}".format(normalization_weight))
+        for split_bitmask_set in split_bitmask_set_count_map:
+            split_bitmask_set_freqs[split_bitmask_set] = split_bitmask_set_count_map[split_bitmask_set] / normalization_weight
+        return split_bitmask_set_freqs
+
+    def bipartition_encoding_frequencies(self):
+        """
+        Returns a dictionary with keys being bipartition encodings of trees
+        (as ``frozenset`` collections of |Bipartition| objects) and
+        values the frequency of occurrence of trees represented by that
+        encoding in the collection.
+        """
+        # split_bitmask_set_freqs = self.split_bitmask_set_frequencies()
+        # bipartition_encoding_freqs = {}
+        # for split_bitmask_set, freq in split_bitmask_set_freqs.items():
+        #     bipartition_encoding = []
+        #     inferred_leafset = max(split_bitmask_set)
+        #     for split_bitmask in split_bitmask_set:
+        #         bipartition = treemodel.Bipartition(
+        #                 bitmask=split_bitmask,
+        #                 tree_leafset_bitmask=inferred_leafset,
+        #                 is_rooted=self._is_rooted_trees,
+        #                 is_mutable=False,
+        #                 compile_bipartition=True,
+        #                 )
+        #         bipartition_encoding.append(bipartition)
+        #     bipartition_encoding_freqs[frozenset(bipartition_encoding)] = freq
+        # return bipartition_encoding_freqs
+        bipartition_encoding_freqs = {}
+        topologies = self.topologies()
+        for tree in topologies:
+            bipartition_encoding_freqs[ frozenset(tree.encode_bipartitions()) ] = tree.frequency
+        return bipartition_encoding_freqs
+
+    def topologies(self,
+            sort_descending=None,
+            frequency_attr_name="frequency",
+            frequency_annotation_name="frequency",
+            ):
+        """
+        Returns a |TreeList| instance containing the reconstructed tree
+        topologies (i.e. |Tree| instances with no edge weights) in the
+        collection, with the frequency added as an attributed.
+
+        Parameters
+        ----------
+        sort_descending : bool
+            If `True`, then topologies will be sorted in *descending* frequency
+            order (i.e., topologies with the highest frequencies will be listed
+            first). If `False`, then they will be sorted in *ascending*
+            frequency. If `None` (default), then they will not be sorted.
+        frequency_attr_name : str
+            Name of attribute to add to each |Tree| representing
+            the frequency of that topology in the collection. If `None`
+            then the attribute will not be added.
+        frequency_annotation_name : str
+            Name of annotation to add to the annotations of each |Tree|,
+            representing the frequency of that topology in the collection. The
+            value of this annotation will be dynamically-bound to the attribute
+            specified by ``frequency_attr_name`` unless that is `None`. If
+            ``frequency_annotation_name`` is `None` then the annotation will not
+            be added.
+        """
+        if sort_descending is not None and frequency_attr_name is None:
+                raise ValueError("Attribute needs to be set on topologies to enable sorting")
+        split_bitmask_set_freqs = self.split_bitmask_set_frequencies()
+        topologies = TreeList(taxon_namespace=self.taxon_namespace)
+        for split_bitmask_set, freq in split_bitmask_set_freqs.items():
+            tree = self.tree_type.from_split_bitmasks(
+                    split_bitmasks=split_bitmask_set,
+                    taxon_namespace=self.taxon_namespace,
+                    is_rooted=self._is_rooted_trees,
+                    )
+            if frequency_attr_name is not None:
+                setattr(tree, frequency_attr_name, freq)
+                if frequency_annotation_name is not None:
+                    tree.annotations.add_bound_attribute(
+                        attr_name=frequency_attr_name,
+                        annotation_name=frequency_annotation_name,
+                        )
+            else:
+                tree.annotations.add_new(
+                    frequency_annotation_name,
+                    freq,
+                    )
+            topologies.append(tree)
+        if sort_descending is not None:
+            topologies.sort(key=lambda t: getattr(t, frequency_attr_name), reverse=sort_descending)
+        return topologies
+
+
+
diff --git a/dendropy/datamodel/treemodel.py b/dendropy/datamodel/treemodel.py
new file mode 100644
index 0000000..32d4f94
--- /dev/null
+++ b/dendropy/datamodel/treemodel.py
@@ -0,0 +1,6020 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+This module handles the core definition of tree data structure class,
+as well as all the structural classes that make up a tree.
+"""
+
+import collections
+import math
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+import copy
+import sys
+from dendropy.utility import GLOBAL_RNG
+from dendropy.utility import container
+from dendropy.utility import terminal
+from dendropy.utility import error
+from dendropy.utility import bitprocessing
+from dendropy.utility import deprecate
+from dendropy.utility import constants
+from dendropy.datamodel import basemodel
+from dendropy.datamodel import taxonmodel
+from dendropy import dataio
+
+##############################################################################
+### Bipartition
+
+class Bipartition(object):
+    """
+    A bipartition on a tree.
+
+    A bipartition of a tree is a division or sorting of the leaves/tips of a
+    tree into two mutually-exclusive and collectively-comprehensive subsets,
+    obtained by bisecting the tree at a particular edge. There is thus a
+    one-to-one correspondence with an edge of a tree and a bipartition. The
+    term "split" is often also used to refer to the same concept, though this
+    is typically applied to unrooted trees.
+
+    A bipartition is modeled using a bitmask. This is a bit array
+    representing the membership of taxa, with the least-significant bit
+    corresponding to the first taxon, the next least-significant bit
+    corresponding to the second taxon, and so on, till the last taxon
+    corresponding to the most-significant bit. Taxon membership in one of two
+    arbitrary groups, '0' or '1', is indicated by its corresponding bit being
+    unset or set, respectively.
+
+    To allow comparisons and correct identification of the same bipartition
+    across different rotational and orientational representations of unrooted
+    trees, we *normalize* the bipartition such that the first taxon is always
+    assigned to group '0' for bipartition representations of unrooted trees.
+
+    The normalization of the bitmask loses information about the actual
+    descendents of a particular edge. Thus in addition to the
+    :attr:`Bipartition.bitmask` attribute, each |Bipartition| object
+    also maintains a :attr:`Bipartition.leafset_bitmask` attribute which is
+    *unnormalized*. This is a bit array representing the presence or absence of
+    taxa in the subtree descending from the child node of the edge of which
+    this bipartition is associated. The least-significant bit corresponds to
+    the first taxon, the next least-signficant bit corresponds to the second
+    taxon, and so on, with the last taxon corresponding to the most-significant
+    bit. For rooted trees, the value of :attr:`Bipartition.bitmask` and
+    :attr:`Bipartition.leafset_bitmask` are identical. For unrooted trees, they
+    may or may not be equal.
+
+    In general, we use :attr:`Bipartition.bitmask` data to establish the *identity*
+    of a split or bipartition across *different* trees: for example, when
+    computing the Robinson-Foulds distances between trees, or in assessing the
+    support for different bipartitions given an MCMC or bootstrap sample of trees.
+    Here the normalization of the bitmask in unrooted trees allows for the
+    (arbitrarily-labeled) group '0' to be consistent across different
+    representations, rotations, and orientations of trees.
+
+    On the other hand, we use :attr:`Bipartition.leafset_bitmask` data to work
+    with various ancestor-descendent relationships *within* the *same* tree:
+    for example, to quickly assess if a taxon descends from a particular
+    node in a given tree, or if a particular node is a common ancestor of
+    two taxa in a given tree.
+
+    The |Bipartition| object might be used in keys in dictionaries and
+    look-up tables implemented as sets to allow for, e.g., calculation of
+    support in terms of the number times a particular bipartition is observed.
+    The :attr:`Bipartition.bitmask` is used as hash value for this purpose. As
+    such, it is crucial that this value does not change once a particular
+    |Bipartition| object is stored in a dictionary or set. To this end,
+    we impose the constraint that |Bipartition| objects are immutable
+    unless the ``is_mutable`` attribute is explicitly set to `True` as a sort
+    of waiver signed by the client code. Client code does this at its risk,
+    with the warning that anything up to and including the implosion of the
+    universe may occur if the |Bipartition| object is a member of a set
+    or dictionary at the time (or, at the very least, the modified
+    |Bipartition| object may not be accessible from dictionaries
+    and sets in which it is stored, or may occlude other
+    |Bipartition| objects in the container).
+
+    Note
+    ----
+
+    There are two possible ways of mapping taxa to bits in a bitarray or bitstring.
+
+    In the "Least-Significant-Bit" (LSB) scheme, the first taxon corresponds to the
+    least-significant, or right-most, bit. So, given four taxa, indexed from 1 to 4,
+    taxon 1 would map to 0b0001, taxon 2 would map to 0b0010, taxon 3 would map
+    to 0b0100, and taxon 4 would map to 0b1000.
+
+    In the "Most-Significant-Bit" (MSB) scheme, on the other hand, the first taxon
+    corresponds to the most-significant, or left-most, bit. So, given four
+    taxa, indexed from 1 to 4, taxon 1 would map to 0b1000, taxon 2 would map
+    to 0b0100, taxon 3 would map to 0b0010, and taxon 4 would map to 0b0001.
+
+    We selected the Least Significant Bit (LSB) approach because the MSB scheme
+    requires the size of the taxon namespace to fixed before the index can be
+    assigned to any taxa. For example, under the MSB scheme, if there are 4
+    taxa, the bitmask for taxon 1 is 0b1000 == 8, but if another taxon is
+    added, then the bitmask for taxon 1 will become 0b10000 == 16. On the other
+    hand, under the LSB scheme, the bitmask for taxon 1 will be 0b0001 == 1 if
+    there are 4 taxa, and 0b00001 == 1 if there are 5 taxa, and so on. This
+    stability of taxon indexes even as the taxon namespace grows is a strongly
+    desirable property, and thus the adoption of the LSB scheme.
+
+    Constraining the first taxon to be in group 0 (LSB-0) rather than group 1
+    (LSB-1) is motivated by the fact that, in the former, we would combine
+    the bitmasks of child nodes using OR (logical addition) operations when
+    calculating the bitmask for a parent node, whereas, with the latter, we
+    would need to use AND operations. The former strikes us as more intuitive.
+
+    """
+
+    def normalize_bitmask(bitmask, fill_bitmask, lowest_relevant_bit):
+        if bitmask & lowest_relevant_bit:
+            return (~bitmask) & fill_bitmask             # force least-significant bit to 0
+        else:
+            return bitmask & fill_bitmask                # keep least-significant bit as 0
+    normalize_bitmask = staticmethod(normalize_bitmask)
+
+    def is_trivial_bitmask(bitmask, fill_bitmask):
+        """
+        Returns True if the bitmask occurs in any tree of the taxa ``mask`` -- if
+        there is only fewer than two 1's or fewer than two 0's in ``bitmask`` (among
+        all of the that are 1 in mask).
+        """
+        masked_split = bitmask & fill_bitmask
+        if bitmask == 0 or bitmask == fill_bitmask:
+            return True
+        if ((masked_split - 1) & masked_split) == 0:
+            return True
+        cm = (~bitmask) & fill_bitmask
+        if ((cm - 1) & cm) == 0:
+            return True
+        return False
+    is_trivial_bitmask = staticmethod(is_trivial_bitmask)
+
+    def is_trivial_leafset(leafset_bitmask):
+        return bitprocessing.num_set_bits(leafset_bitmask) == 1
+    is_trivial_leafset = staticmethod(is_trivial_leafset)
+
+    def is_compatible_bitmasks(m1, m2, fill_bitmask):
+        """
+        Returns `True` if ``m1`` is compatible with ``m2``
+
+        Parameters
+        ----------
+        m1 : int
+            A bitmask representing a split.
+        m2 : int
+            A bitmask representing a split.
+
+        Returns
+        -------
+        bool
+            `True` if ``m1`` is compatible with ``m2``. `False` otherwise.
+        """
+        if fill_bitmask != 0:
+            m1 = fill_bitmask & m1
+            m2 = fill_bitmask & m2
+        if 0 == (m1 & m2):
+            return True
+        c2 = m1 ^ m2
+        if 0 == (m1 & c2):
+            return True
+        c1 = fill_bitmask ^ m1
+        if 0 == (c1 & m2):
+            return True
+        if 0 == (c1 & c2):
+            return True
+        return False
+    is_compatible_bitmasks = staticmethod(is_compatible_bitmasks)
+
+    ##############################################################################
+    ## Life-cycle
+
+    def __init__(self, **kwargs):
+        """
+
+        Keyword Arguments
+        -----------------
+        bitmask : integer
+            A bit array representing the membership of taxa, with the
+            least-significant bit corresponding to the first taxon, the next
+            least-signficant bit correspodning to the second taxon, and so on,
+            till the last taxon corresponding to the most-significant bit.
+            Taxon membership in one of two arbitrary groups, '0' or '1', is
+            indicated by its correspondign bit being unset or set,
+            respectively.
+        leafset_bitmask : integer
+            A bit array representing the presence or absence of taxa in the
+            subtree descending from the child node of the edge of which this
+            bipartition is associated. The least-significant bit corresponds to
+            the first taxon, the next least-signficant bit corresponds to the
+            second taxon, and so on, with the last taxon corresponding to the
+            most-significant bit.
+        tree_leafset_bitmask : integer
+            The ``leafset_bitmask`` of the root edge of the tree with which this
+            bipartition is associated. In, general, this will be $0b1111...n$,
+            where $n$ is the number of taxa, *except* in cases of trees with
+            incomplete leaf-sets, where the positions corresponding to the
+            missing taxa will have the bits unset.
+        is_rooted : bool
+            Specifies whether or not the tree with which this bipartition is
+            associated is rooted.
+        """
+        self._split_bitmask = kwargs.get("bitmask", 0)
+        self._leafset_bitmask = kwargs.get("leafset_bitmask", self._split_bitmask)
+        self._tree_leafset_bitmask = kwargs.get("tree_leafset_bitmask", None)
+        self._lowest_relevant_bit = None
+        self._is_rooted = kwargs.get("is_rooted", None)
+        # self.edge = kwargs.get("edge", None)
+        is_mutable = kwargs.get("is_mutable", None)
+        if kwargs.get("compile_bipartition", True):
+            self.is_mutable = True
+            self.compile_split_bitmask(
+                    leafset_bitmask=self._leafset_bitmask,
+                    tree_leafset_bitmask=self._tree_leafset_bitmask)
+            if is_mutable is None:
+                self.is_mutable = True
+            else:
+                self.is_mutable = is_mutable
+        elif is_mutable is not None:
+            self.is_mutable = is_mutable
+
+    ##############################################################################
+    ## Identity
+
+    def __hash__(self):
+        assert not self.is_mutable, "Bipartition is mutable: hash is unstable"
+        return self._split_bitmask or 0
+
+    def __eq__(self, other):
+        # return self._split_bitmask == other._split_bitmask
+        return (self._split_bitmask is not None and self._split_bitmask == other._split_bitmask) or (self._split_bitmask is other._split_bitmask)
+
+    ##############################################################################
+    ## All properties are publically read-only if not mutable
+
    def _get_split_bitmask(self):
        """Return the normalized split bitmask (LSB = first taxon)."""
        return self._split_bitmask
    def _set_split_bitmask(self, value):
        """Set the split bitmask; only permitted while mutable."""
        assert self.is_mutable, "Bipartition instance is not mutable"
        self._split_bitmask = value
    split_bitmask = property(_get_split_bitmask, _set_split_bitmask)

    def _get_leafset_bitmask(self):
        """Return the unnormalized leafset bitmask of the subtended subtree."""
        return self._leafset_bitmask
    def _set_leafset_bitmask(self, value):
        """Set the leafset bitmask; only permitted while mutable."""
        assert self.is_mutable, "Bipartition instance is not mutable"
        self._leafset_bitmask = value
    leafset_bitmask = property(_get_leafset_bitmask, _set_leafset_bitmask)

    def _get_tree_leafset_bitmask(self):
        """Return the leafset bitmask of the associated tree's root edge."""
        return self._tree_leafset_bitmask
    def _set_tree_leafset_bitmask(self, value):
        """Set the tree leafset bitmask; also refreshes the cached lowest relevant bit."""
        assert self.is_mutable, "Bipartition instance is not mutable"
        self.compile_tree_leafset_bitmask(value)
    tree_leafset_bitmask = property(_get_tree_leafset_bitmask, _set_tree_leafset_bitmask)

    def _get_is_rooted(self):
        """Return whether the associated tree is treated as rooted."""
        return self._is_rooted
    def _set_is_rooted(self, value):
        """Set the rooting state; only permitted while mutable."""
        assert self.is_mutable, "Bipartition instance is not mutable"
        self._is_rooted = value
    is_rooted = property(_get_is_rooted, _set_is_rooted)
+
+    ##############################################################################
+    ## Representation
+
+    def __str__(self):
+        return bin(self._split_bitmask)[2:].rjust(bitprocessing.bit_length(self._tree_leafset_bitmask), '0')
+
+    def split_as_bitstring(self, symbol0="0", symbol1="1", reverse=False):
+        """
+        Composes and returns and representation of the bipartition as a
+        bitstring.
+
+        Parameters
+        ----------
+        symbol1 : str
+            The symbol to represent group '0' in the bitmask.
+        symbol1 : str
+            The symbol to represent group '1' in the bitmask.
+        reverse : bool
+            If `True`, then the first taxon will correspond to the
+            most-significant bit, instead of the least-significant bit, as is
+            the default.
+
+        Returns
+        -------
+        str
+            The bitstring representing the bipartition.
+
+        Example
+        -------
+        To represent a bipartition in the same scheme used by, e.g. PAUP* or
+        Mr. Bayes::
+
+            print(bipartition.split_as_bitstring('.', '*', reverse=True))
+        """
+        return self.bitmask_as_bitstring(
+                mask=self._split_bitmask,
+                symbol0=symbol0,
+                symbol1=symbol1,
+                reverse=reverse)
+
+    def leafset_as_bitstring(self, symbol0="0", symbol1="1", reverse=False):
+        """
+        Composes and returns and representation of the bipartition leafset as a
+        bitstring.
+
+        Parameters
+        ----------
+        symbol1 : str
+            The symbol to represent group '0' in the bitmask.
+        symbol1 : str
+            The symbol to represent group '1' in the bitmask.
+        reverse : bool
+            If `True`, then the first taxon will correspond to the
+            most-significant bit, instead of the least-significant bit, as is
+            the default.
+
+        Returns
+        -------
+        str
+            The bitstring representing the bipartition.
+
+        Example
+        -------
+        To represent a bipartition in the same scheme used by, e.g. PAUP* or
+        Mr. Bayes::
+
+            print(bipartition.as_string('.', '*', reverse=True))
+        """
+        return self.bitmask_as_bitstring(
+                mask=self._leafset_bitmask,
+                symbol0=symbol0,
+                symbol1=symbol1,
+                reverse=reverse)
+
+    def bitmask_as_bitstring(self, mask, symbol0=None, symbol1=None, reverse=False):
+        return bitprocessing.int_as_bitstring(mask,
+                length=bitprocessing.bit_length(self._tree_leafset_bitmask),
+                symbol0=symbol0,
+                symbol1=symbol1,
+                reverse=reverse)
+
+    ##############################################################################
+    ## Calculation
+
+    def compile_tree_leafset_bitmask(self,
+            tree_leafset_bitmask,
+            lowest_relevant_bit=None):
+        """
+        Avoids recalculation of ``lowest_relevant_bit`` if specified.
+        """
+        assert self.is_mutable, "Bipartition instance is not mutable"
+        self._tree_leafset_bitmask = tree_leafset_bitmask
+        if lowest_relevant_bit is not None:
+            self._lowest_relevant_bit = lowest_relevant_bit
+        elif self._tree_leafset_bitmask:
+            self._lowest_relevant_bit = bitprocessing.least_significant_set_bit(self._tree_leafset_bitmask)
+        else:
+            self._lowest_relevant_bit = None
+        return self._tree_leafset_bitmask
+
+    def compile_leafset_bitmask(self,
+           leafset_bitmask=None,
+           tree_leafset_bitmask=None):
+        assert self.is_mutable, "Bipartition instance is not mutable"
+        if tree_leafset_bitmask is not None:
+            self.compile_tree_leafset_bitmask(tree_leafset_bitmask)
+        if leafset_bitmask is None:
+            leafset_bitmask = self._leafset_bitmask
+        if self._tree_leafset_bitmask:
+            self._leafset_bitmask = leafset_bitmask & self._tree_leafset_bitmask
+        else:
+            self._leafset_bitmask = leafset_bitmask
+        return self._leafset_bitmask
+
+    def compile_split_bitmask(self,
+           leafset_bitmask=None,
+           tree_leafset_bitmask=None,
+           is_rooted=None,
+           is_mutable=True):
+        """
+        Updates the values of the various masks specified and calculates the
+        normalized bipartition bitmask.
+
+        If a rooted bipartition, then this is set to the value of the leafset
+        bitmask.
+        If an unrooted bipartition, then the leafset bitmask is normalized such that
+        the lowest-significant bit (i.e., the group to which the first taxon
+        belongs) is set to '0'.
+
+        Also makes this bipartition immutable (unless ``is_mutable`` is `False`),
+        which facilitates it being used in dictionaries and sets.
+
+        Parameters
+        ----------
+        leafset_bitmask : integer
+            A bit array representing the presence or absence of taxa in the
+            subtree descending from the child node of the edge of which this
+            bipartition is associated. The least-significant bit corresponds to
+            the first taxon, the next least-signficant bit corresponds to the
+            second taxon, and so on, with the last taxon corresponding to the
+            most-significant bit. If not specified or `None`, the current value
+            of ``self.leafset_bitmask`` is used.
+        tree_leafset_bitmask : integer
+            The ``leafset_bitmask`` of the root edge of the tree with which this
+            bipartition is associated. In, general, this will be $0b1111...n$,
+            where $n$ is the number of taxa, *except* in cases of trees with
+            incomplete leaf-sets, where the positions corresponding to the
+            missing taxa will have the bits unset. If not specified or `None`,
+            the current value of ``self.tree_leafset_bitmask`` is used.
+        is_rooted : bool
+            Specifies whether or not the tree with which this bipartition is
+            associated is rooted. If not specified or `None`, the current value
+            of ``self.is_rooted`` is used.
+
+        Returns
+        -------
+        integer
+            The bipartition bitmask.
+        """
+        assert self.is_mutable, "Bipartition instance is not mutable"
+        if is_rooted is not None:
+            self._is_rooted = is_rooted
+        if tree_leafset_bitmask:
+            self.compile_tree_leafset_bitmask(tree_leafset_bitmask=tree_leafset_bitmask)
+        if leafset_bitmask:
+            self.compile_leafset_bitmask(leafset_bitmask=leafset_bitmask)
+        if self._leafset_bitmask is None:
+            return
+        if self._tree_leafset_bitmask is None:
+            return
+        if self._is_rooted:
+            self._split_bitmask = self._leafset_bitmask
+        else:
+            self._split_bitmask = Bipartition.normalize_bitmask(
+                    bitmask=self._leafset_bitmask,
+                    fill_bitmask=self._tree_leafset_bitmask,
+                    lowest_relevant_bit=self._lowest_relevant_bit)
+        if is_mutable is not None:
+            self.is_mutable = is_mutable
+        return self._split_bitmask
+
+    def compile_bipartition(self, is_mutable=None):
+        """
+        Updates the values of the various masks specified and calculates the
+        normalized bipartition bitmask.
+
+        If a rooted bipartition, then this is set to the value of the leafset
+        bitmask.
+        If an unrooted bipartition, then the leafset bitmask is normalized such that
+        the lowest-significant bit (i.e., the group to which the first taxon
+        belongs) is set to '0'.
+
+        Also makes this bipartition immutable (unless ``is_mutable`` is `False`),
+        which facilitates it being used in dictionaries and sets.
+
+        Note that this requires full population of the following fields:
+            - self._leafset_bitmask
+            - self._tree_leafset_bitmask
+        """
+        self.compile_split_bitmask(self,
+            leafset_bitmask=self._leafset_bitmask,
+            tree_leafset_bitmask=self._tree_leafset_bitmask,
+            is_rooted=self._is_rooted,
+            is_mutable=is_mutable)
+
+    ##############################################################################
+    ## Operations
+
+    def normalize(self, bitmask, convention="lsb0"):
+        """
+        Return ``bitmask`` ensuring that the bit corresponding to the first
+        taxon is 1.
+        """
+        if convention == "lsb0":
+            if self._lowest_relevant_bit & bitmask:
+                return (~bitmask) & self._tree_leafset_bitmask
+            else:
+                return bitmask & self._tree_leafset_bitmask
+        elif convention == "lsb1":
+            if self._lowest_relevant_bit & bitmask:
+                return bitmask & self._tree_leafset_bitmask
+            else:
+                return (~bitmask) & self._tree_leafset_bitmask
+        else:
+            raise ValueError("Unrecognized convention: {}".format(convention))
+
+    def is_compatible_with(self, other):
+        """
+        Returns `True` if ``other`` is compatible with self.
+
+        Parameters
+        ----------
+        other : |Bipartition|
+            The bipartition to check for compatibility.
+
+        Returns
+        -------
+        bool
+            `True` if ``other`` is compatible with ``self``; `False` otherwise.
+        """
+        m1 = self._split_bitmask
+        if isinstance(other, int):
+            m2 = other
+        else:
+            m2 = other._split_bitmask
+        return Bipartition.is_compatible_bitmasks(m1, m2, self._tree_leafset_bitmask)
+
    def is_incompatible_with(self, other):
        """
        Returns `True` if ``other`` conflicts with self.

        This is simply the logical negation of :meth:`is_compatible_with`.

        Parameters
        ----------
        other : |Bipartition|
            The bipartition to check for conflicts.

        Returns
        -------
        bool
            `True` if ``other`` conflicts with ``self``; `False` otherwise.
        """
        return not self.is_compatible_with(other)
+
+    def is_nested_within(self, other, is_other_masked_for_tree_leafset=False):
+        """
+        Returns `True` if the current bipartition is contained
+        within other.
+
+        Parameters
+        ----------
+        other : |Bipartition|
+            The bipartition to check.
+
+        Returns
+        -------
+        bool
+            `True` if the the bipartition is "contained" within ``other``
+        """
+        if self._is_rooted:
+            m1 = self._leafset_bitmask
+            m2 = other._leafset_bitmask
+        else:
+            m1 = self._split_bitmask
+            m2 = other._split_bitmask
+        if not is_other_masked_for_tree_leafset:
+            m2 = self._tree_leafset_bitmask & m2
+        return ( (m1 & m2) == m1 )
+
+    def is_leafset_nested_within(self, other):
+        """
+        Returns `True` if the leafset of ``self`` is a subset of the leafset of
+        ``other``.
+
+        Parameters
+        ----------
+        other : |Bipartition|
+            The bipartition to check for compatibility.
+
+        Returns
+        -------
+        bool
+            `True` if the leafset of ``self`` is contained in ``other``.
+        """
+        if isinstance(other, int):
+            m2 = other
+        else:
+            m2 = other._leafset_bitmask
+        m2 = self._tree_leafset_bitmask & m2
+        return ( (m2 & self._leafset_bitmask) ==  self._leafset_bitmask )
+
+    def is_trivial(self):
+        """
+        Returns
+        -------
+        bool
+            `True` if this bipartition divides a leaf and the rest of the
+            tree.
+        """
+        return Bipartition.is_trivial_bitmask(self._split_bitmask,
+                self._tree_leafset_bitmask)
+
+    def split_as_newick_string(self,
+            taxon_namespace,
+            preserve_spaces=False,
+            quote_underscores=True):
+        """
+        Represents this bipartition split as a newick string.
+
+        Parameters
+        ----------
+        taxon_namespace : |TaxonNamespace| instance
+            The operational taxonomic unit concept namespace to reference.
+        preserve_spaces : boolean, optional
+            If `False` (default), then spaces in taxon labels will be replaced
+            by underscores. If `True`, then taxon labels with spaces will be
+            wrapped in quotes.
+        quote_underscores : boolean, optional
+            If `True` (default), then taxon labels with underscores will be
+            wrapped in quotes. If `False`, then the labels will not be wrapped
+            in quotes.
+
+        Returns
+        -------
+        string
+            NEWICK representation of split specified by ``bitmask``.
+        """
+        return taxon_namespace.bitmask_as_newick_string(
+                bitmask=self._split_bitmask,
+                preserve_spaces=preserve_spaces,
+                quote_underscores=quote_underscores)
+
+    def leafset_as_newick_string(self,
+            taxon_namespace,
+            preserve_spaces=False,
+            quote_underscores=True):
+        """
+        Represents this bipartition leafset as a newick string.
+
+        Parameters
+        ----------
+        taxon_namespace : |TaxonNamespace| instance
+            The operational taxonomic unit concept namespace to reference.
+        preserve_spaces : boolean, optional
+            If `False` (default), then spaces in taxon labels will be replaced
+            by underscores. If `True`, then taxon labels with spaces will be
+            wrapped in quotes.
+        quote_underscores : boolean, optional
+            If `True` (default), then taxon labels with underscores will be
+            wrapped in quotes. If `False`, then the labels will not be wrapped
+            in quotes.
+
+        Returns
+        -------
+        string
+            NEWICK representation of split specified by ``bitmask``.
+        """
+        return taxon_namespace.bitmask_as_newick_string(
+                bitmask=self._leafset_bitmask,
+                preserve_spaces=preserve_spaces,
+                quote_underscores=quote_underscores)
+
+    def leafset_taxa(self, taxon_namespace):
+        """
+        Returns list of |Taxon| objects in the leafset of this
+        bipartition.
+
+        Parameters
+        ----------
+        taxon_namespace : |TaxonNamespace| instance
+            The operational taxonomic unit concept namespace to reference.
+        index : integer, optional
+            Start from this |Taxon| object instead of the first
+            |Taxon| object in the collection.
+
+        Returns
+        -------
+        :py:class:`list` [|Taxon|]
+            List of |Taxon| objects specified or spanned by
+            ``bitmask``.
+        """
+        return taxon_namespace.bitmask_taxa_list(
+                bitmask=self._leafset_bitmask,
+                index=index)
+
+    # def as_newick_string
+    # def is_trivial
+    # def is_non_singleton
+    # def leafset_hash
+    # def leafset_as_bitstring
+    # def is_compatible
+
+##############################################################################
+### Edge
+
class Edge(
        basemodel.DataObject,
        basemodel.Annotable):
    """
    An :term:`edge` on a :term:`tree`.

    An edge connects a child node (its ``head_node``) to that node's
    parent (its ``tail_node``). The tail node is never stored directly:
    it is always derived from the parent of the head node.
    """

    ###########################################################################
    ### Life-cycle and Identity

    def __init__(self, **kwargs):
        """
        Keyword Arguments
        -----------------
        head_node : |Node|, optional
            Node to which this edge links, i.e., the child node of this
            edge's ``tail_node``.
        length : numerical, optional
            A value representing the weight of the edge.
        rootedge : boolean, optional
            Is the child node of this edge the root or seed node of the tree?
        label : string, optional
            Label for this edge.

        Raises
        ------
        TypeError
            If ``tail_node`` or any other unsupported keyword argument is
            passed.
        """
        basemodel.DataObject.__init__(self, label=kwargs.pop("label", None))
        self._head_node = kwargs.pop("head_node", None)
        if "tail_node" in kwargs:
            raise TypeError("Setting the tail node directly is no longer supported: instead, set the parent node of the head node")
        self.rootedge = kwargs.pop("rootedge", None)
        self.length = kwargs.pop("length", None)
        if kwargs:
            raise TypeError("Unsupported keyword arguments: {}".format(kwargs))
        # The bipartition is created lazily on first access of the
        # ``bipartition`` property below.
        self._bipartition = None
        self.comments = []

    def __copy__(self, memo=None):
        # Shallow-copying an Edge directly is not supported.
        raise TypeError("Cannot directly copy Edge")

    def taxon_namespace_scoped_copy(self, memo=None):
        # Taxon-namespace-scoped copying of an Edge directly is not supported.
        raise TypeError("Cannot directly copy Edge")

    def __deepcopy__(self, memo=None):
        # Deep-copying is delegated to the Annotable machinery, which
        # handles annotation cloning and the memo bookkeeping.
        return basemodel.Annotable.__deepcopy__(self, memo=memo)

    def __hash__(self):
        # Identity-based hash, paired with the identity-based __eq__ below.
        return id(self)

    def __eq__(self, other):
        return self is other

    ###########################################################################
    ### Basic Structure

    def _get_tail_node(self):
        # The tail node is always derived from the head node's parent.
        if self._head_node is None:
            return None
        return self._head_node._parent_node
    def _set_tail_node(self, node):
        if self._head_node is None:
            raise ValueError("'_head_node' is 'None': cannot assign 'tail_node'")
        # Go through managed property instead of
        # setting attribute to ensure book-keeping
        self._head_node.parent_node = node
    tail_node = property(_get_tail_node, _set_tail_node)

    def _get_head_node(self):
        return self._head_node
    def _set_head_node(self, node):
        # Go through managed property instead of setting attribute to ensure
        # book-keeping; following should also set ``_head_node`` of ``self``
        node.edge = self
    head_node = property(_get_head_node, _set_head_node)

    def is_leaf(self):
        "Returns True if the head node has no children"
        # NOTE: returns ``self.head_node`` itself (i.e., None, falsy) when
        # there is no head node; callers rely only on truthiness.
        return self.head_node and self.head_node.is_leaf()

    def is_terminal(self):
        # Synonym for is_leaf().
        return self.is_leaf()

    def is_internal(self):
        "Returns True if the head node has children"
        return self.head_node and not self.head_node.is_leaf()

    def get_adjacent_edges(self):
        """
        Returns a list of all edges that "share" a node with ``self``.
        """
        he = [i for i in self.head_node.incident_edges() if i is not self]
        te = [i for i in self.tail_node.incident_edges() if i is not self]
        he.extend(te)
        return he
    adjacent_edges = property(get_adjacent_edges)

    ###########################################################################
    ### Structural Manipulation

    def collapse(self, adjust_collapsed_head_children_edge_lengths=False):
        """
        Inserts all children of the head_node of self as children of the
        tail_node of self in the same place in the child_node list that
        head_node had occupied. The edge length and head_node will no longer be
        part of the tree.

        Parameters
        ----------
        adjust_collapsed_head_children_edge_lengths : bool, optional
            If `True` and this edge has a length, that length is added to
            each reattached child's edge length (children with a `None`
            length adopt this edge's length outright). Defaults to `False`.

        Raises
        ------
        ValueError
            If this edge subtends a leaf (no children to reattach).
        """
        to_del = self.head_node
        parent = self.tail_node
        if not parent:
            # Root edge: there is no tail node to reattach children to.
            return
        children = to_del.child_nodes()
        if not children:
            raise ValueError('collapse_self called with a terminal.')
        # Preserve the position of the collapsed node among its siblings.
        pos = parent.child_nodes().index(to_del)
        parent.remove_child(to_del)
        for child in children:
            parent.insert_child(pos, child)
            pos += 1
            if adjust_collapsed_head_children_edge_lengths and self.length is not None:
                if child.edge.length is None:
                    child.edge.length = self.length
                else:
                    child.edge.length += self.length

    def invert(self, update_bipartitions=False):
        """
        Changes polarity of edge, swapping the roles of its head and tail
        nodes.

        Parameters
        ----------
        update_bipartitions : bool, optional
            Currently unused; retained for interface compatibility.
            NOTE(review): confirm whether bipartition refreshing should be
            triggered here.

        Raises
        ------
        ValueError
            If either the head node or the tail node is `None`.
        """
        if not self.head_node:
            raise ValueError("Cannot invert edge with 'None' for head node")
        if not self.tail_node:
            raise ValueError("Cannot invert edge with 'None' for tail node")

        old_head_node = self.head_node
        new_tail_node = old_head_node
        old_tail_node = self.tail_node
        new_head_node = old_tail_node
        grandparent = old_tail_node._parent_node
        if grandparent is not None:
            # Replace the old tail node with the old head node in the
            # grandparent's child list, preserving its position.
            for idx, ch in enumerate(grandparent._child_nodes):
                if ch is old_tail_node:
                    grandparent._child_nodes[idx] = old_head_node
                    break
            else:
                # we did not break loop: force insertion of old_head_node if
                # not already there
                if old_head_node not in grandparent._child_nodes:
                    grandparent._child_nodes.append(old_head_node)
        assert old_head_node in old_tail_node._child_nodes
        old_tail_node.remove_child(old_head_node)
        assert old_head_node not in old_tail_node._child_nodes
        old_head_node.add_child(old_tail_node)
        # Swap the subtending edge lengths of the two nodes. Consistency
        # fix: the final term previously read ``old_tail_node.edge_length``;
        # it now uses the same ``.edge.length`` access path as the other
        # three terms (the ``edge_length`` property reads the same value).
        old_tail_node.edge.length, old_head_node.edge.length = old_head_node.edge.length, old_tail_node.edge.length

    ###########################################################################
    ### Bipartition Management

    def _get_bipartition(self):
        # Created lazily; starts out mutable so it can be updated as the
        # tree is manipulated.
        if self._bipartition is None:
            self._bipartition = Bipartition(
                    edge=self,
                    is_mutable=True,
                    )
        return self._bipartition
    def _set_bipartition(self, v=None):
        self._bipartition = v
    bipartition = property(_get_bipartition, _set_bipartition)

    def _get_split_bitmask(self):
        return self.bipartition._split_bitmask
    def _set_split_bitmask(self, h):
        self.bipartition._split_bitmask = h
    split_bitmask = property(_get_split_bitmask, _set_split_bitmask)

    def _get_leafset_bitmask(self):
        return self.bipartition._leafset_bitmask
    def _set_leafset_bitmask(self, h):
        self.bipartition._leafset_bitmask = h
    leafset_bitmask = property(_get_leafset_bitmask, _set_leafset_bitmask)

    def _get_tree_leafset_bitmask(self):
        return self.bipartition._tree_leafset_bitmask
    def _set_tree_leafset_bitmask(self, h):
        self.bipartition._tree_leafset_bitmask = h
    tree_leafset_bitmask = property(_get_tree_leafset_bitmask, _set_tree_leafset_bitmask)

    def split_as_bitstring(self):
        # Delegates to the (lazily-created) bipartition.
        return self.bipartition.split_as_bitstring()

    def leafset_as_bitstring(self):
        # Delegates to the (lazily-created) bipartition.
        return self.bipartition.leafset_as_bitstring()

    ###########################################################################
    ### Representation

    def description(self,
            depth=1,
            indent=0,
            itemize="",
            output=None,
            taxon_namespace=None):
        """
        Returns description of object, up to level ``depth``.

        Parameters
        ----------
        depth : int, optional
            Level of detail: `None` or negative yields nothing; >= 1 adds
            length and head/tail node summaries.
        indent : int, optional
            Number of leading spaces on each line.
        itemize : str, optional
            Prefix (e.g., a bullet) for the first line.
        output : file-like, optional
            If given, the description is also written to this stream.
        taxon_namespace : |TaxonNamespace|, optional
            Currently unused; retained for interface compatibility.

        Returns
        -------
        str
            The formatted description.
        """
        if depth is None or depth < 0:
            return
        output_strio = StringIO()
        if self.label is None:
            label = " (%s, Length=%s)" % (id(self), str(self.length))
        else:
            label = " (%s: '%s', Length=%s)" % (id(self), self.label, str(self.length))
        output_strio.write('%s%sEdge object at %s%s'
                % (indent*' ',
                   itemize,
                   hex(id(self)),
                   label))
        if depth >= 1:
            leader1 = ' ' * (indent + 4)
            leader2 = ' ' * (indent + 8)
            output_strio.write('\n%s[Length]' % leader1)
            if self.length is not None:
                length = self.length
            else:
                length = "None"
            output_strio.write('\n%s%s' % (leader2, length))
            output_strio.write('\n%s[Tail Node]' % leader1)
            if self.tail_node is not None:
                tn = self.tail_node.description(0)
            else:
                tn = "None"
            output_strio.write('\n%s%s' % (leader2, tn))
            output_strio.write('\n%s[Head Node]' % leader1)
            if self.head_node is not None:
                hn = self.head_node.description(0)
            else:
                hn = "None"
            output_strio.write('\n%s%s' % (leader2, hn))
        s = output_strio.getvalue()
        if output is not None:
            output.write(s)
        return s
+
+##############################################################################
+### Node
+
+class Node(
+        basemodel.DataObject,
+        basemodel.Annotable):
+    """
+    A :term:`node` on a :term:`tree`.
+    """
+
+    ###########################################################################
+    ### Life-cycle
+
    def __init__(self, **kwargs):
        """
        Keyword Arguments
        -----------------
        taxon : |Taxon|, optional
            The |Taxon| instance representing the operational taxonomic
            unit concept associated with this Node.
        label : string, optional
            A label for this node.
        edge_length : numeric, optional
            Length or weight of the edge subtending this node.

        Raises
        ------
        TypeError
            If any unsupported keyword argument is passed.
        """
        basemodel.DataObject.__init__(self, label=kwargs.pop("label", None))
        self.taxon = kwargs.pop("taxon", None)
        # Age (e.g., depth from the tips) is not computed here; it is
        # assigned externally by tree-calibration routines.
        self.age = None
        # NOTE: ``_edge``, ``_child_nodes``, and ``_parent_node`` must be
        # initialized before the ``self.edge = ...`` assignment below,
        # which presumably goes through a managed ``edge`` property that
        # reads them -- TODO confirm against the property definitions.
        self._edge = None
        self._child_nodes = []
        self._parent_node = None
        self.edge = Edge(head_node=self,
                length=kwargs.pop("edge_length", None))
        if kwargs:
            raise TypeError("Unsupported keyword arguments: {}".format(kwargs))
        self.comments = []
+
+    def __copy__(self, memo=None):
+        raise TypeError("Cannot directly copy Edge")
+
+    def taxon_namespace_scoped_copy(self, memo=None):
+        raise TypeError("Cannot directly copy Node")
+
+    def __deepcopy__(self, memo=None):
+        return basemodel.Annotable.__deepcopy__(self, memo=memo)
+        # if memo is None:
+        #     memo = {}
+        # other = basemodel.Annotable.__deepcopy__(self, memo=memo)
+        # memo[id(self._child_nodes)] = other._child_nodes
+        # for ch in self._child_nodes:
+        #     try:
+        #         och = memo[id(ch)]
+        #         if och not in other._child_nodes:
+        #             other._child_nodes.append(och)
+        #     except KeyError:
+        #         och = copy.deepcopy(ch, memo)
+        #         memo[id(chd)] = och
+        #         if och not in other._child_nodes:
+        #             other._child_nodes.append(och)
+        # return other
+        # return super(Node, self).__deepcopy__(memo=memo)
+
+    ###########################################################################
+    ### Identity
+
    def __hash__(self):
        # Identity-based hash: each Node instance hashes to its ``id()``,
        # consistent with the identity-based ``__eq__`` below.
        return id(self)
+
    def __eq__(self, other):
        # IMPORTANT LESSON LEARNED: if you define __hash__, you *must* define __eq__
        # Identity-based equality: two Node references are equal only if
        # they are the same object.
        return self is other
+
    def __repr__(self):
        # Debug representation: memory address, label (``_label`` is
        # presumably set by the DataObject base -- confirm there), and the
        # associated taxon.
        return "<Node object at {}: '{}' ({})>".format(hex(id(self)), self._label, repr(self.taxon))
+
+    ###########################################################################
+    ### Iterators
+
+    def preorder_iter(self, filter_fn=None):
+        """
+        Pre-order iterator over nodes of subtree rooted at this node.
+
+        Visits self and all descendant nodes, with each node visited before its
+        children. Nodes can optionally be filtered by ``filter_fn``: only nodes
+        for which ``filter_fn`` returns `True` when called with the node as an
+        argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding nodes of the subtree rooted at this node in
+            pre-order sequence.
+        """
+        stack = [self]
+        while stack:
+            node = stack.pop()
+            if filter_fn is None or filter_fn(node):
+                yield node
+            stack.extend(n for n in reversed(node._child_nodes))
+
+    def preorder_internal_node_iter(self, filter_fn=None, exclude_seed_node=False):
+        """
+        Pre-order iterator over internal nodes of subtree rooted at this node.
+
+        Visits self and all internal descendant nodes, with each node visited
+        before its children. In DendroPy, "internal nodes" are nodes that have
+        at least one child node, and thus the root or seed node is typically included
+        unless ``exclude_seed_node`` is `True`. Nodes can optionally be filtered
+        by ``filter_fn``: only nodes for which ``filter_fn`` returns `True` when
+        passed the node as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+        exclude_seed_node : boolean, optional
+            If `False` (default), then the seed node or root is visited. If
+            `True`, then the seed node is skipped.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding the internal nodes of the subtree rooted at
+            this node in pre-order sequence.
+        """
+        if exclude_seed_node:
+            froot = lambda x: x._parent_node is not None
+        else:
+            froot = lambda x: True
+        if filter_fn:
+            f = lambda x: (froot(x) and x._child_nodes and filter_fn(x)) or None
+        else:
+            f = lambda x: (x and froot(x) and x._child_nodes) or None
+        return self.preorder_iter(filter_fn=f)
+
+    def postorder_iter(self, filter_fn=None):
+        """
+        Post-order iterator over nodes of subtree rooted at this node.
+
+        Visits self and all descendant nodes, with each node visited first
+        followed by its children. Nodes can optionally be filtered by
+        ``filter_fn``: only nodes for which ``filter_fn`` returns `True` when
+        called with the node as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding the nodes of the subtree rooted at
+            this node in post-order sequence.
+        """
+        # if self._child_nodes:
+        #     for nd in self._child_nodes:
+        #         for ch in nd.postorder_iter(filter_fn=filter_fn):
+        #             yield ch
+        # if filter_fn is None or filter_fn(self):
+        #     yield self
+        # return
+
+        # stack = [(self, False)]
+        # while stack:
+        #     node, state = stack.pop(0)
+        #     if state:
+        #         if filter_fn is None or filter_fn(node):
+        #             yield node
+        #     else:
+        #         stack.insert(0, (node, True))
+        #         child_nodes = [(n, False) for n in node._child_nodes]
+        #         child_nodes.extend(stack)
+        #         stack = child_nodes
+
+        ## Prefer `pop()` to `pop(0)`.
+        ## Thanks to Mark T. Holder
+        ## From peyotl commits: d1ffef2 + 19fdea1
+        stack = [(self, False)]
+        while stack:
+            node, state = stack.pop()
+            if state:
+                if filter_fn is None or filter_fn(node):
+                    yield node
+            else:
+                stack.append((node, True))
+                stack.extend([(n, False) for n in reversed(node._child_nodes)])
+
+    def postorder_internal_node_iter(self, filter_fn=None, exclude_seed_node=False):
+        """
+        Pre-order iterator over internal nodes of subtree rooted at this node.
+
+        Visits self and all internal descendant nodes, with each node visited
+        after its children. In DendroPy, "internal nodes" are nodes that have
+        at least one child node, and thus the root or seed node is typically
+        included unless ``exclude_seed_node`` is `True`. Nodes can optionally be
+        filtered by ``filter_fn``: only nodes for which ``filter_fn`` returns
+        `True` when passed the node as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+        exclude_seed_node : boolean, optional
+            If `False` (default), then the seed node or root is visited. If
+            `True`, then the seed node is skipped.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding the internal nodes of the subtree rooted at
+            this node in post-order sequence.
+        """
+        if exclude_seed_node:
+            froot = lambda x: x._parent_node is not None
+        else:
+            froot = lambda x: True
+        if filter_fn:
+            f = lambda x: (froot(x) and x._child_nodes and filter_fn(x)) or None
+        else:
+            f = lambda x: (x and froot(x) and x._child_nodes) or None
+        return self.postorder_iter(filter_fn=f)
+
+    def levelorder_iter(self, filter_fn=None):
+        """
+        Level-order iteration over nodes of subtree rooted at this node.
+
+        Visits self and all descendant nodes, with each node and other nodes at
+        the same level (distance from root) visited before their children.
+        Nodes can optionally be filtered by ``filter_fn``: only nodes for which
+        ``filter_fn`` returns `True` when called with the node as an argument are
+        visited.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding nodes of the subtree rooted at this node in
+            level-order sequence.
+        """
+        if filter_fn is None or filter_fn(self):
+            yield self
+        remaining = self.child_nodes()
+        while len(remaining) > 0:
+            node = remaining.pop(0)
+            if filter_fn is None or filter_fn(node):
+                yield node
+            child_nodes = node.child_nodes()
+            remaining.extend(child_nodes)
+
    def level_order_iter(self, filter_fn=None):
        """
        DEPRECATED: Use :meth:`Node.levelorder_iter()` instead.
        """
        # Emit a deprecation warning attributed to the caller's frame,
        # then delegate to the replacement method.
        deprecate.dendropy_deprecation_warning(
                message="Deprecated since DendroPy 4: 'level_order_iter()' will no longer be supported in future releases; use 'levelorder_iter()' instead",
                stacklevel=3)
        return self.levelorder_iter(filter_fn=filter_fn)
+
+    def inorder_iter(self, filter_fn=None):
+        """
+        In-order iteration over nodes of subtree rooted at this node.
+
+        Visits self and all descendant nodes, with each node visited in-between
+        its children. Only valid for strictly-bifurcating trees. Nodes can
+        optionally be filtered by ``filter_fn``: only nodes for which ``filter_fn``
+        returns `True` when called with the node as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding nodes of the subtree rooted at this node in
+            infix or in-order sequence.
+        """
+        if len(self._child_nodes) == 0:
+            if filter_fn is None or filter_fn(self):
+                yield self
+        elif len(self._child_nodes) == 2:
+            for nd in self._child_nodes[0].inorder_iter(filter_fn=filter_fn):
+                yield nd
+            if filter_fn is None or filter_fn(self):
+                yield self
+            for nd in self._child_nodes[1].inorder_iter(filter_fn=filter_fn):
+                yield nd
+        else:
+            raise TypeError("In-order traversal only supported for binary trees")
+
+    def leaf_iter(self, filter_fn=None):
+        """
+        Iterate over all tips or leaves that ultimately descend from this node.
+
+        Visits all leaf or tip nodes descended from this node. Nodes can
+        optionally be filtered by ``filter_fn``: only nodes for which ``filter_fn``
+        returns `True` when called with the node as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding leaf nodes of the subtree rooted at this node.
+        """
+        if filter_fn:
+            ff = lambda x: x.is_leaf() and filter_fn(x) or None
+        else:
+            ff = lambda x: x.is_leaf() and x or None
+        for node in self.postorder_iter(ff):
+            yield node
+
+    def child_node_iter(self, filter_fn=None):
+        """
+        Iterator over all nodes that are the (immediate) children of this node.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding nodes that have this node as a parent.
+        """
+        for node in self._child_nodes:
+            if filter_fn is None or filter_fn(node):
+                yield node
+
+    def child_edge_iter(self, filter_fn=None):
+        """
+        Iterator over all edges that are the (immediate) children of this edge.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Edge| object as an argument
+            and returns `True` if the |Edge| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all edges visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Edge|]
+            An iterator yielding edges that have this edge as a parent.
+        """
+        for node in self._child_nodes:
+            if filter_fn is None or filter_fn(node.edge):
+                yield node.edge
+
+    def ancestor_iter(self, filter_fn=None, inclusive=False):
+        """
+        Iterator over all ancestors of this node.
+
+        Visits all nodes that are the ancestors of this node.  If ``inclusive``
+        is `True`, ``self`` is returned as the first item of the sequence;
+        otherwise ``self`` is skipped. Nodes can optionally be filtered by
+        ``filter_fn``: only nodes for which ``filter_fn`` returns `True` when
+        passed the node as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+        inclusive : boolean, optional
+            If `True`, includes this node in the sequence. If `False`, this is
+            skipped.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            Iterator over all predecessor/ancestor nodes of this node.
+        """
+        if inclusive and (filter_fn is None or filter_fn(self)):
+            yield self
+        node = self
+        while node is not None:
+            node = node._parent_node
+            if node is not None \
+                   and (filter_fn is None or filter_fn(node)):
+                yield node
+
+    def ageorder_iter(self, filter_fn=None, include_leaves=True, descending=False):
+        """
+        Iterator over nodes of subtree rooted at this node in order of the age
+        of the node (i.e., the time since the present).
+
+        Iterates over nodes in order of age ('age' is as given by the ``age``
+        attribute, which is usually the sum of edge lengths from tips
+        to node, i.e., time since present).
+        If ``include_leaves`` is `True` (default), leaves are included in the
+        iteration; if ``include_leaves`` is `False`, leaves will be skipped.
+        If ``descending`` is `False` (default), younger nodes will be returned
+        before older ones; if `True`, older nodes will be returned before
+        younger ones.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (defau
+        include_leaves : boolean, optional
+            If `True` (default), then leaf nodes are included in the iteration.
+            If `False`, then leaf nodes are skipped.lt), then all nodes visited will be yielded.
+        descending : boolean, optional
+            If `False` (default), then younger nodes are visited before older
+            ones. If `True`, then older nodes are visited before younger ones.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            Iterator over age-ordered sequence of nodes in subtree rooted at
+            this node.
+        """
+        # if not descending:
+        #     leaves = [nd for nd in self.leaf_iter()]
+        #     queued_pairs = []
+        #     in_queue = set()
+        #     for leaf in leaves:
+        #         age_nd_tuple = (leaf.age, leaf)
+        #         queued_pairs.insert(bisect.bisect(queued_pairs, age_nd_tuple), age_nd_tuple)
+        #         in_queue.add(leaf)
+        #     while queued_pairs:
+        #         next_el = queued_pairs.pop(0)
+        #         age, nd = next_el
+        #         in_queue.remove(nd)
+        #         p = nd._parent_node
+        #         if p and p not in in_queue:
+        #             age_nd_tuple = (p.age, p)
+        #             queued_pairs.insert(bisect.bisect(queued_pairs, age_nd_tuple), age_nd_tuple)
+        #             in_queue.add(p)
+        #         if include_leaves or nd.is_internal():
+        #             yield nd
+        # else:
+        #     nds = [(nd.age, nd) for nd in self.preorder_iter()]
+        #     nds.sort(reverse=True)
+        #     for nd in nds:
+        #         if include_leaves or nd[1].is_internal():
+        #             yield nd[1]
+        nds = [nd for nd in self.preorder_iter()]
+        if descending:
+            reverse = True
+        else:
+            reverse = False
+        nds.sort(key=lambda x: x.age, reverse=reverse)
+        for nd in nds:
+            if (include_leaves or nd._child_nodes) and (filter_fn is None or filter_fn(nd)):
+                yield nd
+
+    def age_order_iter(self, include_leaves=True, filter_fn=None, descending=False):
+        """
+        Deprecated: use :meth:`Node.ageorder_iter()` instead.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'age_order_iter()' will no longer be supported in future releases; use 'ageorder_iter()' instead",
+                stacklevel=3)
+        return self.ageorder_iter(include_leaves=include_leaves,
+                filter_fn=filter_fn,
+                descending=descending)
+
+    ###########################################################################
+    ### Node Processor
+
+    def apply(self, before_fn=None, after_fn=None, leaf_fn=None):
+        """
+        Applies function ``before_fn`` and ``after_fn`` to all internal nodes and
+        ``leaf_fn`` to all terminal nodes in subtree starting with ``self``, with
+        nodes visited in pre-order.
+
+        Given a tree with preorder sequence of nodes of
+        [a,b,i,e,j,k,c,g,l,m,f,n,h,o,p,]::
+
+                           a
+                          / \
+                         /   \
+                        /     \
+                       /       \
+                      /         \
+                     /           \
+                    /             c
+                   b             / \
+                  / \           /   \
+                 /   e         /     f
+                /   / \       /     / \
+               /   /   \     g     /   h
+              /   /     \   / \   /   / \
+             i   j       k l   m n   o   p
+
+
+        the following order of function calls results:
+
+            before_fn(a)
+            before_fn(b)
+            leaf_fn(i)
+            before_fn(e)
+            leaf_fn(j)
+            leaf_fn(k)
+            after_fn(e)
+            after_fn(b)
+            before_fn(c)
+            before_fn(g)
+            leaf_fn(l)
+            leaf_fn(m)
+            after_fn(g)
+            before_fn(f)
+            leaf_fn(n)
+            before_fn(h)
+            leaf_fn(o)
+            leaf_fn(p)
+            after_fn(h)
+            after_fn(f)
+            after_fn(c)
+            after_fn(a)
+
+        Parameters
+        ----------
+        before_fn : function object or `None`
+            A function object that takes a |Node| as its argument.
+        after_fn : function object or `None`
+            A function object that takes a |Node| as its argument.
+        leaf_fn : function object or `None`
+            A function object that takes a |Node| as its argument.
+
+        Notes
+        -----
+        Adapted from work by Mark T. Holder (the ``peyotl`` module of the Open
+        Tree of Life Project):
+
+            https://github.com/OpenTreeOfLife/peyotl.git
+
+        """
+        # Iterative (explicit-stack) pre-order traversal; ``after_fn`` calls
+        # are interleaved by walking back up the tree whenever a leaf
+        # completes its parent's child list.
+        stack = [self]
+        while stack:
+            node = stack.pop()
+            if not node._child_nodes:
+                # Terminal node: fire the leaf callback ...
+                if leaf_fn:
+                    leaf_fn(node)
+                # ... then fire ``after_fn`` for every ancestor whose subtree
+                # is now fully visited, i.e. for as long as the current node
+                # is the *last* child of its parent.
+                # (while node is the last child of parent ...)
+                while (
+                        (node._parent_node is None)
+                        or (node._parent_node._child_nodes[-1] is node)
+                      ):
+                    node = node._parent_node
+                    if node is not None:
+                        if after_fn is not None:
+                            after_fn(node)
+                    else:
+                        # Walked past the subtree root (``self``'s parent).
+                        break
+            else:
+                # Internal node: fire the pre-visit callback and queue the
+                # children (reversed so they pop in left-to-right order).
+                if before_fn is not None:
+                    before_fn(node)
+                stack.extend([i for i in reversed(node._child_nodes)])
+        return
+
+    ###########################################################################
+    ### Child Node Access and Manipulation
+
+    def set_child_nodes(self, child_nodes):
+        """
+        Assigns the set of child nodes for this node.
+
+        Results in the ``parent_node`` attribute of each |Node| in ``nodes``
+        as well as the ``tail_node`` attribute of corresponding |Edge|
+        objects being assigned to ``self``.
+
+        Parameters
+        ----------
+        child_nodes : collections.Iterable[|Node|]
+            The (iterable) collection of child nodes to be assigned this node
+            as a parent.
+        """
+        del self._child_nodes[:] # list.clear() is not in Python 2.7
+        # Go through add to ensure book-keeping
+        # (e.g. avoiding multiple adds) takes
+        # place.
+        for nd in child_nodes:
+            self.add_child(nd)
+
+    def set_children(self, child_nodes):
+        """Deprecated: use :meth:`Node.set_child_nodes()` instead."""
+        return self.set_child_nodes(child_nodes)
+
+    def add_child(self, node):
+        """
+        Adds a child node to this node if it is not already a child.
+
+        Results in the ``parent_node`` attribute of ``node`` as well as the
+        ``tail_node`` attribute of ``node.edge`` being assigned to ``self``.
+
+        Parameters
+        ----------
+        node : |Node|
+            The node to be added as a child of this node.
+
+        Returns
+        -------
+        |Node|
+            The node that was added.
+        """
+        assert node is not self, "Cannot add node as child of itself"
+        assert self._parent_node is not node, "Cannot add a node's parent as its child: remove the node from its parent's child set first"
+        # Parent pointer is (re)assigned unconditionally; the membership
+        # check below only guards against duplicate entries in the list.
+        node._parent_node = self
+        if node not in self._child_nodes:
+            self._child_nodes.append(node)
+        return node
+
+    def insert_child(self, index, node):
+        """
+        Adds a child node to this node.
+
+        If the node is already a child of this node, then it is moved
+        to the specified position.
+        Results in the ``parent_node`` attribute of ``node`` as well as the
+        ``tail_node`` attribute of ``node.edge`` being assigned to ``self``.
+
+        Parameters
+        ----------
+        index : integer
+            The index before which to insert the new node.
+        node : |Node|
+            The node to be added as a child of this node.
+
+        Returns
+        -------
+        |Node|
+            The node that was added.
+        """
+        node._parent_node = self
+        try:
+            cur_index = self._child_nodes.index(node)
+        except ValueError:
+            pass
+        else:
+            if cur_index == index:
+                return
+            self._child_nodes.remove(node)
+        self._child_nodes.insert(index, node)
+        return node
+
+    def new_child(self, **kwargs):
+        """
+        Create and add a new child to this node.
+
+        Parameters
+        ----------
+        \*\*kwargs : keyword arguments
+            Keyword arguments will be passed directly to the |Node|
+            constructor (:meth:`Node.__init()__`).
+
+        Returns
+        -------
+        |Node|
+            The new child node that was created and added.
+        """
+        node = self.__class__(**kwargs)
+        return self.add_child(node=node)
+
+    def insert_new_child(self, index, **kwargs):
+        """
+        Create and add a new child to this node at a particular position.
+
+        Results in the ``parent_node`` attribute of ``node`` as well as the
+        ``tail_node`` attribute of ``node.edge`` being assigned to ``self``.
+
+        Parameters
+        ----------
+        index : integer
+            The index before which to insert the new node.
+        \*\*kwargs : keyword arguments, optional
+            Keyword arguments will be passed directly to the |Node|
+            constructor (:meth:`Node.__init()__`).
+
+        Returns
+        -------
+        |Node|
+            The new child node that was created and added.
+        """
+        node = self.__class__(**kwargs)
+        return self.insert_child(index=index, node=node)
+
+    def remove_child(self, node, suppress_unifurcations=False):
+        """
+        Removes a node from the child set of this node.
+
+        Results in the parent of the node being removed set to `None`.  If
+        ``suppress_unifurcations`` is `True`, if this node ends up having only one
+        child after removal of the specified node, then this node will be
+        removed from the tree, with its single child added to the child node
+        set of its parent and the edge length adjusted accordingly.
+        ``suppress_unifurcations`` should only be `True` for unrooted trees.
+
+        Parameters
+        ----------
+        node : |Node|
+            The node to be removed.
+        suppress_unifurcations : boolean, optional
+            If `False` (default), no action is taken. If `True`, then if the
+            node removal results in a node with degree of two (i.e., a single
+            parent and a single child), then it will be removed from
+            the tree and its (sole) child will be added as a child of its
+            parent (with edge lengths adjusted accordingly).
+
+        Returns
+        -------
+        |Node|
+            The node removed.
+        """
+        if not node:
+            raise ValueError("Tried to remove an non-existing or null node")
+        children = self._child_nodes
+        if node in children:
+            node._parent_node = None
+            node.edge.tail_node = None
+            index = children.index(node)
+            children.remove(node)
+            if suppress_unifurcations:
+                if self._parent_node:
+                    if len(children) == 1:
+                        child = children[0]
+                        pos = self._parent_node._child_nodes.index(self)
+                        self._parent_node.insert_child(pos, child)
+                        self._parent_node.remove_child(self, suppress_unifurcations=False)
+                        try:
+                            child.edge.length += self.edge.length
+                        except:
+                            pass
+                        self._child_nodes = []
+                else:
+                    to_remove = None
+                    if len(children) == 2:
+                        if children[0].is_internal():
+                            to_remove = children[0]
+                            other = children[1]
+                        elif children[1].is_internal():
+                            to_remove = children[1]
+                            other = children[0]
+                    if to_remove is not None:
+                        try:
+                            other.edge.length += to_remove.edge.length
+                        except:
+                            pass
+                        pos = self._child_nodes.index(to_remove)
+                        self.remove_child(to_remove, suppress_unifurcations=False)
+                        tr_children = to_remove._child_nodes
+                        tr_children.reverse()
+                        for c in tr_children:
+                            self.insert_child(pos, c)
+                        to_remove._child_nodes = []
+        else:
+            raise ValueError("Tried to remove a node that is not listed as a child")
+        return node
+
+    def reversible_remove_child(self, node, suppress_unifurcations=False):
+        """
+        This function is a (less-efficient) version of remove_child that also
+        returns the data needed by reinsert_nodes to "undo" the removal.
+
+        Returns a list of tuples.  The first element of each tuple is the
+        node removed, the other elements are the information needed by
+        ``reinsert_nodes`` in order to restore the tree to the same topology as
+        it was before the call to ``remove_child.`` If ``suppress_unifurcations`` is False
+        then the returned list will contain only one item.
+
+        ``suppress_unifurcations`` should only be called on unrooted trees.
+        """
+        if not node:
+            raise ValueError("Tried to remove an non-existing or null node")
+        children = self._child_nodes
+        try:
+            pos = children.index(node)
+        except:
+            raise ValueError("Tried to remove a node that is not listed as a child")
+        removed = [(node, self, pos, [], None)]
+        node._parent_node = None
+        node.edge.tail_node = None
+        children.remove(node)
+        if suppress_unifurcations:
+            p = self._parent_node
+            if p:
+                if len(children) == 1:
+                    child = children[0]
+                    pos = p._child_nodes.index(self)
+                    p.insert_child(pos, child)
+                    self._child_nodes = []
+                    p.remove_child(self, suppress_unifurcations=False)
+                    e = child.edge
+                    try:
+                        e.length += self.edge.length
+                    except:
+                        e = None
+                    t = (self, p, pos, [child], e)
+                    removed.append(t)
+            else:
+                to_remove = None
+                if len(children) == 2:
+                    if children[0].is_internal():
+                        to_remove = children[0]
+                        other = children[1]
+                    elif children[1].is_internal():
+                        to_remove = children[1]
+                        other = children[0]
+                if to_remove is not None:
+                    e = other.edge
+                    try:
+                        e.length += to_remove.edge.length
+                    except:
+                        e = None
+                    pos = self._child_nodes.index(to_remove)
+                    self.remove_child(to_remove, suppress_unifurcations=False)
+                    tr_children = to_remove._child_nodes
+                    to_remove._child_nodes = []
+                    for n, c in enumerate(tr_children):
+                        new_pos = pos + n
+                        self.insert_child(pos, c)
+                    t = (to_remove, self, pos, tr_children, e)
+                    removed.append(t)
+
+        return removed
+
+    def reinsert_nodes(self, nd_connection_list):
+        """
+        This function should be used to "undo" the effects of
+        Node.reversible_remove_child NOTE: the behavior is only
+        guaranteed if the tree has not been modified between the
+        remove_child and reinsert_nodes calls! (or the tree has been
+        restored such that the node/edge identities are identical to the
+        state before the remove_child call.
+
+        The order of info in each tuple is:
+
+            0 - node removed
+            1 - parent of node removed
+            2 - pos in parent matrix
+            3 - children of node removed that were "stolen"
+            4 - edge that was lengthened by "stealing" length from node's edge
+        """
+        # we unroll the stack of operations
+        for blob in nd_connection_list[-1::-1]:
+            #_LOG.debug(blob)
+            n, p, pos, children, e = blob
+            for c in children:
+                cp = c._parent_node
+                if cp:
+                    cp.remove_child(c)
+                n.add_child(c)
+            p.insert_child(pos, n)
+            if e is not None:
+                e.length -= n.edge.length
+
+    def collapse_neighborhood(self, dist):
+        if dist < 1:
+            return
+        children = self.child_nodes()
+        for ch in children:
+            if not ch.is_leaf():
+                ch.edge.collapse()
+        if self._parent_node:
+            p = self._parent_node
+            self.edge.collapse()
+            p.collapse_neighborhood(dist -1)
+        else:
+            self.collapse_neighborhood(dist - 1)
+
+    def collapse_clade(self):
+        """Collapses all internal edges that are descendants of self."""
+        if self.is_leaf():
+            return
+        leaves = [i for i in self.leaf_iter()]
+        self.set_child_nodes(leaves)
+
+    def collapse_conflicting(self, bipartition):
+        """
+        Collapses every edge in the subtree that conflicts with the given
+        bipartition. This can include the edge subtending subtree_root.
+        """
+        to_collapse_head_nodes = []
+        for nd in self.postorder_iter():
+            if nd._child_nodes and nd.edge.bipartition.is_incompatible_with(bipartition):
+                to_collapse_head_nodes.append(nd)
+        for nd in to_collapse_head_nodes:
+            e = nd.edge
+            e.collapse()
+
+    ###########################################################################
+    ### Edge Access and Manipulation
+
+    def _get_edge(self):
+        """
+        Returns the edge subtending this node.
+        """
+        return self._edge
+    def _set_edge(self, new_edge):
+        """
+        Sets the edge subtending this node, and sets head_node of
+        ``edge`` to point to self.
+        """
+        # if edge is None:
+        #     raise ValueError("A Node cannot have 'None' for an edge")
+        if new_edge is self._edge:
+            return
+        if self._parent_node is not None:
+            # NOTE(review): assigning a new edge also removes this node from
+            # its parent's child list -- presumably because the old edge
+            # embodied that parent/child link; confirm against callers.
+            try:
+                self._parent_node._child_nodes.remove(self)
+            except ValueError:
+                # Already absent from the parent's child list.
+                pass
+
+        ## Minimal management
+        self._edge = new_edge
+        if self._edge:
+            self._edge._head_node = self
+
+    edge = property(_get_edge, _set_edge)
+
+    def _get_edge_length(self):
+        """
+        Returns the length of the edge subtending this node.
+        """
+        return self._edge.length
+    def _set_edge_length(self, v=None):
+        """
+        Sets the length of the edge subtending this node.
+        """
+        self._edge.length = v
+    edge_length = property(_get_edge_length, _set_edge_length)
+
+    def _get_bipartition(self):
+        """
+        Returns the bipartition for the edge subtending this node.
+        """
+        return self._edge.bipartition
+    def _set_bipartition(self, v=None):
+        """
+        Sets the bipartition for the edge subtending this node.
+        """
+        self._edge.bipartition = v
+    bipartition = property(_get_bipartition, _set_bipartition)
+
+    def _get_split_bitmask(self):
+        """Returns the split bitmask of this node's edge's bipartition."""
+        return self._edge.bipartition._split_bitmask
+    def _set_split_bitmask(self, h):
+        """Sets the split bitmask of this node's edge's bipartition."""
+        self._edge.bipartition._split_bitmask = h
+    split_bitmask = property(_get_split_bitmask, _set_split_bitmask)
+
+    def _get_leafset_bitmask(self):
+        """Returns the leafset bitmask of this node's edge's bipartition."""
+        return self._edge.bipartition._leafset_bitmask
+    def _set_leafset_bitmask(self, h):
+        """Sets the leafset bitmask of this node's edge's bipartition."""
+        self._edge.bipartition._leafset_bitmask = h
+    leafset_bitmask = property(_get_leafset_bitmask, _set_leafset_bitmask)
+
+    def _get_tree_leafset_bitmask(self):
+        """Returns the tree leafset bitmask of this node's edge's bipartition."""
+        return self._edge.bipartition._tree_leafset_bitmask
+    def _set_tree_leafset_bitmask(self, h):
+        """Sets the tree leafset bitmask of this node's edge's bipartition."""
+        self._edge.bipartition._tree_leafset_bitmask = h
+    tree_leafset_bitmask = property(_get_tree_leafset_bitmask, _set_tree_leafset_bitmask)
+
+    def split_as_bitstring(self):
+        """Returns the split of this node's edge's bipartition as a bitstring."""
+        return self._edge.bipartition.split_as_bitstring()
+
+    def leafset_as_bitstring(self):
+        """Returns the leafset of this node's edge's bipartition as a bitstring."""
+        return self._edge.bipartition.leafset_as_bitstring()
+
+    ###########################################################################
+    ### Parent Access and Manipulation
+
+    def _get_parent_node(self):
+        """Returns the parent node of this node."""
+        return self._parent_node
+    def _set_parent_node(self, parent):
+        """Sets the parent node of this node."""
+        if self._parent_node is not None:
+            try:
+                self._parent_node._child_nodes.remove(self)
+            except ValueError:
+                pass
+        self._parent_node = parent
+        if self._parent_node is not None:
+            if self not in self._parent_node._child_nodes:
+                self._parent_node._child_nodes.append(self)
+    parent_node = property(_get_parent_node, _set_parent_node)
+
+    ###########################################################################
+    ### General Structural Access and Information
+
+    def is_leaf(self):
+        """
+        Returns `True` if the node is a tip or a leaf node, i.e. has no child
+        nodes.
+
+        Returns
+        -------
+        boolean
+            `True` if the node is a leaf, i.e., has no child nodes. `False`
+            otherwise.
+        """
+        return bool(not self._child_nodes)
+
+    def is_internal(self):
+        """
+        Returns `True` if the node is *not* a tip or a leaf node.
+
+        Returns
+        -------
+        boolean
+            `True` if the node is not a leaf. `False` otherwise.
+        """
+        return bool(self._child_nodes)
+
+    def leaf_nodes(self):
+        """
+        Returns list of all leaf_nodes descended from this node (or just
+        list with ``self`` as the only member if ``self`` is a leaf).
+
+        Note
+        ----
+        Usage of  `leaf_iter()` is preferable for efficiency reasons unless
+        actual list is required.
+
+        Returns
+        -------
+        :py:class:`list` [|Node|]
+           A ``list`` of |Node| objects descended from this node
+           (inclusive of ``self``) that are the leaves.
+        """
+        return [node for node in \
+                self.postorder_iter(lambda x: bool(len(x.child_nodes())==0))]
+
+    def num_child_nodes(self):
+        """
+        Returns number of child nodes.
+
+        Returns
+        -------
+        int
+            Number of children in ``self``.
+        """
+        # Direct count of the internal child-node list.
+        return len(self._child_nodes)
+
+    def child_nodes(self):
+        """
+        Returns a shallow-copy list of all child nodes of this node.
+
+        Note
+        ----
+        Unless an actual ``list`` is needed, iterating over the child nodes using
+        :meth:`Node.child_node_iter()` is preferable to avoid the overhead of
+        list construction.
+
+        Returns
+        -------
+        :py:class:`list` [|Node|]
+           A ``list`` of |Node| objects that have ``self`` as a parent.
+        """
+        return list(self._child_nodes)
+
+    def child_edges(self):
+        """
+        Returns a shallow-copy list of all child edges of this node.
+
+        Note
+        ----
+        Unless an actual ``list`` is needed, iterating over the child edges using
+        :meth:`Node.child_edge_iter()` is preferable to avoid the overhead of
+        list construction.
+
+        Returns
+        -------
+        :py:class:`list` [|Edge|]
+           A ``list`` of |Edge| objects that have ``self`` as a tail node.
+        """
+        return list(ch.edge for ch in self._child_nodes)
+
+    def incident_edges(self):
+        """
+        Return parent and child edges.
+
+        Returns
+        -------
+        :py:class:`list` [|Edge|]
+            A list of edges linking to this node, with outgoing edges (edges
+            connecting to child nodes) followed by the edge connecting
+            this node to its parent.
+        """
+        e = [c.edge for c in self._child_nodes]
+        e.append(self.edge)
+        return e
+
+    def get_incident_edges(self):
+        """Legacy synonym for :meth:`Node.incident_edges()`."""
+        return self.incident_edges()
+
+    def adjacent_nodes(self):
+        """
+        Return parent and child nodes.
+
+        Returns
+        -------
+        :py:class:`list` [|Node|]
+            A list with all child nodes and parent node of this node.
+        """
+        n = [c for c in self._child_nodes]
+        if self._parent_node:
+            n.append(self._parent_node)
+        return n
+
+    def get_adjacent_nodes(self):
+        """Legacy synonym for :meth:`Node.adjacent_edges()`"""
+        return self.adjacent_nodes()
+
+    def sibling_nodes(self):
+        """
+        Return all other children of parent, excluding self.
+
+        Returns
+        -------
+        :py:class:`list` [|Node|]
+            A list of all nodes descended from the same parent as ``self``,
+            excluding ``self``.
+        """
+        p = self._parent_node
+        if not p:
+            return []
+        sisters = [nd for nd in p.child_nodes() if nd is not self]
+        return sisters
+
+    def sister_nodes(self):
+        """Legacy synonym for :meth:`Node.sister_nodes()`"""
+        return self.sibling_nodes()
+
+    ###########################################################################
+    ### Metrics
+
+    def level(self):
+        """
+        Returns the number of nodes between ``self`` and the seed node of the tree.
+
+        Returns
+        -------
+        integer
+            The number of nodes between ``self`` and the seed node of the tree,
+            or 0 if ``self`` has no parent.
+        """
+        if self._parent_node:
+            return self._parent_node.level() + 1
+        else:
+            return 0
+
+    def distance_from_root(self):
+        """
+        Weighted path length of ``self`` from root.
+
+        Returns
+        -------
+        numeric
+            Total weight of all edges connecting ``self`` with the root of the
+            tree.
+        """
+        if self._parent_node and self.edge.length != None:
+            if self._parent_node.distance_from_root == None:
+                return float(self.edge.length)
+            else:
+                distance_from_root = float(self.edge.length)
+                parent_node = self._parent_node
+                # The root is identified when a node with no
+                # parent is encountered. If we want to use some
+                # other criteria (e.g., where a is_root property
+                # is True), we modify it here.
+                while parent_node:
+                    if parent_node.edge.length != None:
+                        distance_from_root = distance_from_root + float(parent_node.edge.length)
+                    parent_node = parent_node._parent_node
+                return distance_from_root
+        elif not self._parent_node and self.edge.length != None:
+            return float(self.edge.length)
+        elif self._parent_node and self.edge.length == None:
+            # what do we do here: parent node exists, but my
+            # length does not?
+            return float(self._parent_node.edge.length)
+        elif not self._parent_node and self.edge.length == None:
+            # no parent node, and no edge length
+            return 0.0
+        else:
+            # WTF????
+            return 0.0
+
+    def distance_from_tip(self):
+        """
+        Maximum weighted length of path of ``self`` to tip.
+
+        If tree is not ultrametric (i.e., descendent edges have different
+        lengths), then count the maximum of edge lengths. Note that
+        :meth:`Tree.calc_node_ages()` is a more efficient way of doing this
+        over the whole tree if this value is need for many or all the nodes on
+        the tree.
+
+        Returns
+        -------
+        numeric
+            Maximum weight of edges connecting ``self`` to tip.
+        """
+        if not self._child_nodes:
+            # A leaf is at distance 0.0 from itself.
+            return 0.0
+        else:
+            distance_from_tips = []
+            for ch in self._child_nodes:
+                if ch.edge.length is not None:
+                    # ``ch.edge_length`` is the property alias for
+                    # ``ch.edge.length``.
+                    curr_edge_length = ch.edge_length
+                else:
+                    # Edges without a length contribute nothing.
+                    curr_edge_length = 0.0
+                if not hasattr(ch, "_distance_from_tip"):
+                    # Memoize each child's result so shared subtrees are not
+                    # recomputed. NOTE(review): the cached value is never
+                    # invalidated, so it can go stale if the tree is modified
+                    # after this call -- confirm callers account for this.
+                    ch._distance_from_tip = ch.distance_from_tip()
+                distance_from_tips.append(ch._distance_from_tip + curr_edge_length)
+            self._distance_from_tip = float(max(distance_from_tips))
+            return self._distance_from_tip
+
+    ###########################################################################
+    ### Representation
+
+    def description(self, depth=1, indent=0, itemize="", output=None, taxon_namespace=None):
+        """
+        Returns description of object, up to level ``depth``.
+
+        Parameters
+        ----------
+        depth : integer, optional
+            If `None` or negative, nothing is produced and `None` is
+            returned; if 0, only the one-line summary is produced; if 1 or
+            greater, the edge, taxon, parent, and children are described too.
+        indent : integer, optional
+            Number of spaces by which to indent each line.
+        itemize : string, optional
+            Prefix (e.g., a bullet marker) placed before the summary line.
+        output : file-like object, optional
+            If given, the description is also written to this stream.
+        taxon_namespace : optional
+            Not used by this method; presumably accepted for signature
+            compatibility with other ``description()`` methods -- TODO
+            confirm.
+
+        Returns
+        -------
+        str
+            The description string (also written to ``output`` if given).
+        """
+        if depth is None or depth < 0:
+            return
+        output_strio = StringIO()
+        label = str(self)
+        output_strio.write('%s%sNode object at %s%s'
+                % (indent*' ',
+                   itemize,
+                   hex(id(self)),
+                   label))
+        if depth >= 1:
+            # Nested detail is indented two extra levels relative to the
+            # caller-supplied indent.
+            leader1 = ' ' * (indent + 4)
+            leader2 = ' ' * (indent + 8)
+            output_strio.write('\n%s[Edge]' % leader1)
+            if self.edge is not None:
+                edge_desc = self.edge.description(0)
+            else:
+                edge_desc = 'None'
+            output_strio.write('\n%s%s' % (leader2, edge_desc))
+
+            output_strio.write('\n%s[Taxon]' % leader1)
+            if self.taxon is not None:
+                taxon_desc = self.taxon.description(0)
+            else:
+                taxon_desc = 'None'
+            output_strio.write('\n%s%s' % (leader2, taxon_desc))
+
+            output_strio.write('\n%s[Parent]' % leader1)
+            if self._parent_node is not None:
+                parent_node_desc = self._parent_node.description(0)
+            else:
+                parent_node_desc = 'None'
+            output_strio.write('\n%s%s' % (leader2, parent_node_desc))
+            output_strio.write('\n%s[Children]' % leader1)
+            if len(self._child_nodes) == 0:
+                output_strio.write('\n%sNone' % leader2)
+            else:
+                for i, cnd in enumerate(self._child_nodes):
+                    output_strio.write('\n%s[%d] %s' % (leader2, i, cnd.description(0)))
+        s = output_strio.getvalue()
+        if output is not None:
+            output.write(s)
+        return s
+
+    ###########################################################################
+    ### Native NEWICK printer
+    ## For debugging we build-in a full-fledged NEWICK composition independent
+    ## of the nexus/newick family of modules. Client code should prefer to
+    ## use Newick/Nexus readers/writers, or Tree.write(), TreeList.write(),
+    ## DataSet.write() etc.
+
+    def _as_newick_string(self, **kwargs):
+        """
+        This returns the Node as a NEWICK statement according to the given
+        formatting rules. This should be used for debugging purposes only.
+        For production purposes, use the the full-fledged 'as_string()'
+        method of the object.
+        """
+        out = StringIO()
+        self._write_newick(out, **kwargs)
+        return out.getvalue()
+
+    def _write_newick(self, out, **kwargs):
+        """
+        This returns the Node as a NEWICK statement according to the given
+        formatting rules. This should be used for debugging purposes only.  For
+        production purposes, use the the full-fledged 'write_to_stream()'
+        method of the object.
+        """
+        edge_lengths = not kwargs.get('suppress_edge_lengths', False)
+        edge_lengths = kwargs.get('edge_lengths', edge_lengths)
+        child_nodes = self.child_nodes()
+        if child_nodes:
+            out.write('(')
+            f_child = child_nodes[0]
+            for child in child_nodes:
+                if child is not f_child:
+                    out.write(',')
+                child._write_newick(out, **kwargs)
+            out.write(')')
+
+        out.write(self._get_node_token(**kwargs))
+        if edge_lengths:
+            e = self.edge
+            if e:
+                sel = e.length
+                if sel is not None:
+                    fmt = kwargs.get('edge_length_formatter', None)
+                    if fmt:
+                        out.write(":%s" % fmt(sel))
+                    else:
+                        s = ""
+                        try:
+                            s = float(sel)
+                            s = str(s)
+                        except ValueError:
+                            s = str(sel)
+                        if s:
+                            out.write(":%s" % s)
+
+    def _get_node_token(self, **kwargs):
+        """returns a string that is an identifier for the node.  This is called
+        by the newick-writing functions, so the kwargs that affect how node
+        labels show up in a newick string are the same ones used here:
+        ``suppress_internal_labels`` is a Boolean, and defaults to False.
+        """
+        is_leaf = (len(self._child_nodes) == 0)
+        if not is_leaf:
+            if kwargs.get("suppress_internal_labels", False) \
+                    or not kwargs.get("include_internal_labels", True):
+                return ""
+        if self.taxon is not None:
+            if self.taxon.label:
+                label = self.taxon.label
+            else:
+                # return "_" # taxon, but no label: anonymous
+                label = "" # "_" is not anonoymous/unnamed, but a name of <blank>; so we return nothing instead
+        else:
+            if self.label:
+                label = self.label
+            else:
+                label = ""
+        if not label or kwargs.get("raw_labels", False):
+            return label
+        elif " " in label and "_" in label:
+            if "'" in label:
+                label.replace("'", "''")
+            return "'{}'".format(label)
+        else:
+            return label
+
+    ###########################################################################
+    ### alternate representation of tree structure for debugging
+
+    def _get_indented_form(self, **kwargs):
+        out = StringIO()
+        self._write_indented_form(out, **kwargs)
+        return out.getvalue()
+
+    def _write_indented_form(self, out, **kwargs):
+        indentation = kwargs.get("indentation", "    ")
+        level = kwargs.get("level", 0)
+        ancestors = []
+        siblings = []
+        n = self
+        while n is not None:
+            n._write_indented_form_line(out, level, **kwargs)
+            n, lev = _preorder_list_manip(n, siblings, ancestors)
+            level += lev
+
+    def _get_indented_form_line(self, level, **kwargs):
+        out = StringIO()
+        self._write_indented_form_line(out, level, **kwargs)
+        return out.getvalue()
+
+    def _write_indented_form_line(self, out, level, **kwargs):
+        indentation = kwargs.get("indentation", "    ")
+        label = _format_node(self, **kwargs)
+        if kwargs.get("bipartitions"):
+            cm = "%s " % _format_bipartition(self.edge.bipartition, **kwargs)
+        else:
+            cm = ""
+        out.write("%s%s%s\n" % ( cm, indentation*level, label))
+
+##############################################################################
+### Tree
+
+class Tree(
+        taxonmodel.TaxonNamespaceAssociated,
+        basemodel.Annotable,
+        basemodel.Deserializable,
+        basemodel.NonMultiReadable,
+        basemodel.Serializable,
+        basemodel.DataObject):
+    """
+    An arborescence, i.e. a fully-connected directed acyclic graph with all
+    edges directing away from the root and toward the tips. The "root" of the
+    tree is represented by the :attr:`Tree.seed_node` attribute.  In unrooted
+    trees, this node is an algorithmic artifact. In rooted trees this node is
+    semantically equivalent to the root.
+    """
+
    def _parse_and_create_from_stream(cls,
            stream,
            schema,
            collection_offset=None,
            tree_offset=None,
            **kwargs):
        """
        Constructs a new |Tree| object and populates it with data from
        file-like object ``stream``.

        If the source defines multiple tree collections (e.g. multiple NEXUS
        "Trees" blocks), then the ``collection_offset`` argument can be
        used to specify the 0-based index of the tree collection, and
        ``tree_offset`` argument can be used to specify the 0-based index of
        the tree within the collection, as the source. If ``collection_offset``
        is not specified or `None`, then the first collection (offset=0) is
        assumed. If ``tree_offset`` is not specified, then the first tree
        (offset=0) is returned.

        Keyword arguments `**kwargs` are passed directly to
        :meth:|TreeList|.read()`, which wraps the actual parsing.

        If no tree is found in the source according to the specified criteria,
        then `None` is returned.

        Notes
        -----
        *All* operational taxonomic unit concepts in the data source will be included
        in the |TaxonNamespace| object associated with the new
        |TreeList| object and its contained |Tree| objects, even those
        not associated with tree being retrieved.

        Parameters
        ----------

        stream : file or file-like object
            Source of data.

        schema : string
            Identifier of format of data in ``stream``

        collection_offset : integer
            0-based index of tree block or collection in source to be parsed.

        tree_offset : integer
            0-based index of tree in source to be parsed.  This is the 0-based
            index of the tree within the collection specified by
            ``collection_offset`` to be retrieved.

        \*\*kwargs : keyword arguments
            Arguments to customize parsing and instantiation this |Tree|
            from the data source, including schema- or format-specific
            handling. The following optional keyword arguments are recognized
            and handled by this constructor:

                ``label``
                    The label or description of the new |Tree| object.
                ``taxon_namespace``
                   Specifies the |TaxonNamespace| object to be attached
                   to the new |TreeList| object. Note that *all*
                   operational taxonomic unit concepts in the data source will
                   be accessioned into the specified |TaxonNamespace|
                   instance. This includes the operation taxonomic unit
                   definitions associated with all tree collections and
                   character matrices in the data source.

            Other keyword arguments may be available, depending on the
            implementation of the reader specialized to handle ``schema``
            formats. See documentation for details on keyword arguments
            supported by readers of various schemas.

        Returns
        -------
        |Tree| or `None`
            The |Tree| object corresponding to the tree in the data
            source, or `None` if no valid tree description was found.

        """
        # Function-local import -- presumably deferred to avoid a circular
        # import between treemodel and treecollectionmodel; confirm.
        from dendropy.datamodel.treecollectionmodel import TreeList

        # Resolve (or create) the single namespace that every collection in
        # the source will share.
        taxon_namespace = taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs, None)
        if taxon_namespace is None:
            taxon_namespace = taxonmodel.TaxonNamespace()

        def tns_factory(label):
            # Always hand back the shared namespace; adopt the source's
            # label only if ours does not yet have one.
            if label is not None and taxon_namespace.label is None:
                taxon_namespace.label = label
            return taxon_namespace

        # Each parsed collection becomes a TreeList of trees of this class.
        tree_list_factory = lambda label, taxon_namespace: TreeList(label=label, taxon_namespace=taxon_namespace, tree_type=cls)
        label = kwargs.pop("label", None)
        reader = dataio.get_reader(schema, **kwargs)
        # if collection_offset is None and tree_offset is not None:
        #     raise TypeError("Cannot specify ``tree_offset`` without specifying ``collection_offset``")
        # Unspecified offsets default to the first collection / first tree.
        if collection_offset is None:
            collection_offset = 0
        if tree_offset is None:
            tree_offset = 0
        tree_lists = reader.read_tree_lists(
                    stream=stream,
                    taxon_namespace_factory=tns_factory,
                    tree_list_factory=tree_list_factory,
                    global_annotations_target=None)
        if not tree_lists:
            raise ValueError("No trees in data source")
        tree_list = tree_lists[collection_offset]
        if not tree_list:
            raise ValueError("No trees available at requested location in data source")
        tree = tree_list[tree_offset]
        # NOTE(review): assigned unconditionally, so when no 'label' kwarg is
        # given this overwrites any label parsed from the source with None --
        # confirm this is intended.
        tree.label = label
        return tree
    # Old-style rebinding (instead of the @classmethod decorator used by
    # 'get()' below); functionally equivalent.
    _parse_and_create_from_stream = classmethod(_parse_and_create_from_stream)
+
    @classmethod
    def get(cls, **kwargs):
        """
        Instantiate and return a *new* |Tree| object from a data source.

        **Mandatory Source-Specification Keyword Argument (Exactly One of the Following Required):**

            - **file** (*file*) -- File or file-like object of data opened for reading.
            - **path** (*str*) -- Path to file of data.
            - **url** (*str*) -- URL of data.
            - **data** (*str*) -- Data given directly.

        **Mandatory Schema-Specification Keyword Argument:**

            - **schema** (*str*) -- Identifier of format of data given by the
              "``file``", "``path``", "``data``", or "``url``" argument
              specified above: ":doc:`newick </schemas/newick>`", ":doc:`nexus
              </schemas/nexus>`", or ":doc:`nexml </schemas/nexml>`". See
              "|Schemas|" for more details.

        **Optional General Keyword Arguments:**

            - **label** (*str*) -- Name or identifier to be assigned to the new
              object; if not given, will be assigned the one specified in the
              data source, or `None` otherwise.
            - **taxon_namespace** (|TaxonNamespace|) -- The |TaxonNamespace|
              instance to use to :doc:`manage the taxon names </primer/taxa>`.
              If not specified, a new one will be created.
            - **collection_offset** (*int*) -- 0-based index of tree block or
              collection in source to be parsed. If not specified then the
              first collection (offset = 0) is assumed.
            - **tree_offset** (*int*) -- 0-based index of tree within the
              collection specified by ``collection_offset`` to be parsed. If
              not specified, then the first tree (offset = 0) is assumed.
            - **ignore_unrecognized_keyword_arguments** (*bool*) -- If `True`,
              then unsupported or unrecognized keyword arguments will not
              result in an error. Default is `False`: unsupported keyword
              arguments will result in an error.

        **Optional Schema-Specific Keyword Arguments:**

            These provide control over how the data is interpreted and
            processed, and supported argument names and values depend on
            the schema as specified by the value passed as the "``schema``"
            argument. See "|Schemas|" for more details.

        **Examples:**

        ::

            # From a URL
            t1 = dendropy.Tree.get(
                    url="http://api.opentreeoflife.org/v2/study/pg_1144/tree/tree2324.nex",
                    schema="nexus")

            # From a file-like object
            t2 = Tree.get(file=open('treefile.tre', 'r'),
                            schema="newick",
                            tree_offset=0)

            # From a path
            t3 = Tree.get(path='sometrees.nexus',
                    schema="nexus",
                    collection_offset=2,
                    tree_offset=1)

            # From a string
            s = "((A,B),(C,D));((A,C),(B,D));"
            # tree will be '((A,B),(C,D))'
            t4 = Tree.get(data=s,
                    schema="newick")
            # tree will be '((A,C),(B,D))'
            t5 = Tree.get(data=s,
                    schema="newick",
                    tree_offset=1)
            # passing keywords to underlying tree parser
            t7 = dendropy.Tree.get(
                    data="((A,B),(C,D));",
                    schema="newick",
                    taxon_namespace=t3.taxon_namespace,
                    suppress_internal_node_taxa=False,
                    preserve_underscores=True)

        """
        # Source/schema dispatch (file/path/url/data) is delegated to the
        # _get_from() hook -- presumably provided by the
        # basemodel.Deserializable mix-in base; confirm there.
        return cls._get_from(**kwargs)
+
+    def yield_from_files(cls,
+            files,
+            schema,
+            taxon_namespace=None,
+            **kwargs):
+        """
+        Iterates over trees from files, returning them one-by-one instead of
+        instantiating all of them in memory at once.
+
+        For operations where it is sufficient to process each tree individually
+        (e.g., performing a calculation or set of calculations on a tree and
+        storing the result, after the which the entire tree itself is
+        not needed), this approach is *far* more performant that reading in the
+        trees using a |TreeList|. This is because a full tree structure
+        requires significant memory overhead, and as memory gets used up and
+        the OS starts page faulting, performance starts taking some serious
+        hits.
+
+        Parameters
+        ----------
+        files : iterable of file paths or file-like objects.
+            Iterable of sources, which can either be strings specifying file
+            paths or file-like objects open for reading. If a source element is
+            a string (``isinstance(i,str) == True``), then it is assumed to be
+            a path to a file. Otherwise, the source is assumed to be a file-like
+            object.
+        schema : string
+            The name of the data format (e.g., "newick" or "nexus").
+        taxon_namespace : |TaxonNamespace| instance
+            The operational taxonomic unit concept namespace to use to manage
+            taxon definitions.
+        \*\*kwargs : keyword arguments
+            These will be passed directly to the schema-parser implementation.
+
+        Yields
+        ------
+        t : |Tree|
+            Trees as read from the file.
+
+        Examples
+        --------
+
+        ::
+
+            taxon_namespace = dendropy.TaxonNamespace()
+            f1 = open("path/to/trees1.nex", "r")
+            f2 = open("path/to/trees2.nex", "r")
+            tree_yielder = dendropy.Tree.yield_from_files(
+                    files=[f1, f2, "path/to/trees3.nex", "path/to/trees4.nex"],
+                    schema="nexus",
+                    taxon_namespace=taxon_namespace,
+                    store_tree_weights=True,
+                    preserve_underscores=True,
+                    rooting="default-unrooted",
+                    ignore_unrecognized_keyword_arguments=True,
+                    )
+            lengths = []
+            root_ages = []
+            for tree in tree_yielder:
+                length = 0.0
+                for edge in tree:
+                    length += edge.length
+                lengths.append(length)
+                tree.calc_node_ages()
+                root_ages.append(tree.seed_node.age)
+
+        """
+        if taxon_namespace is None:
+            taxon_namespace = taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs, None)
+            if taxon_namespace is None:
+                taxon_namespace = taxonmodel.TaxonNamespace()
+        else:
+            assert "taxon_set" not in kwargs
+        if "tree_offset" in kwargs:
+            raise TypeError("'tree_offset' is not supported: trees should be skipped/discarded on the client code side")
+        tree_yielder = dataio.get_tree_yielder(
+                files,
+                schema,
+                taxon_namespace=taxon_namespace,
+                tree_type=cls,
+                **kwargs)
+        return tree_yielder
+    yield_from_files = classmethod(yield_from_files)
+
+    def from_bipartition_encoding(
+            cls,
+            bipartition_encoding,
+            taxon_namespace,
+            is_rooted=False,
+            edge_lengths=None,
+            ):
+        """
+        Reconstructs a tree from a bipartition encoding.
+
+        Parameters
+        ----------
+        bipartition_encoding : iterable[|Bipartition|]
+            An iterable of |Bipartition| instances representing a tree.
+            Bipartitions will be added to the tree in the order given by
+            iterating over this. Bipartitions that are incompatible with the
+            tree will be skipped. So if not all bipartitions are compatible
+            with each other, then the sequence of bipartitions given in
+            ``bipartition_encoding`` should be in order of their support values
+            or some other preference criteria.
+        taxon_namespace : |TaxonNamespace| instance
+            The operational taxonomic unit concept namespace to use to manage
+            taxon definitions.
+        is_rooted : bool
+            Specifies whether or not the tree is rooted.
+        edge_lengths : iterable or `None`
+            An iterable of edge lengths. This should be in the same order
+            as the bipartitions in the bipartition encoding.
+
+        Returns
+        -------
+        |Tree|
+            The tree reconstructed from the given bipartition encoding.
+        """
+        split_bitmasks = [b.split_bitmask for b in bipartition_encoding]
+        if edge_lengths:
+            split_edge_lengths = dict(zip(split_bitmasks, edge_lengths))
+        else:
+            split_edge_lengths = None
+        # elif edge_lengths is not False:
+        #     split_edge_lengths = dict(zip(split_bitmasks,
+        #         [b.edge.length for b in bipartition_encoding]))
+        return cls.from_split_bitmasks(
+                split_bitmasks=split_bitmasks,
+                taxon_namespace=taxon_namespace,
+                split_edge_lengths=split_edge_lengths,
+                is_rooted=is_rooted)
+    from_bipartition_encoding = classmethod(from_bipartition_encoding)
+
    def from_split_bitmasks(
            cls,
            split_bitmasks,
            taxon_namespace,
            is_rooted=False,
            split_edge_lengths=None,
            ):
        """
        Reconstructs a tree from a collection of splits represented as bitmasks.

        Parameters
        ----------
        split_bitmasks : iterable[int]
            An iterable of split bitmasks representing a tree.
            Splits will be added to the tree in the order given by
            iterating over this. Splits that are incompatible with the
            tree will be skipped. So if not all splits are compatible
            with each other, then the sequence of splits given in
            ``split_bitmasks`` should be in order of their support values
            or some other preference criteria.
        taxon_namespace : |TaxonNamespace| instance
            The operational taxonomic unit concept namespace to use to manage
            taxon definitions.
        is_rooted : bool
            Specifies whether or not the tree is rooted.
        split_edge_lengths : dict or `None`
            If `None` (or otherwise falsy), then no edge lengths will be
            added. Otherwise, this should be a dictionary mapping split
            bitmasks to edge lengths.
        Returns
        -------
        |Tree|
            The tree reconstructed from the given bipartition encoding.
        """
        # Attachment points are always located by walking up from a leaf;
        # the mrca()-based alternative branch below is retained but unused.
        leaf_to_root_search = True
        # Start from a star tree over the full namespace.
        reconstructed_tree = cls(taxon_namespace=taxon_namespace)
        # reconstructed_tree.is_rooted = True
        reconstructed_tree.is_rooted = is_rooted
        for taxon in taxon_namespace:
            reconstructed_tree.seed_node.new_child(taxon=taxon)
        all_taxa_bitmask = taxon_namespace.all_taxa_bitmask()
        reconstructed_tree.encode_bipartitions()
        reconstructed_tree.bipartition_encoding = []
        leaves = reconstructed_tree.leaf_nodes()
        if leaf_to_root_search:
            # Map each leaf's singleton bitmask to its node for O(1) lookup.
            to_leaf_dict = {}
            for leaf in leaves:
                to_leaf_dict[leaf.edge.bipartition.leafset_bitmask] = leaf
        root = reconstructed_tree.seed_node
        root_edge = root.edge

        split_bitmasks_to_add = []
        for s in split_bitmasks:
            m = s & all_taxa_bitmask
            if (m != all_taxa_bitmask) and ((m-1) & m): # if not root (i.e., all "1's") and not singleton (i.e., one "1")
                if is_rooted:
                    split_bitmasks_to_add.append(m)
                else:
                    if 1 & m:
                        # Unrooted: normalize so that the taxon with bit 1
                        # is always outside the split.
                        split_bitmasks_to_add.append( (~m) & all_taxa_bitmask )
                    else:
                        # "denormalize" split_bitmasks
                        split_bitmasks_to_add.append(m)

        # Now when we add split_bitmasks in order, we will do a greedy, extended majority-rule consensus tree
        #for freq, split_to_add, split_in_dict in to_try_to_add:
        # NOTE(review): _get_mask appears to be unused in this method.
        _get_mask = lambda x: getattr(x.bipartition, "_leafset_bitmask")
        for split_to_add in split_bitmasks_to_add:
            if (split_to_add & root_edge.bipartition.leafset_bitmask) != split_to_add:
                # incompatible
                continue
            elif leaf_to_root_search:
                # Walk up from an arbitrary leaf inside the split until the
                # current node's leafset covers the whole split.
                lb = bitprocessing.least_significant_set_bit(split_to_add)
                one_leaf = to_leaf_dict[lb]
                parent_node = one_leaf
                while (split_to_add & parent_node.edge.bipartition.leafset_bitmask) != split_to_add:
                    parent_node = parent_node.parent_node
            else:
                parent_node = reconstructed_tree.mrca(split_bitmask=split_to_add)
            if parent_node is None or parent_node.edge.bipartition.leafset_bitmask == split_to_add:
                continue # split is not in tree, or already in tree.
            new_node = cls.node_factory()
            #self.map_split_support_to_node(node=new_node, split_support=freq)
            new_node_children = []
            new_edge = new_node.edge
            new_mask = 0
            for child in parent_node.child_nodes():
                # might need to modify the following if rooted split_bitmasks
                # are used
                cecm = child.edge.bipartition.leafset_bitmask
                if (cecm & split_to_add):
                    assert cecm != split_to_add
                    new_mask |= cecm
                    new_node_children.append(child)
                    # NOTE(review): a Bipartition is rebuilt -- and appended
                    # to bipartition_encoding -- on every matching child, so
                    # intermediate (partial-mask) bipartitions accumulate in
                    # the encoding; confirm this is intended.
                    new_edge.bipartition = Bipartition(
                            leafset_bitmask=new_mask,
                            tree_leafset_bitmask=all_taxa_bitmask,
                            is_mutable=False,
                            compile_bipartition=True)
                    reconstructed_tree.bipartition_encoding.append(new_edge.bipartition)
            # Check to see if we have accumulated all of the bits that we
            #   needed, but none that we don't need.
            if new_edge.bipartition.leafset_bitmask == split_to_add:
                if split_edge_lengths:
                    new_edge.length = split_edge_lengths[split_to_add]
                    #old_split = new_old_split_map[split_to_add]
                    #new_edge.length = split_edge_lengths[old_split]
                # Re-home the matched children under the new node.
                for child in new_node_children:
                    parent_node.remove_child(child)
                    new_node.add_child(child)
                parent_node.add_child(new_node)
                # reconstructed_tree.split_edge_map[split_to_add] = new_edge
        return reconstructed_tree
    # Old-style rebinding (instead of the @classmethod decorator used by
    # 'get()' above); functionally equivalent.
    from_split_bitmasks = classmethod(from_split_bitmasks)
+
+    def node_factory(cls, **kwargs):
+        """
+        Creates and returns a |Node| object.
+
+        Derived classes can override this method to provide support for
+        specialized or different types of nodes on the tree.
+
+        Parameters
+        ----------
+
+        \*\*kwargs : keyword arguments
+            Passed directly to constructor of |Node|.
+
+        Returns
+        -------
+        |Node|
+            A new |Node| object.
+
+        """
+        return Node(**kwargs)
+    node_factory = classmethod(node_factory)
+
+    ###########################################################################
+    ### Special/Lifecycle methods
+
+    def __init__(self, *args, **kwargs):
+        """
+        The constructor can optionally construct a |Tree| object by
+        cloning another |Tree| object passed as the first positional
+        argument, or out of a data source if ``stream`` and ``schema`` keyword
+        arguments are passed with a file-like object and a schema-specification
+        string object values respectively.
+
+        Parameters
+        ----------
+
+        \*args : positional argument, optional
+            If given, should be exactly one |Tree| object. The new
+            |Tree| will then be a structural clone of this argument.
+
+        \*\*kwargs : keyword arguments, optional
+            The following optional keyword arguments are recognized
+            and handled by this constructor:
+
+                ``label``
+                    The label or description of the new |Tree| object.
+                ``taxon_namespace``
+                    Specifies the |TaxonNamespace| object to be
+                    that the new |Tree| object will reference.
+        Examples
+        --------
+
+        Tree objects can be instantiated in the following ways::
+
+            # /usr/bin/env python
+
+            try:
+                from StringIO import StringIO
+            except ImportError:
+                from io import StringIO
+            from dendropy import Tree, TaxonNamespace
+
+            # empty tree
+            t1 = Tree()
+
+            # Tree objects can be instantiated from an external data source
+            # using the 'get()' factory class method
+
+            # From a file-like object
+            t2 = Tree.get(file=open('treefile.tre', 'r'),
+                            schema="newick",
+                            tree_offset=0)
+
+            # From a path
+            t3 = Tree.get(path='sometrees.nexus',
+                    schema="nexus",
+                    collection_offset=2,
+                    tree_offset=1)
+
+            # From a string
+            s = "((A,B),(C,D));((A,C),(B,D));"
+            # tree will be '((A,B),(C,D))'
+            t4 = Tree.get(data=s,
+                    schema="newick")
+            # tree will be '((A,C),(B,D))'
+            t5 = Tree.get(data=s,
+                    schema="newick",
+                    tree_offset=1)
+            # passing keywords to underlying tree parser
+            t7 = dendropy.Tree.get(
+                    data="((A,B),(C,D));",
+                    schema="newick",
+                    taxon_namespace=t3.taxon_namespace,
+                    suppress_internal_node_taxa=False,
+                    preserve_underscores=True)
+
+            # Tree objects can be written out using the 'write()' method.
+            t1.write(file=open('treefile.tre', 'r'),
+                    schema="newick")
+            t1.write(path='treefile.nex',
+                    schema="nexus")
+
+            # Or returned as a string using the 'as_string()' method.
+            s = t1.as_string("nexml")
+
+            # tree structure deep-copied from another tree
+            t8 = dendropy.Tree(t7)
+            assert t8 is not t7                             # Trees are distinct
+            assert t8.symmetric_difference(t7) == 0         # and structure is identical
+            assert t8.taxon_namespace is t7.taxon_namespace             # BUT taxa are not cloned.
+            nds3 = [nd for nd in t7.postorder_node_iter()]  # Nodes in the two trees
+            nds4 = [nd for nd in t8.postorder_node_iter()]  # are distinct objects,
+            for i, n in enumerate(nds3):                    # and can be manipulated
+                assert nds3[i] is not nds4[i]               # independentally.
+            egs3 = [eg for eg in t7.postorder_edge_iter()]  # Edges in the two trees
+            egs4 = [eg for eg in t8.postorder_edge_iter()]  # are also distinct objects,
+            for i, e in enumerate(egs3):                    # and can also be manipulated
+                assert egs3[i] is not egs4[i]               # independentally.
+            lves7 = t7.leaf_nodes()                         # Leaf nodes in the two trees
+            lves8 = t8.leaf_nodes()                         # are also distinct objects,
+            for i, lf in enumerate(lves3):                  # but order is the same,
+                assert lves7[i] is not lves8[i]             # and associated Taxon objects
+                assert lves7[i].taxon is lves8[i].taxon     # are the same.
+
+            # To create deep copy of a tree with a different taxon namespace,
+            # Use 'copy.deepcopy()'
+            t9 = copy.deepcopy(t7)
+
+            # Or explicitly pass in a new TaxonNamespace instance
+            taxa = TaxonNamespace()
+            t9 = dendropy.Tree(t7, taxon_namespace=taxa)
+            assert t9 is not t7                             # As above, the trees are distinct
+            assert t9.symmetric_difference(t7) == 0         # and the structures are identical,
+            assert t9.taxon_namespace is not t7.taxon_namespace         # but this time, the taxa *are* different
+            assert t9.taxon_namespace is taxa                     # as the given TaxonNamespace is used instead.
+            lves7 = t7.leaf_nodes()                         # Leaf nodes (and, for that matter other nodes
+            lves9 = t9.leaf_nodes()                         # as well as edges) are also distinct objects
+            for i, lf in enumerate(lves7):                  # and the order is the same, as above,
+                assert lves7[i] is not lves9[i]             # but this time the associated Taxon
+                assert lves7[i].taxon is not lves9[i].taxon # objects are distinct though the taxon
+                assert lves7[i].taxon.label == lves9[i].taxon.label # labels are the same.
+
+            # to 'switch out' the TaxonNamespace of a tree, replace the reference and
+            # reindex the taxa:
+            t11 = Tree.get(data='((A,B),(C,D));', schema='newick')
+            taxa = TaxonNamespace()
+            t11.taxon_namespace = taxa
+            t11.reindex_subcomponent_taxa()
+
+            # You can also explicitly pass in a seed node:
+            seed = Node(label="root")
+            t12 = Tree(seed_node=seed)
+            assert t12.seed_node is seed
+
+        """
+        if len(args) > 1:
+            # only allow 1 positional argument
+            raise error.TooManyArgumentsError(func_name=self.__class__.__name__, max_args=1, args=args)
+        elif len(args) == 1:
+            if "seed_node" in kwargs:
+                raise TypeError("Cannot specify 'seed_node' if passing in a Tree object to clone")
+            if "stream" in kwargs or "schema" in kwargs:
+                raise TypeError("Constructing from an external stream is no longer supported: use the factory method 'Tree.get(file=...)'")
+            if isinstance(args[0], Node):
+                raise TypeError("Constructing a tree around a Node passed as a position argument is no longer supported; a keyword argument is now required for this approach: use Tree(seed_node=node)")
+            if isinstance(args[0], Tree):
+                self._clone_from(args[0], kwargs)
+            else:
+                raise error.InvalidArgumentValueError(func_name=self.__class__.__name__, arg=args[0])
+        else:
+            basemodel.DataObject.__init__(self, label=kwargs.pop("label", None))
+            taxonmodel.TaxonNamespaceAssociated.__init__(self,
+                    taxon_namespace=taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs, None))
+            self.comments = []
+            self._is_rooted = None
+            self.weight = None
+            self.length_type = None
+            self._seed_node = None
+            self.seed_node = None
+            self.bipartition_encoding = None
+            self._split_bitmask_edge_map = None
+            self._bipartition_edge_map = None
+            seed_node = kwargs.pop("seed_node", None)
+            if seed_node is None:
+                self.seed_node = self.node_factory()
+            else:
+                self.seed_node = seed_node
+                self.update_taxon_namespace()
+        if kwargs:
+            raise TypeError("Unrecognized or unsupported arguments: {}".format(kwargs))
+
+    ##############################################################################
+    ## Bipartitions
+
    def _get_split_edges(self):
        # Deprecated accessor retained for DendroPy 3 compatibility: warns,
        # then forwards to the modern ``bipartition_encoding`` attribute.
        deprecate.dendropy_deprecation_warning(
                message="Deprecated since DendroPy 4: 'Tree.split_edges' will no longer be supported in future releases; use 'Tree.bipartition_encoding' for a list of bipartitions on the tree, or dereference the edge through the 'Tree.bipartition_edge_map' attribute.",
                stacklevel=3)
        return self.bipartition_encoding
    def _set_split_edges(self, m):
        # Deprecated setter counterpart: warns, then assigns directly to
        # ``bipartition_encoding``.
        deprecate.dendropy_deprecation_warning(
                message="Deprecated since DendroPy 4: 'Tree.split_edges' will no longer be supported in future releases; use 'Tree.bipartition_encoding' for a list of bipartitions on the tree, or dereference the edge through the 'Tree.bipartition_edge_map' attribute.",
                stacklevel=3)
        self.bipartition_encoding = m
    # Legacy alias: reads/writes pass through to ``bipartition_encoding``.
    split_edges = property(_get_split_edges, _set_split_edges)
+
+    ##############################################################################
+    ## Identity
+
    def __hash__(self):
        # Identity-based hash, paired with the identity-based __eq__ below:
        # each Tree instance hashes as a distinct object.
        return id(self)
+
    def __eq__(self, other):
        # Trees compare equal only when they are the very same object;
        # structural comparison is done elsewhere (e.g. symmetric_difference).
        return self is other
+
    def _clone_from(self, tree, kwargs_dict):
        """
        Reinitialize ``self`` as a deep copy of ``tree``.

        A ``taxon_namespace`` entry in ``kwargs_dict`` (consumed here)
        selects the namespace for the clone; by default the namespace of
        ``tree`` is shared rather than copied. A ``label`` entry, if
        present, overrides the source tree's label. Returns ``self``.
        """
        # super(Tree, self).__init__()
        memo = {}
        # memo[id(tree)] = self
        taxon_namespace = taxonmodel.process_kwargs_dict_for_taxon_namespace(kwargs_dict, tree.taxon_namespace)
        memo[id(tree.taxon_namespace)] = taxon_namespace
        if taxon_namespace is not tree.taxon_namespace:
            # Cloning into a *different* namespace: pre-map each source Taxon
            # to a same-labeled Taxon in the target namespace (created on
            # demand) so deepcopy substitutes instead of duplicating.
            for t1 in tree.taxon_namespace:
                t2 = taxon_namespace.require_taxon(label=t1.label)
                memo[id(t1)] = t2
        else:
            # Same namespace: seed the memo with identity mappings so
            # deepcopy reuses the original Taxon objects untouched.
            for t1 in tree.taxon_namespace:
                memo[id(t1)] = t1
        t = copy.deepcopy(tree, memo)
        # Adopt the copied tree's entire state wholesale.
        self.__dict__ = t.__dict__
        self.label = kwargs_dict.pop("label", tree.label)
        return self
        # for k in tree.__dict__:
        #     if k == "_annotations":
        #         continue
        #     if k in self.__dict__:
        #         # do not copy if already populated, perhaps by a derived class
        #         continue
        #     self.__dict__[k] = copy.deepcopy(tree.__dict__[k], memo)
        #     memo[id(tree.__dict__[k])] = self.__dict__[k]
        # self.deep_copy_annotations_from(tree)
+
    def __copy__(self):
        # copy.copy() protocol: clone the tree structure while sharing the
        # same TaxonNamespace and Taxon objects with the original.
        return self.taxon_namespace_scoped_copy()
+
+    def taxon_namespace_scoped_copy(self, memo=None):
+        if memo is None:
+            memo = {}
+        # this populates ``memo`` with references to the
+        # the TaxonNamespace and Taxon objects
+        self.taxon_namespace.populate_memo_for_taxon_namespace_scoped_copy(memo)
+        return self.__deepcopy__(memo=memo)
+
    def __deepcopy__(self, memo=None):
        # copy.deepcopy() protocol: delegate to the generic Annotable
        # deep-copy machinery. Any entries pre-seeded into ``memo`` (e.g.
        # the namespace/taxon mappings installed by
        # taxon_namespace_scoped_copy()) are reused rather than re-copied.
        return basemodel.Annotable.__deepcopy__(self, memo=memo)
        # if memo is None:
        #     memo = {}
        # # get or create clone of self
        # try:
        #     other = memo[id(self)]
        # except KeyError:
        #     # create object without initialization
        #     other = self.__class__.__new__(self.__class__)
        #     # store
        #     memo[id(self)] = other
        # # copy other attributes first, skipping annotations
        # for k in self.__dict__:
        #     if k == "_annotations":
        #         continue
        #     if k in other.__dict__:
        #         # do not copy if already populated, perhaps by a derived class
        #         continue
        #     other.__dict__[k] = copy.deepcopy(self.__dict__[k], memo)
        #     memo[id(self.__dict__[k])] = other.__dict__[k]
        #     # assert id(self.__dict__[k]) in memo
        # # create annotations
        # other.deep_copy_annotations_from(self, memo)
        # # return
        # return other
+
+    ###########################################################################
+    ### I/O
+
+    # def _parse_and_add_from_stream(self, stream, schema, **kwargs):
+    #     """
+    #     Redefines this |Tree| object based on data from ``source``.
+
+    #     The current |TaxonNamespace| reference will be retained (and
+    #     modified if new operational taxonomic unit concept definitions
+    #     are encountered in the data source). *All* other information,
+    #     including metadata/annotations, will be lost or replaced with
+    #     information from the new data source.
+
+    #     If the source defines multiple tree collections (e.g. multiple NEXUS
+    #     "Trees" blocks), then the ``collection_offset`` argument
+    #     can be used to specify the 0-based index of the tree collection, and
+    #     ``tree_offset`` argument can be used to specify the 0-based
+    #     index of the tree within the collection, as the source. If
+    #     ``collection_offset`` is not specified or `None`, then all collections in
+    #     the source are merged before considering ``tree_offset``.  If
+    #     ``tree_offset`` is not specified, then the first tree (offset=0) is
+    #     returned.
+
+    #     Notes
+    #     -----
+    #     *All* operational taxonomic unit concepts in the data source will be included
+    #     in the |TaxonNamespace| object associated with the new
+    #     |TreeList| object and its contained |Tree| objects, even those
+    #     not associated with tree being retrieved.
+
+    #     Parameters
+    #     ----------
+
+    #     stream : file or file-like object
+    #         Source of data.
+
+    #     schema : string
+    #         Identifier of format of data in ``stream``
+
+    #     collection_offset : integer or None
+    #         0-based index of tree block or collection in source to be parsed.
+
+    #     tree_offset : integer or None
+    #         0-based index of tree in source to be parsed. If
+    #         ``collection_offset`` is `None`, then this is the 0-based index of
+    #         the tree across all collections considered together. Otherwise,
+    #         this is the 0-based index within a particular collection. If
+    #         ``tree_offset`` is `None` or not specified, then the first tree is
+    #         returned.
+
+    #     \*\*kwargs : keyword arguments
+    #         Arguments to customize parsing and instantiation this |Tree|
+    #         from the data source, including schema- or format-specific
+    #         handling. The following optional keyword arguments are recognized
+    #         and handled by this constructor:
+
+    #             - ``label``: The label the tree |Tree| object.
+
+    #         Other keyword arguments may be available, depending on the
+    #         implementation of the reader specialized to handle ``schema``
+    #         formats. See documentation for details on keyword arguments
+    #         supported by readers of various schemas.
+
+    #     Returns
+    #     -------
+    #     tree : |Tree|
+    #          Returns ``self``.
+
+    #     Raises
+    #     ------
+    #     ValueError
+    #         If no valid trees matching criteria found in source.
+
+    #     """
+    #     if "taxon_namespace" in kwargs and kwargs['taxon_namespace'] is not self.taxon_namespace:
+    #         raise TypeError("Cannot change ``taxon_namespace`` when reading an existing Tree")
+    #     kwargs["taxon_namespace"] = self.taxon_namespace
+    #     tree = Tree._parse_and_create_from_stream(stream, schema, **kwargs)
+    #     if tree is None:
+    #         raise ValueError("Invalid tree source specification")
+    #     self.__dict__ = tree.__dict__
+
+    def _format_and_write_to_stream(self, stream, schema, **kwargs):
+        """
+        Writes out ``self`` in ``schema`` format to a destination given by
+        file-like object ``stream``.
+
+        Parameters
+        ----------
+        stream : file or file-like object
+            Destination for data.
+        schema : string
+            Must be a recognized and tree file schema, such as "nexus",
+            "newick", etc, for which a specialized tree list writer is
+            available. If this is not implemented for the schema specified, then
+            a UnsupportedSchemaError is raised.
+
+        \*\*kwargs : keyword arguments, optional
+            Keyword arguments will be passed directly to the writer for the
+            specified schema. See documentation for details on keyword
+            arguments supported by writers of various schemas.
+
+        """
+        from dendropy.datamodel.treecollectionmodel import TreeList
+        tree_list = TreeList(taxon_namespace=self.taxon_namespace)
+        tree_list.append(self, taxon_import_strategy="add")
+        # Go through TreeList.write() to reduce testing targets (i.e., testing
+        # Tree.write() tests TreeList.write())
+        tree_list.write_to_stream(stream, schema, **kwargs)
+        # writer.write_tree_list(tree_list, stream)
+
+    ###########################################################################
+    ### Node and Edge Collection Access
+
+    def nodes(self, filter_fn=None):
+        """
+        Returns list of nodes on tree.
+
+        Parameters
+        ----------
+
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be included in
+            the list, or `False` if not. If ``filter_fn`` is `None` (default),
+            then all nodes visited will be included.
+
+        Returns
+        -------
+        :py:class:`list` [|Node|]
+            List of |Node| objects in the tree.
+        """
+        nodes = [node for node in self.preorder_node_iter(filter_fn)]
+        return nodes
+
+    def leaf_nodes(self):
+        """
+        Returns list of leaf nodes on the tree.
+
+        Returns
+        -------
+        :py:class:`list` [|Node|]
+            List of leaf |Node| objects in ``self``.
+        """
+        return [leaf for leaf in self.leaf_node_iter()]
+
+    def internal_nodes(self, exclude_seed_node=False):
+        """
+        Returns list of internal nodes in the tree.
+
+        Root or seed node is included unless ``exclude_seed_node`` is `True`.
+
+        Parameters
+        ----------
+        exclude_seed_node : boolean, optional
+            If `False` (default), then the seed node or root is included. If
+            `True`, then the seed node is omitted.
+
+        Returns
+        -------
+        :py:class:`list` [|Node|]
+            List of internal |Node| objects in ``self``.
+        """
+        return [nd for nd in self.preorder_internal_node_iter(exclude_seed_node=exclude_seed_node)]
+
+    def edges(self, filter_fn=None):
+        """
+        Returns list of edges on tree.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Edge| object as an argument
+            and returns `True` if the |Edge| object is to be included,
+            or `False` if not. If ``filter_fn`` is `None` (default), then all
+            edges will be included.
+
+        Returns
+        -------
+        :py:class:`list` [|Edge|]
+            List of |Edge| objects in ``self``.
+        """
+        edges = [edge for edge in self.preorder_edge_iter(filter_fn)]
+        return edges
+
+    def leaf_edges(self):
+        """
+        Returns list of leaf edges on the tree.
+
+        Returns
+        -------
+        :py:class:`list` [|Edge|]
+            List of leaf |Edge| objects in ``self``.
+        """
+        return [leaf.edge for leaf in self.leaf_node_iter()]
+
+    def internal_edges(self, exclude_seed_edge=False):
+        """
+        Returns list of internal edges on tree.
+
+        Parameters
+        ----------
+        exclude_seed_node : boolean, optional
+            If `False` (default), then the edge subtending the seed node or
+            root is included. If `True`, then the seed node is omitted.
+
+        Returns
+        -------
+        :py:class:`list` [|Edge|]
+            List of internal |Edge| objects in ``self``.
+        """
+        return [nd.edge for nd in self.preorder_internal_node_iter(exclude_seed_node=exclude_seed_edge)]
+
+    ###########################################################################
+    ### Node Finders
+
+    def find_node(self, filter_fn):
+        """
+        Finds the first node for which ``filter_fn(node) == True``.
+
+        For example, if::
+
+            filter_fn = lambda n: hasattr(n, 'genes') and n.genes is not None
+
+        then::
+
+            t.find_node(filter_fn=filter_fn)
+
+        will return all nodes which have an attribute 'genes' and this value
+        is not None.
+
+        Parameters
+        ----------
+        filter_fn : function object
+            Takes a single |Node| object as an argument and returns
+            `True` if the node should be returned.
+
+        Returns
+        -------
+        |Node| or `None`
+            Returns first |Node| object for which the filter function
+            ``filter_fn`` returns `True`, or `None` if no such node exists on
+            this tree.
+        """
+        for node in self.preorder_node_iter(filter_fn):
+            return node
+        return None
+
+    def find_node_with_label(self, label):
+        """
+        Returns first node with ``label`` attribute matching ``label`` argument.
+
+        Parameters
+        ----------
+        label : string
+            Value for ``label`` attribute of |Node| object in this tree.
+
+        Returns
+        -------
+        |Node| or `None`
+            Returns first |Node| object with ``label`` attribute having value
+            given in ``label``, or `None` if no such node is found.
+
+        """
+        for node in self.preorder_node_iter():
+            if node.label == label:
+                return node
+        return None
+
+    def find_node_for_taxon(self, taxon):
+        """
+        Returns node associated with |Taxon| object ``taxon``.
+
+        Parameters
+        ----------
+        taxon : |Taxon| object
+            |Taxon| object that should be associated with the node to be
+            returned.
+
+        Returns
+        -------
+        |Node| or `None`
+            Returns first |Node| object with ``taxon`` attribute referencing same
+            object as ``taxon`` argument, or `None` if no such node exists.
+        """
+        for node in self.postorder_node_iter():
+            try:
+                if node.taxon is taxon:
+                    return node
+            except AttributeError:
+                pass
+        return None
+
+    def find_node_with_taxon(self, taxon_filter_fn=None):
+        """
+        Returns node associated with |Taxon| object for which ``taxon_filter_fn``
+        returns `True`.
+
+        Parameters
+        ----------
+        taxon_filter_fn : function object
+            Takes a single |Taxon| object as an argument and returns
+            `True` if the node associated with that |Taxon| should be
+            returned.
+
+        Returns
+        -------
+        |Node| or `None`
+            Returns first |Node| object with ``taxon`` attribute passing filter
+            function ``taxon_filter_fn``, or `None` if no such node is found.
+        """
+        for node in self.preorder_node_iter():
+            if hasattr(node, "taxon") and node.taxon is not None:
+                if taxon_filter_fn(node.taxon):
+                    return node
+        return None
+
+    def find_node_with_taxon_label(self, label):
+        """
+        Returns node associated with |Taxon| object with the specified label.
+
+        Parameters
+        ----------
+        label : string
+            Label of |Taxon| object associated with the node to be returned.
+
+        Returns
+        -------
+        |Node| or `None`
+            Returns first |Node| object with ``taxon`` attribute having label
+            ``label``, or`None` if no such node is found.
+
+        """
+        return self.find_node_with_taxon(lambda x: x.label == label)
+        # taxon = self.taxon_namespace.get_taxon(label=label)
+        # if taxon is None:
+        #     return None
+        # return self.find_node_with_taxon(lambda x: x is taxon)
+
    def mrca(self, **kwargs):
        """
        Returns most-recent common ancestor node of a set of taxa on the tree.

        Returns the shallowest node in the tree (the node nearest the tips)
        that has all of the taxa that:

            * are specified by the leafset bitmask given by the keyword argument
              ``leafset_bitmask``
            * are in the list of Taxon objects given by the keyword argument
              ``taxa``
            * have the labels specified by the list of strings given by the
              keyword argument ``taxon_labels``

        Returns `None` if no appropriate node is found. Assumes that
        bipartitions have been encoded on the tree. It is possible that the
        leafset bitmask is not compatible with the subtree that is returned!
        (compatibility tests are not fully performed).  This function is used
        to find the "insertion point" for a new bipartition via a root to tip
        search.

        Parameters
        ----------
        \*\*kwargs : keyword arguments
            Exactly one of the following must be specified:

                ``leafset_bitmask`` : integer
                    Node object subtended by the first edge compatible with this
                    leafset bitmask will be returned.
                ``taxa`` : collections.Iterable [|Taxon|]
                    Shallowest node object with descendent nodes associated with
                    all the |Taxon| objects specified will be returned.
                ``taxon_labels`` : collections.Iterable [string]
                    Shallowest node object with descendent nodes associated
                    with the minimal set of Taxon objects that
                    collectively have all the labels specified in
                    ``taxon_labels`` will be returned.

            In addition, the following optional keywords are supported:

                ``start_node`` : |Node|, optional
                    If given, specifies the node at which to start searching.
                    If not, defaults to the root or ``seed_node``.

        Returns
        -------
        |Node| or `None`
            The most-recent common ancestor of the nodes specified, or `None`
            if no such node exists.
        """
        start_node = kwargs.get("start_node", self.seed_node)
        leafset_bitmask = None
        if "leafset_bitmask" in kwargs:
            leafset_bitmask = kwargs["leafset_bitmask"]
        else:
            # Resolve the target taxa: either given directly via ``taxa``,
            # or looked up in the namespace via ``taxon_labels``.
            taxa = kwargs.get("taxa", None)
            if taxa is None:
                if "taxon_labels" in kwargs:
                    taxa = self.taxon_namespace.get_taxa(labels=kwargs["taxon_labels"])
                    if len(taxa) != len(kwargs["taxon_labels"]):
                        raise KeyError("Not all labels matched to taxa")
                else:
                    raise TypeError("Must specify one of: 'leafset_bitmask', 'taxa' or 'taxon_labels'")
            if taxa is None:
                raise ValueError("No taxa matching criteria found")
            # Fold the taxa into a single leafset bitmask (one bit per taxon).
            leafset_bitmask = self.taxon_namespace.taxa_bitmask(taxa=taxa)

        if leafset_bitmask is None or leafset_bitmask == 0:
            raise ValueError("Null leafset bitmask (0)")

        # (Re-)encode bipartitions if the start node's bitmask looks unset
        # or the caller indicates the encoding is stale.
        if start_node.edge.bipartition.leafset_bitmask == 0 or not kwargs.get("is_bipartitions_updated", True):
            self.encode_bipartitions(suppress_unifurcations=False)

        # The MRCA can only exist below ``start_node`` if that node already
        # subtends every requested taxon.
        if (start_node.edge.bipartition.leafset_bitmask & leafset_bitmask) != leafset_bitmask:
            return None

        # Walk root-to-tip, descending into any child that still subtends
        # all of the requested taxa; the deepest such node is the MRCA.
        curr_node = start_node
        last_match = start_node
        nd_source = iter(start_node.child_nodes())
        try:
            while True:
                cm = curr_node.edge.bipartition.leafset_bitmask
                cms = (cm & leafset_bitmask)
                if cms:
                    # for at least one taxon cm has 1 and bipartition has 1
                    if cms == leafset_bitmask:
                        # curr_node has all of the 1's that bipartition has
                        if cm == leafset_bitmask:
                            return curr_node
                        last_match = curr_node
                        nd_source = iter(curr_node.child_nodes())
                    else:
                        # we have reached a child that has some, but not all of the
                        #   required taxa as descendants, so we return the last_match
                        return last_match
                curr_node = next(nd_source)
        except StopIteration:
            # we shouldn't reach this if all of the descendants are properly
            #   decorated with leafset_bitmask attributes, but there may be some hacky
            #   context in which we want to allow the function to be called with
            #   leaves that have not been encoded with leafset_bitmasks.
            return last_match
+
+    ###########################################################################
+    ### Node iterators
+
+    def __iter__(self):
+        """
+        Iterate over nodes on tree in pre-order.
+
+        Example
+        -------
+
+        >>> for nd in tree:
+        ...    print(nd.label)
+        ...
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding the internal nodes of the subtree rooted at
+            this node in post-order sequence.
+        """
+        return self.preorder_node_iter()
+
+    def preorder_node_iter(self, filter_fn=None):
+        """
+        Pre-order iterator over nodes in tree.
+
+        Visits nodes in ``self``, with each node visited before its children.
+        Nodes can optionally be filtered by ``filter_fn``: only nodes for which
+        ``filter_fn`` returns `True` when called with the node as an argument are
+        yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding nodes in ``self`` in pre-order sequence.
+        """
+        return self.seed_node.preorder_iter(filter_fn=filter_fn)
+
+    def preorder_internal_node_iter(self, filter_fn=None, exclude_seed_node=False):
+        """
+        Pre-order iterator over internal nodes in tree.
+
+        Visits internal nodes in ``self``, with each node visited before its
+        children. In DendroPy, "internal nodes" are nodes that have at least
+        one child node, and thus the root or seed node is typically included
+        unless ``exclude_seed_node`` is `True`. Nodes can optionally be filtered
+        by ``filter_fn``: only nodes for which ``filter_fn`` returns `True` when
+        passed the node as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+        exclude_seed_node : boolean, optional
+            If `False` (default), then the seed node or root is visited. If
+            `True`, then the seed node is skipped.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding the internal nodes of ``self``.
+        """
+        return self.seed_node.preorder_internal_node_iter(filter_fn=filter_fn,
+                exclude_seed_node=exclude_seed_node)
+
+    def postorder_node_iter(self, filter_fn=None):
+        """
+        Post-order iterator over nodes of tree.
+
+        Visits nodes in ``self``, with each node visited first followed by its
+        children. Nodes can optionally be filtered by ``filter_fn``: only nodes
+        for which ``filter_fn`` returns `True` when called with the node as an
+        argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding the nodes in ``self`` in post-order sequence.
+        """
+        return self.seed_node.postorder_iter(filter_fn=filter_fn)
+
+    def postorder_internal_node_iter(self, filter_fn=None, exclude_seed_node=False):
+        """
+        Pre-order iterator over internal nodes tree.
+
+        Visits internal nodes in ``self``, with each node visited after its
+        children. In DendroPy, "internal nodes" are nodes that have at least
+        one child node, and thus the root or seed node is typically included
+        unless ``exclude_seed_node`` is `True`. Nodes can optionally be filtered
+        by ``filter_fn``: only nodes for which ``filter_fn`` returns `True` when
+        passed the node as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+        exclude_seed_node : boolean, optional
+            If `False` (default), then the seed node or root is visited. If
+            `True`, then the seed node is skipped.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding the internal nodes of ``self`` in post-order
+            sequence.
+        """
+        return self.seed_node.postorder_internal_node_iter(filter_fn=filter_fn,
+                exclude_seed_node=exclude_seed_node)
+
+    def levelorder_node_iter(self, filter_fn=None):
+        """
+        Level-order iteration over nodes of tree.
+
+        Visits nodes in ``self``, with each node and other nodes at the same
+        level (distance from root) visited before their children.  Nodes can
+        optionally be filtered by ``filter_fn``: only nodes for which ``filter_fn``
+        returns `True` when called with the node as an argument are visited.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding nodes of ``self`` in level-order sequence.
+        """
+        return self.seed_node.levelorder_iter(filter_fn=filter_fn)
+
    def level_order_node_iter(self, filter_fn=None):
        """
        Deprecated: use :meth:`Tree.levelorder_node_iter()` instead.
        """
        # Legacy spelling retained for backward compatibility: warn, then
        # perform the same delegation as levelorder_node_iter().
        deprecate.dendropy_deprecation_warning(
                message="Deprecated since DendroPy 4: 'level_order_node_iter()' will no longer be supported in future releases; use 'levelorder_node_iter()' instead",
                stacklevel=3)
        return self.seed_node.levelorder_iter(filter_fn=filter_fn)
+
+    def inorder_node_iter(self, filter_fn=None):
+        """
+        In-order iteration over nodes of tree.
+
+        Visits nodes in ``self``, with each node visited in-between its children.
+        Only valid for strictly-bifurcating trees. Nodes can optionally be
+        filtered by ``filter_fn``: only nodes for which ``filter_fn`` returns
+        `True` when called with the node as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding nodes of ``self`` in infix or in-order sequence.
+        """
+        return self.seed_node.inorder_iter(filter_fn=filter_fn)
+
+    def leaf_node_iter(self, filter_fn=None):
+        """
+        Iterate over all tips or leaves of tree.
+
+        Visits all leaf or tip in ``self``. Nodes can optionally be filtered by
+        ``filter_fn``: only nodes for which ``filter_fn`` returns `True` when
+        called with the node as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding leaf nodes in ``self``.
+        """
+        return self.seed_node.leaf_iter(filter_fn=filter_fn)
+
+    def leaf_iter(self, filter_fn=None):
+        """
+        Deprecated: use :meth:`Tree.leaf_node_iter()` instead.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'leaf_iter()' will no longer be supported in future releases; use 'leaf_node_iter()' instead",
+                stacklevel=3)
+        return self.seed_node.leaf_iter(filter_fn=filter_fn)
+
+    def ageorder_node_iter(self, include_leaves=True, filter_fn=None, descending=False):
+        """
+        Iterator over nodes of tree in order of the age of the node (i.e., the
+                time since the present).
+
+        Iterates over nodes in order of age ('age' is as given by the ``age``
+        attribute, which is usually the sum of edge lengths from tips
+        to node, i.e., time since present).
+        If ``include_leaves`` is `True` (default), leaves are included in the
+        iteration; if ``include_leaves`` is `False`, leaves will be skipped.
+        If ``descending`` is `False` (default), younger nodes will be returned
+        before older ones; if `True`, older nodes will be returned before
+        younger ones.
+
+        Parameters
+        ----------
+        include_leaves : boolean, optional
+            If `True` (default), then leaf nodes are included in the iteration.
+            If `False`, then leaf nodes are skipped.
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+        descending : boolean, optional
+            If `False` (default), then younger nodes are visited before older
+            ones. If `True`, then older nodes are visited before younger ones.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            Iterator over age-ordered sequence of nodes of ``self``.
+        """
+        if self.seed_node.age is None:
+            self.calc_node_ages()
+        return self.seed_node.ageorder_iter(include_leaves=include_leaves,
+                filter_fn=filter_fn,
+                descending=descending)
+
+    def age_order_node_iter(self, include_leaves=True, filter_fn=None, descending=False):
+        """
+        Deprecated: use :meth:`Tree.ageorder_node_iter()` instead.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'age_order_node_iter()' will no longer be supported in future releases; use 'ageorder_node_iter()' instead",
+                stacklevel=3)
+        return self.ageorder_node_iter(include_leaves=include_leaves,
+                filter_fn=filter_fn,
+                descending=descending)
+
+    def apply(self, before_fn=None, after_fn=None, leaf_fn=None):
+        """
+        Applies function ``before_fn`` and ``after_fn`` to all internal nodes and
+        ``leaf_fn`` to all terminal nodes in subtree starting with ``self``, with
+        nodes visited in pre-order.
+
+        Given a tree with preorder sequence of nodes of
+        [a,b,i,e,j,k,c,g,l,m,f,n,h,o,p,]::
+
+                           a
+                          / \
+                         /   \
+                        /     \
+                       /       \
+                      /         \
+                     /           \
+                    /             c
+                   b             / \
+                  / \           /   \
+                 /   e         /     f
+                /   / \       /     / \
+               /   /   \     g     /   h
+              /   /     \   / \   /   / \
+             i   j       k l   m n   o   p
+
+
+        the following order of function calls results:
+
+            before_fn(a)
+            before_fn(b)
+            leaf_fn(i)
+            before_fn(e)
+            leaf_fn(j)
+            leaf_fn(k)
+            after_fn(e)
+            after_fn(b)
+            before_fn(c)
+            before_fn(g)
+            leaf_fn(l)
+            leaf_fn(m)
+            after_fn(g)
+            before_fn(f)
+            leaf_fn(n)
+            before_fn(h)
+            leaf_fn(o)
+            leaf_fn(p)
+            after_fn(h)
+            after_fn(f)
+            after_fn(c)
+            after_fn(a)
+
+        Parameters
+        ----------
+        before_fn : function object or `None`
+            A function object that takes a |Node| as its argument.
+        after_fn : function object or `None`
+            A function object that takes a |Node| as its argument.
+        leaf_fn : function object or `None`
+            A function object that takes a |Node| as its argument.
+
+        Notes
+        -----
+        Adapted from work by Mark T. Holder (the ``peyotl`` module of the Open
+        Tree of Life Project):
+
+            https://github.com/OpenTreeOfLife/peyotl.git
+
+        """
+        self.seed_node.apply(before_fn, after_fn, leaf_fn)
+
+    ###########################################################################
+    ### Edge iterators
+
+    def preorder_edge_iter(self, filter_fn=None):
+        """
+        Pre-order iterator over nodes in tree.
+
+        Visits nodes in ``self``, with each node visited before its children.
+        Nodes can optionally be filtered by ``filter_fn``: only nodes for which
+        ``filter_fn`` returns `True` when called with the node as an argument are
+        yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Node| object as an argument
+            and returns `True` if the |Node| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all nodes visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Node|]
+            An iterator yielding nodes in ``self`` in pre-order sequence.
+        """
+        # NOTE: from-scratch implementation here instead of wrapping
+        # `preorder_node_iter()`for efficiency
+        stack = [self.seed_node._edge]
+        while stack:
+            edge = stack.pop()
+            if filter_fn is None or filter_fn(edge):
+                yield edge
+            stack.extend(n._edge for n in reversed(edge._head_node._child_nodes))
+
+    def preorder_internal_edge_iter(self, filter_fn=None, exclude_seed_edge=False):
+        """
+        Pre-order iterator over internal edges in tree.
+
+        Visits internal edges in ``self``, with each edge visited before its
+        children. In DendroPy, "internal edges" are edges that have at least
+        one child edge, and thus the root or seed edge is typically included
+        unless ``exclude_seed_edge`` is `True`. Edges can optionally be filtered
+        by ``filter_fn``: only edges for which ``filter_fn`` returns `True` when
+        passed the edge as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Edge| object as an argument
+            and returns `True` if the |Edge| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all edges visited will be yielded.
+        exclude_seed_edge : boolean, optional
+            If `False` (default), then the edge subtending the seed node or
+            root is visited. If `True`, then this edge is skipped.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Edge|]
+            An iterator yielding the internal edges of ``self``.
+        """
+        # NOTE: from-scratch implementation here instead of wrapping
+        # `preorder_internal_node_iter()`for efficiency
+        if exclude_seed_edge:
+            froot = lambda e: e._head_node._parent_node is not None
+        else:
+            froot = lambda e: True
+        if filter_fn:
+            f = lambda x: (froot(x) and x._head_node._child_nodes and filter_fn(x)) or None
+        else:
+            f = lambda x: (x and froot(x) and x._head_node._child_nodes) or None
+        return self.preorder_edge_iter(filter_fn=f)
+
+
+    def postorder_edge_iter(self, filter_fn=None):
+        """
+        Post-order iterator over edges of tree.
+
+        Visits edges in ``self``, with each edge visited first followed by its
+        children. Edges can optionally be filtered by ``filter_fn``: only edges
+        for which ``filter_fn`` returns `True` when called with the edge as an
+        argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Edge| object as an argument
+            and returns `True` if the |Edge| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all edges visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Edge|]
+            An iterator yielding the edges in ``self`` in post-order sequence.
+
+        """
+        # NOTE: custom implementation here instead of wrapping
+        # `postorder_node_iter()`for efficiency
+
+        # stack = [(self.seed_node._edge, False)]
+        # while stack:
+        #     edge, state = stack.pop(0)
+        #     if state:
+        #         if filter_fn is None or filter_fn(edge):
+        #             yield edge
+        #     else:
+        #         stack.insert(0, (edge, True))
+        #         child_edges = [(n._edge, False) for n in edge._head_node._child_nodes]
+        #         child_edges.extend(stack)
+        #         stack = child_edges
+
+        ## Prefer `pop()` to `pop(0)`.
+        ## Thanks to Mark T. Holder
+        ## From peyotl commits: d1ffef2 + 19fdea1
+        stack = [(self.seed_node._edge, False)]
+        while stack:
+            edge, state = stack.pop()
+            if state:
+                if filter_fn is None or filter_fn(edge):
+                    yield edge
+            else:
+                stack.append((edge, True))
+                stack.extend([(n._edge, False) for n in reversed(edge._head_node._child_nodes)])
+
+    def postorder_internal_edge_iter(self, filter_fn=None, exclude_seed_edge=False):
+        """
+        Pre-order iterator over internal edges tree.
+
+        Visits internal edges in ``self``, with each edge visited after its
+        children. In DendroPy, "internal edges" are edges that have at least
+        one child edge, and thus the root or seed edge is typically included
+        unless ``exclude_seed_edge`` is `True`. Edges can optionally be filtered
+        by ``filter_fn``: only edges for which ``filter_fn`` returns `True` when
+        passed the edge as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Edge| object as an argument
+            and returns `True` if the |Edge| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all edges visited will be yielded.
+        exclude_seed_edge : boolean, optional
+            If `False` (default), then the seed edge or root is visited. If
+            `True`, then the seed edge is skipped.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Edge|]
+            An iterator yielding the internal edges of ``self`` in post-order
+            sequence.
+        """
+        # NOTE: from-scratch implementation here instead of wrapping
+        # `preorder_internal_node_iter()`for efficiency
+        if exclude_seed_edge:
+            froot = lambda e: e._head_node._parent_node is not None
+        else:
+            froot = lambda e: True
+        if filter_fn:
+            f = lambda x: (froot(x) and x._head_node._child_nodes and filter_fn(x)) or None
+        else:
+            f = lambda x: (x and froot(x) and x._head_node._child_nodes) or None
+        return self.postorder_edge_iter(filter_fn=f)
+
+    def levelorder_edge_iter(self, filter_fn=None):
+        """
+        Level-order iteration over edges of tree.
+
+        Visits edges in ``self``, with each edge and other edges at the same
+        level (distance from root) visited before their children.  Edges can
+        optionally be filtered by ``filter_fn``: only edges for which ``filter_fn``
+        returns `True` when called with the edge as an argument are visited.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Edge| object as an argument
+            and returns `True` if the |Edge| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all edges visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Edge|]
+            An iterator yielding edges of ``self`` in level-order sequence.
+        """
+        if filter_fn is not None:
+            f = lambda x : filter_fn(x.edge)
+        else:
+            f = None
+        for nd in self.seed_node.levelorder_iter(filter_fn=f):
+            yield nd.edge
+
+    def level_order_edge_iter(self, filter_fn=None):
+        """
+        Deprecated: use :meth:`Tree.levelorder_edge_iter()` instead.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'level_order_edge_iter()' will no longer be supported in future releases; use 'levelorder_edge_iter()' instead",
+                stacklevel=3)
+        return self.levelorder_edge_iter(filter_fn=filter_fn)
+
+    def inorder_edge_iter(self, filter_fn=None):
+        """
+        In-order iteration over edges of tree.
+
+        Visits edges in ``self``, with each edge visited in-between its children.
+        Only valid for strictly-bifurcating trees. Edges can optionally be
+        filtered by ``filter_fn``: only edges for which ``filter_fn`` returns
+        `True` when called with the edge as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Edge| object as an argument
+            and returns `True` if the |Edge| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all edges visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Edge|]
+            An iterator yielding edges of ``self`` in infix or in-order sequence.
+        """
+        if filter_fn is not None:
+            f = lambda x : filter_fn(x.edge)
+        else:
+            f = None
+        for nd in self.seed_node.inorder_iter(filter_fn=f):
+            yield nd.edge
+
+    def leaf_edge_iter(self, filter_fn=None):
+        """
+        Iterate over all tips or leaves of tree.
+
+        Visits all leaf or tip in ``self``. Edges can optionally be filtered by
+        ``filter_fn``: only edges for which ``filter_fn`` returns `True` when
+        called with the edge as an argument are yielded.
+
+        Parameters
+        ----------
+        filter_fn : function object, optional
+            A function object that takes a |Edge| object as an argument
+            and returns `True` if the |Edge| object is to be yielded by
+            the iterator, or `False` if not. If ``filter_fn`` is `None`
+            (default), then all edges visited will be yielded.
+
+        Returns
+        -------
+        :py:class:`collections.Iterator` [|Edge|]
+            An iterator yielding leaf edges in ``self``.
+        """
+        if filter_fn is not None:
+            f = lambda x : filter_fn(x.edge)
+        else:
+            f = None
+        for nd in self.seed_node.leaf_iter(filter_fn=f):
+            yield nd.edge
+
+    ###########################################################################
+    ### Taxa Management
+
    def reconstruct_taxon_namespace(self,
            unify_taxa_by_label=True,
            taxon_mapping_memo=None):
        """
        Remaps the |Taxon| objects of nodes in ``self`` onto counterparts in
        ``self.taxon_namespace``, creating new taxa in the namespace as
        needed.

        Parameters
        ----------
        unify_taxa_by_label : boolean, optional
            If `True` (default), every node taxon is remapped, matching by
            label, so distinct |Taxon| objects that share a label end up
            mapped to a single namespace taxon. If `False`, only node taxa
            not already in the namespace are remapped, each to a freshly
            created |Taxon| object.
        taxon_mapping_memo : dict, optional
            Mapping of original |Taxon| objects to their replacements. Used
            and updated here, so a memo shared across calls reuses the same
            replacement taxa (presumably to keep multiple trees consistent
            -- not verifiable from this method alone).
        """
        if taxon_mapping_memo is None:
            taxon_mapping_memo = {}
        for node in self:
            # Remap only taxon-bearing nodes; when not unifying by label,
            # taxa already present in the namespace are left untouched.
            if (node.taxon is not None
                    and (unify_taxa_by_label or node.taxon not in self.taxon_namespace)):
                t = taxon_mapping_memo.get(node.taxon, None)
                if t is None:
                    # taxon to use not given and
                    # we have not yet created a counterpart
                    if unify_taxa_by_label:
                        # this will force usage of any taxon with
                        # a label that matches the current taxon
                        t = self.taxon_namespace.require_taxon(label=node.taxon.label)
                    else:
                        # this will unconditionally create a new taxon
                        t = self.taxon_namespace.new_taxon(label=node.taxon.label)
                    taxon_mapping_memo[node.taxon] = t
                else:
                    # taxon to use is given by mapping; ensure it is
                    # registered in this tree's namespace
                    self.taxon_namespace.add_taxon(t)
                node.taxon = t
+
+    def update_taxon_namespace(self):
+        """
+        All |Taxon| objects in ``self`` that are not in
+        ``self.taxon_namespace`` will be added.
+        """
+        for nd in self:
+            if nd.taxon is not None:
+                self.taxon_namespace.add_taxon(nd.taxon)
+        return self.taxon_namespace
+
+    def poll_taxa(self, taxa=None):
+        """
+        Returns a set populated with all of |Taxon| instances associated
+        with ``self``.
+
+        Parameters
+        ----------
+        taxa : set()
+            Set to populate. If not specified, a new one will be created.
+
+        Returns
+        -------
+        set[|Taxon|]
+            Set of taxa associated with ``self``.
+        """
+        if taxa is None:
+            taxa = set()
+        for nd in self:
+            if nd.taxon is not None:
+                taxa.add(nd.taxon)
+        return taxa
+
+    def infer_taxa(self):
+        """
+        Creates (and returns) a new TaxonNamespace object for ``self`` populated
+        with taxa from this tree.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'infer_taxa()' will no longer be supported in future releases; use 'update_taxon_namespace()' instead",
+                stacklevel=3)
+        taxon_namespace = taxonmodel.TaxonNamespace()
+        for node in self.postorder_node_iter():
+            if node.taxon is not None:
+                taxon_namespace.add_taxon(node.taxon)
+        self.taxon_namespace = taxon_namespace
+        return taxon_namespace
+
+    def reindex_subcomponent_taxa(self):
+        """
+        Remaps node taxon objects
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'reindex_subcomponent_taxa()' will no longer be supported in future releases; use 'reconstruct_taxon_namespace()' instead",
+                stacklevel=3)
+        for node in self.postorder_node_iter():
+            t = node.taxon
+            if t:
+                node.taxon = self.taxon_namespace.require_taxon(label=t.label)
+
+    def unassign_taxa(self, exclude_leaves=False, exclude_internal=False):
+        """
+        Strips taxon assignments from tree. If ``exclude_leaves`` is True,
+        then taxa on leaves will be retained. If ``exclude_internal`` is True,
+        then taxa on internal nodes will be retained. The ``taxon_namespace`` is not
+        affected by this operation.
+        """
+        for nd in self.postorder_node_iter():
+            if (len(nd._child_nodes) == 0) and not exclude_leaves:
+                nd.taxon = None
+            elif (len(nd._child_nodes) > 0) and not exclude_internal:
+                nd.taxon = None
+
+    def randomly_assign_taxa(self, create_required_taxa=True, rng=None):
+        """
+        Randomly assigns taxa to leaf nodes. If the number of taxa defined in
+        the taxon set of the tree is more than the number of tips, then a random
+        subset of taxa in ``taxon_namespace`` will be assigned to the tips of tree.
+        If the number of tips is more than the number of taxa in the ``taxon_namespace``,
+        and ``add_extra_taxa`` is not True [default], then new Taxon
+        objects will be created and added to the ``taxon_namespace``; if ``create_required_taxa``
+        is False, then an exception is raised.
+
+        In addition, a Random() object or equivalent can be passed using ``rng``;
+        otherwise GLOBAL_RNG is used.
+        """
+        if rng is None:
+            rng = GLOBAL_RNG
+        if len(self.taxon_namespace) == 0:
+            for i, nd in enumerate(self.leaf_nodes()):
+                nd.taxon = self.taxon_namespace.require_taxon(label=("T%d" % (i+1)))
+        else:
+            taxa = [t for t in self.taxon_namespace]
+            for i, nd in enumerate(self.leaf_nodes()):
+                if len(taxa) > 0:
+                    nd.taxon = taxa.pop(rng.randint(0, len(taxa)-1))
+                else:
+                    if not create_required_taxa:
+                        raise ValueError("TaxonNamespace has %d taxa, but tree has %d tips" % (len(self.taxon_namespace), len(self.leaf_nodes())))
+                    label = "T%d" % (i+1)
+                    k = 0
+                    while self.taxon_namespace.has_taxon(label=label):
+                        label = "T%d" % (i+1+k)
+                        k += 1
+                    nd.taxon = self.taxon_namespace.require_taxon(label=label)
+
+    ###########################################################################
+    ### Structure
+
+    def _get_is_rootedness_undefined(self):
+        return self._is_rooted is None
+    is_rootedness_undefined = property(_get_is_rootedness_undefined)
+    # legacy:
+    rooting_state_is_undefined = property(_get_is_rootedness_undefined)
+
+    def _get_is_rooted(self):
+        return None if self._is_rooted is None else self._is_rooted
+    def _set_is_rooted(self, val):
+        self._is_rooted = val
+    is_rooted = property(_get_is_rooted, _set_is_rooted)
+
+    def _get_is_unrooted(self):
+        return None if self._is_rooted is None else (not self._is_rooted)
+    def _set_is_unrooted(self, val):
+        self._is_rooted = not val
+    is_unrooted = property(_get_is_unrooted, _set_is_unrooted)
+
+    def collapse_basal_bifurcation(self, set_as_unrooted_tree=True):
+        "Converts a degree-2 node at the root to a degree-3 node."
+        seed_node = self.seed_node
+        if not seed_node:
+            return
+        child_nodes = seed_node.child_nodes()
+        if len(child_nodes) != 2:
+            return
+
+        if len(child_nodes[1].child_nodes()) >= 2:
+            to_keep, to_del = child_nodes
+        elif len(child_nodes[0].child_nodes()) >= 2:
+            to_del, to_keep = child_nodes
+        else:
+            return
+        to_del_edge = to_del.edge
+        try:
+            to_keep.edge.length += to_del_edge.length
+        except:
+            pass
+        # print to_keep.edge.length, to_del_edge.length, [id(c) for c in to_del_edge.head_node.child_nodes()]
+        to_del_edge.collapse(adjust_collapsed_head_children_edge_lengths=False)
+        if set_as_unrooted_tree:
+            self.is_rooted = False
+        return self.seed_node
+
+    def _get_seed_node(self):
+        return self._seed_node
+    def _set_seed_node(self, node):
+        self._seed_node = node
+        if self._seed_node is not None:
+            self._seed_node.parent_node = None
+    seed_node = property(_get_seed_node, _set_seed_node)
+
+    def deroot(self):
+        self.collapse_basal_bifurcation(set_as_unrooted_tree=True)
+
    def reseed_at(self,
            new_seed_node,
            update_bipartitions=False,
            collapse_unrooted_basal_bifurcation=True,
            suppress_unifurcations=True):
        """
        Reseeds the tree at a different (existing) node.

        Takes an internal node, ``new_seed_node`` that must already be in the
        tree and rotates the tree such that ``new_seed_node`` is the ``seed_node``
        of the tree. This is a 'soft' rerooting -- i.e., changes the tree
        representation so tree traversal behaves as if the tree is rooted at
        'new_seed_node', but it does not actually change the tree's rooting
        state.  If ``update_bipartitions`` is True, then the edges'
        ``bipartition_bitmask`` and the tree's ``bipartition_edge_map`` attributes
        will be updated. If the *old* root of the tree had an outdegree of 2,
        then after this operation, it will have an outdegree of one. In this
        case, unless ``suppress_unifurcations`` is False, then it will be removed
        from the tree.
        """

        # def _dump_node(nd, name):
        #     print("- {}: {}".format(name, nd.label))
        #     if nd._parent_node:
        #         print("    Node Parent: {}".format(nd._parent_node.label))
        #     else:
        #         print("    Node Parent: None")
        #     if nd._edge.tail_node:
        #         print("    Edge Parent: {}".format(nd._edge.tail_node.label))
        #     else:
        #         print("    Edge Parent: None")
        #     debug_children = []
        #     for ch in nd._child_nodes:
        #         parts = []
        #         if ch._parent_node:
        #             parts.append(ch._parent_node.label)
        #         else:
        #             parts.append("None")
        #         if ch.edge.tail_node:
        #             parts.append(ch.edge.tail_node.label)
        #         else:
        #             parts.append("None")
        #         debug_children.append("{} ({})".format(ch.label, "/".join(parts)))
        #     debug_children = ", ".join(debug_children)
        #     print("    Children (Node Parent, Edge Tail Node Parent): {}".format(debug_children))

        if self.seed_node is new_seed_node:
            # do not just return: allow for updating of bipartitions,
            # collapsing of unifurcations, collapsing of unrooted basal
            # bifurcations
            pass
        else:
            # NOTE(review): ``old_seed_node`` is assigned but never read in
            # this method -- appears to be vestigial.
            old_seed_node = self.seed_node
            old_parent_node = new_seed_node._parent_node
            if old_parent_node is None:
                return

            # Remember whether the new seed is a leaf *before* any edges are
            # inverted (inversion gives it children).
            if new_seed_node._child_nodes:
                new_seed_node_is_leaf = False
            else:
                new_seed_node_is_leaf = True

            # Collect the chain of edges from the new seed node up to the
            # old root, then invert them starting from the root end so each
            # inversion sees a consistent parent chain.
            edges_to_invert = []
            current_node = new_seed_node
            while current_node:
                if current_node._parent_node is not None:
                    edges_to_invert.append(current_node.edge)
                current_node = current_node._parent_node
            while edges_to_invert:
                edge = edges_to_invert.pop()
                edge.invert(update_bipartitions=update_bipartitions)

            if new_seed_node_is_leaf and suppress_unifurcations:
                ## Cannot just suppress_unifurcations, because wrong node will be deleted
                ## need to remove child (i.e. new seed node's old parent, which is now its child, needs to be deleted)
                # self.suppress_unifurcations(update_bipartitions=update_bipartitions)
                if len(new_seed_node._child_nodes) == 1:
                    # Splice out the former parent: its children become
                    # children of the new seed node directly.
                    nsn_ch = new_seed_node._child_nodes[0]
                    new_seed_node.remove_child(nsn_ch)
                    for ch in nsn_ch._child_nodes:
                        new_seed_node.add_child(ch)
            self.seed_node = new_seed_node

        if update_bipartitions:
            # Re-encoding bipartitions also performs the requested
            # unifurcation/basal-bifurcation clean-up.
            self.encode_bipartitions(
                    suppress_unifurcations=suppress_unifurcations,
                    collapse_unrooted_basal_bifurcation=collapse_unrooted_basal_bifurcation)
        else:
            if (collapse_unrooted_basal_bifurcation
                    and not self._is_rooted
                    and len(self.seed_node._child_nodes) == 2):
                self.collapse_basal_bifurcation()
            if suppress_unifurcations:
                self.suppress_unifurcations()

        return self.seed_node
+
+    def to_outgroup_position(self, outgroup_node, update_bipartitions=False, suppress_unifurcations=True):
+        """Reroots the tree at the parent of ``outgroup_node`` and makes ``outgroup_node`` the first child
+        of the new root.  This is just a convenience function to make it easy
+        to place a clade as the first child under the root.
+        Assumes that ``outgroup_node`` and ``outgroup_node._parent_node`` and are in the tree/
+        If ``update_bipartitions`` is True, then the edges' ``bipartition`` and the tree's
+        ``bipartition_encoding`` attributes will be updated.
+        If the *old* root of the tree had an outdegree of 2, then after this
+        operation, it will have an outdegree of one. In this case, unless
+        ``suppress_unifurcations`` is False, then it will be
+        removed from the tree.
+        """
+        p = outgroup_node._parent_node
+        assert p is not None
+        self.reseed_at(p, update_bipartitions=update_bipartitions, suppress_unifurcations=suppress_unifurcations)
+        p.remove_child(outgroup_node)
+        _ognlen = outgroup_node.edge.length
+        p.insert_child(0, outgroup_node)
+        assert outgroup_node.edge.length == _ognlen
+        return self.seed_node
+
+    def reroot_at_node(self, new_root_node, update_bipartitions=False, suppress_unifurcations=True):
+        """
+        Takes an internal node, ``new_seed_node`` that must already be in the tree and
+        roots the tree at that node.
+        This is a 'hard' rerooting -- i.e., changes the tree
+        representation so tree traversal behaves as if the tree is rooted at
+        'new_seed_node', *and* changes the tree's rooting state.
+        If ``update_bipartitions`` is True, then the edges' ``bipartition`` and the tree's
+        ``bipartition_encoding`` attributes will be updated.
+        If the *old* root of the tree had an outdegree of 2, then after this
+        operation, it will have an outdegree of one. In this case, unless
+        ``suppress_unifurcations`` is False, then it will be
+        removed from the tree.
+        """
+        self.reseed_at(new_seed_node=new_root_node,
+                update_bipartitions=False,
+                suppress_unifurcations=suppress_unifurcations)
+        self.is_rooted = True
+        if update_bipartitions:
+            self.update_bipartitions(suppress_unifurcations=suppress_unifurcations)
+        return self.seed_node
+
+    def reroot_at_edge(self,
+            edge,
+            length1=None,
+            length2=None,
+            update_bipartitions=False,
+            suppress_unifurcations=True):
+        """
+        Takes an internal edge, ``edge``, adds a new node to it, and then roots
+        the tree on the new node.
+        ``length1`` and ``length2`` will be assigned to the new (sub-)edge leading
+        to the old parent of the original edge, while ``length2`` will be
+        assigned to the old child of the original edge.
+        If ``update_bipartitions`` is True, then the edges' ``bipartition`` and the tree's
+        ``bipartition_encoding`` attributes will be updated.
+        If the *old* root of the tree had an outdegree of 2, then after this
+        operation, it will have an outdegree of one. In this case, unless
+        ``suppress_unifurcations`` is False, then it will be
+        removed from the tree.
+        """
+        old_tail = edge.tail_node
+        old_head = edge.head_node
+        new_seed_node = old_tail.new_child(edge_length=length1)
+        old_tail.remove_child(old_head)
+        # new_seed_node.add_child(old_head, edge_length=length2)
+        new_seed_node.add_child(old_head)
+        old_head.edge.length = length2
+        self.reroot_at_node(new_seed_node,
+                update_bipartitions=update_bipartitions,
+                suppress_unifurcations=suppress_unifurcations)
+        return self.seed_node
+
+    def reroot_at_midpoint(self, update_bipartitions=False, suppress_unifurcations=True):
+        """
+        Reroots the tree at the the mid-point of the longest distance between
+        two taxa in a tree.
+        Sets the rooted flag on the tree to True.
+        If ``update_bipartitions`` is True, then the edges' ``bipartition`` and the tree's
+        ``bipartition_encoding`` attributes will be updated.
+        If the *old* root of the tree had an outdegree of 2, then after this
+        operation, it will have an outdegree of one. In this case, unless
+        ``suppress_unifurcations`` is False, then it will be
+        removed from the tree.
+        """
+        from dendropy.calculate import treemeasure
+        pdm = treemeasure.PatristicDistanceMatrix(self)
+        n1, n2 = pdm.max_dist_nodes
+        plen = float(pdm.max_dist) / 2
+        mrca_node = pdm.mrca(n1.taxon, n2.taxon)
+        #assert mrca_node is self.mrca(taxa=[n1.taxon, n2.taxon])
+        #mrca_node = self.mrca(taxa=[n1.taxon, n2.taxon])
+        cur_node = n1
+
+        break_on_node = None # populated *iff* midpoint is exactly at an existing node
+        target_edge = None
+        head_node_edge_len = None
+
+        # going up ...
+        while cur_node is not mrca_node:
+            if cur_node.edge.length > plen:
+                target_edge = cur_node.edge
+                head_node_edge_len = plen #cur_node.edge.length - plen
+                plen = 0
+                break
+            elif cur_node.edge.length < plen:
+                plen -= cur_node.edge.length
+                cur_node = cur_node._parent_node
+            else:
+                break_on_node = cur_node
+
+        assert break_on_node is not None or target_edge is not None
+
+        if break_on_node:
+            self.reseed_at(break_on_node, update_bipartitions=False, suppress_unifurcations=suppress_unifurcations)
+            new_seed_node = break_on_node
+        else:
+            tail_node_edge_len = target_edge.length - head_node_edge_len
+            old_head_node = target_edge.head_node
+            old_tail_node = target_edge.tail_node
+            old_tail_node.remove_child(old_head_node)
+            new_seed_node = Node()
+            # new_seed_node.add_child(old_head_node, edge_length=head_node_edge_len)
+            new_seed_node.add_child(old_head_node)
+            old_head_node.edge.length = head_node_edge_len
+            # old_tail_node.add_child(new_seed_node, edge_length=tail_node_edge_len)
+            old_tail_node.add_child(new_seed_node)
+            new_seed_node.edge.length = tail_node_edge_len
+            self.reseed_at(new_seed_node, update_bipartitions=False, suppress_unifurcations=suppress_unifurcations)
+        self.is_rooted = True
+        if update_bipartitions:
+            self.update_bipartitions(suppress_unifurcations=False)
+        return self.seed_node
+
+    def suppress_unifurcations(self, update_bipartitions=False):
+        """
+        Delete all nodes of outdegree-one from this tree.
+
+        Each unifurcation is spliced out: its single child is re-attached to
+        the unifurcation's parent (in the same child position) and the two
+        edge lengths are summed. If the seed node itself is a unifurcation,
+        its child becomes the new seed node.
+
+        Parameters
+        ----------
+        update_bipartitions : bool
+            If `True` and a bipartition encoding already exists, the
+            bipartitions belonging to deleted edges are removed from
+            ``self.bipartition_encoding``.
+
+        """
+        # Track bipartitions of deleted edges only when we both want to
+        # update and an encoding already exists; otherwise skip bookkeeping.
+        if update_bipartitions and self.bipartition_encoding:
+            bipartitions_to_delete = set()
+        else:
+            bipartitions_to_delete = None
+        # NOTE(review): the tree is modified while iterating a live
+        # postorder iterator; this appears to rely on postorder visiting
+        # children before parents -- confirm iterator tolerates the splice.
+        for nd in self.postorder_node_iter():
+            children = nd._child_nodes
+            if len(children) == 1:
+                # ``nd`` is a unifurcation: fold its edge length into its
+                # single child's edge before splicing it out.
+                if nd.edge.length is not None:
+                    if children[0].edge.length is None:
+                        children[0].edge.length = nd.edge.length
+                    else:
+                        children[0].edge.length += nd.edge.length
+                if bipartitions_to_delete is not None:
+                    bipartitions_to_delete.add(id(nd.edge.bipartition))
+                if nd._parent_node is not None:
+                    # Replace ``nd`` with its child at the same position so
+                    # sibling order is preserved.
+                    parent = nd._parent_node
+                    pos = parent._child_nodes.index(nd)
+                    parent.remove_child(nd)
+                    parent.insert_child(index=pos, node=children[0])
+                    # assert children[0]._parent_node is parent
+                    # assert children[0] in parent._child_nodes
+                    # assert children[0].edge.tail_node is parent
+                    # assert children[0].edge.head_node is children[0]
+                    nd._parent_node = None
+                else:
+                    # ``nd`` is the seed node: promote its only child.
+                    # assert nd is self.seed_node
+                    self.seed_node = children[0]
+                    self.seed_node._parent_node = None
+        # Filter (by identity) the deleted bipartitions out of the encoding.
+        if bipartitions_to_delete:
+            old_encoding = self.bipartition_encoding
+            self.bipartition_encoding = [b for b in old_encoding if id(b) not in bipartitions_to_delete]
+
+    def delete_outdegree_one_nodes(self):
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'delete_outdegree_one_nodes()' has been replaced by 'suppress_unifurcations()'",
+                stacklevel=3)
+        return self.suppress_unifurcations()
+
+    def collapse_unweighted_edges(self,
+            threshold=0.0000001,
+            update_bipartitions=False):
+        """
+        Collapse all *internal* edges with edge lengths less than or equal to
+        ``threshold`` (or with `None` for edge length).
+        """
+        for e in self.postorder_edge_iter():
+            if e.length is None or (e.length <= threshold) and e.is_internal():
+               e.collapse()
+        if update_bipartitions:
+            self.update_bipartitions()
+
+    def resolve_polytomies(self,
+            limit=2,
+            update_bipartitions=False,
+            rng=None):
+        """
+        Arbitrarily resolve polytomies using 0-length edges.
+
+        Parameters
+        ----------
+        limit : int
+            The maximum number of children a node can have before being
+            resolved.
+        update_bipartitions : bool
+            If `True`, then bipartitions will be calculated.
+        rng : ``random.Random`` object or `None`
+            If ``rng`` is an object with a ``sample()`` method then the polytomy
+            will be resolved by sequentially adding, generating all tree
+            topologies equiprobably. ``rng.sample()`` should behave like
+            ``random.sample()``
+            If ``rng`` is `None`, then polytomy is broken deterministically by
+            repeatedly joining pairs of children.
+        """
+        polytomies = []
+        for node in self.postorder_node_iter():
+            if len(node._child_nodes) > limit:
+                polytomies.append(node)
+        for node in polytomies:
+            if rng:
+                to_attach = rng.sample(node._child_nodes, len(node._child_nodes)-limit)
+                for child in to_attach:
+                    node.remove_child(child)
+                attachment_points = list(node._child_nodes)
+                while len(to_attach) > 0:
+                    next_child = to_attach.pop()
+                    next_sib = rng.choice(attachment_points)
+                    next_attachment = Node()
+                    p = next_sib._parent_node
+                    p.add_child(next_attachment)
+                    p.remove_child(next_sib)
+                    next_attachment.add_child(next_sib)
+                    next_attachment.add_child(next_child)
+                    attachment_points.append(next_attachment)
+                    attachment_points.append(next_child)
+            else:
+                while len(node._child_nodes) > limit:
+                    nn1 = Node()
+                    nn1.edge.length = 0
+                    c1 = node._child_nodes[0]
+                    c2 = node._child_nodes[1]
+                    node.remove_child(c1)
+                    node.remove_child(c2)
+                    nn1.add_child(c1)
+                    nn1.add_child(c2)
+                    node.add_child(nn1)
+        if update_bipartitions:
+            self.update_bipartitions()
+
+    def prune_subtree(self,
+            node,
+            update_bipartitions=False,
+            suppress_unifurcations=True):
+        """
+        Removes subtree starting at ``node`` from tree.
+        """
+        if not node:
+            raise ValueError("Tried to remove an non-existing or null node")
+        if node._parent_node is None:
+            raise TypeError('Node has no parent and is implicit root: cannot be pruned')
+        node._parent_node.remove_child(node)
+        if suppress_unifurcations:
+            self.suppress_unifurcations()
+        if update_bipartitions:
+            self.update_bipartitions()
+
+    def filter_leaf_nodes(
+            self,
+            filter_fn,
+            recursive=True,
+            update_bipartitions=False,
+            suppress_unifurcations=True):
+        """
+        Removes all leaves for which ``filter_fn`` returns `False`. If recursive
+        is `True`, then process is repeated until all leaf nodes in the tree will
+        evaluate to `True` when passed to ``filter_fn``.
+
+        Parameters
+        ----------
+        ``filter_fn`` : function object
+            A function that takes a |Node| object and returns `True` if
+            the object is to be allowed as a leaf node, and `False` if otherwise.
+        recursive : bool
+            If `True`, then filter is repeatedly applied until all leaf nodes
+            evaluate to `True` under ``filter_fn``. If `False`, then only a
+            single pass is made on the current leaf set. This may result in new
+            leaves for which the ``filter_fn`` is `False` (e.g., the parent node
+            of a cherry in which both children evaluated to `False`
+            under ``filter_fn`` now is a leaf node which may be `False`
+            under ``filter_fn``).
+        suppress_unifurcations : bool
+            If `True`, nodes of outdegree 1 will be deleted as they are
+            encountered.
+        update_bipartitions : bool
+            If `True`, then bipartitions will be calculated.
+
+        Raises
+        ------
+        error.SeedNodeDeletionException
+            If a leaf rejected by ``filter_fn`` has no parent (i.e., is the
+            seed node), since it cannot be detached.
+        """
+        # Repeat whole passes until a pass deletes nothing (or only one pass
+        # if not recursive): deleting a cherry's leaves can expose the parent
+        # as a new leaf that must be re-tested.
+        while True:
+            nodes_deleted = False
+            # NOTE(review): leaves are removed while iterating a live leaf
+            # iterator; the recursive outer loop re-scans, which appears to
+            # compensate for any leaves the mutated iteration skips -- confirm.
+            for nd in self.leaf_node_iter():
+                if not filter_fn(nd):
+                    if nd.edge.tail_node is None:
+                        raise error.SeedNodeDeletionException("Attempting to remove seed node or node without parent")
+                    nd.edge.tail_node.remove_child(nd)
+                    nodes_deleted = True
+            if not nodes_deleted or not recursive:
+                break
+        if suppress_unifurcations:
+            self.suppress_unifurcations()
+        if update_bipartitions:
+            self.update_bipartitions()
+
+    def prune_leaves_without_taxa(self,
+            update_bipartitions=False,
+            suppress_unifurcations=True):
+        """
+        Removes all terminal nodes that have their ``taxon`` attribute set to
+        `None`.
+        """
+        for nd in self.leaf_node_iter():
+            if nd.taxon is None:
+                nd.edge.tail_node.remove_child(nd)
+        if suppress_unifurcations:
+            self.suppress_unifurcations()
+        if update_bipartitions:
+            self.update_bipartitions()
+
+    def prune_taxa(self, taxa, update_bipartitions=False, suppress_unifurcations=True):
+        """
+        Removes terminal nodes associated with Taxon objects given by the container
+        ``taxa`` (which can be any iterable, including a TaxonNamespace object) from ``self``.
+        """
+        nodes_to_remove = []
+        for nd in self.postorder_node_iter():
+            if nd.taxon and nd.taxon in taxa:
+                nd.edge.tail_node.remove_child(nd)
+        self.prune_leaves_without_taxa(update_bipartitions=update_bipartitions,
+                suppress_unifurcations=suppress_unifurcations)
+
+    def prune_nodes(self, nodes, prune_leaves_without_taxa=False, update_bipartitions=False, suppress_unifurcations=True):
+        for nd in nodes:
+            if nd.edge.tail_node is None:
+                raise Exception("Attempting to remove root node or node without parent")
+            nd.edge.tail_node.remove_child(nd)
+        if prune_leaves_without_taxa:
+            self.prune_leaves_without_taxa(update_bipartitions=update_bipartitions,
+                    suppress_unifurcations=suppress_unifurcations)
+
+    def prune_taxa_with_labels(self,
+            labels,
+            update_bipartitions=False,
+            suppress_unifurcations=True):
+        """
+        Removes terminal nodes that are associated with Taxon objects with
+        labels given by ``labels``.
+        """
+        taxa = self.taxon_namespace.get_taxa(labels=labels)
+        self.prune_taxa(taxa=taxa,
+                update_bipartitions=update_bipartitions,
+                suppress_unifurcations=suppress_unifurcations)
+
+    def retain_taxa(self,
+            taxa,
+            update_bipartitions=False,
+            suppress_unifurcations=True):
+        """
+        Removes terminal nodes that are not associated with any
+        of the Taxon objects given by ``taxa`` (which can be any iterable, including a
+        TaxonNamespace object) from the ``self``.
+        """
+        to_prune = [t for t in self.taxon_namespace if t not in taxa]
+        self.prune_taxa(to_prune,
+                update_bipartitions=update_bipartitions,
+                suppress_unifurcations=suppress_unifurcations)
+
+    def retain_taxa_with_labels(self,
+            labels,
+            update_bipartitions=False,
+            suppress_unifurcations=True):
+        """
+        Removes terminal nodes that are not associated with Taxon objects with
+        labels given by ``labels``.
+        """
+        taxa = self.taxon_namespace.get_taxa(labels=labels)
+        self.retain_taxa(taxa=taxa,
+                update_bipartitions=update_bipartitions,
+                suppress_unifurcations=suppress_unifurcations)
+
+    def randomly_reorient(self, rng=None, update_bipartitions=False):
+        """
+        Randomly picks a new rooting position and rotates the branches around all
+        internal nodes in the ``self``. If ``update_bipartitions`` is True, the the ``bipartition_bitmask``
+        and ``bipartition_edge_map`` attributes kept valid.
+        """
+        if rng is None:
+            rng = GLOBAL_RNG # use the global rng by default
+        nd = rng.sample(self.nodes(), 1)[0]
+        if nd.is_leaf():
+            self.to_outgroup_position(nd, update_bipartitions=update_bipartitions)
+        else:
+            self.reseed_at(nd, update_bipartitions=update_bipartitions)
+        self.randomly_rotate(rng=rng)
+
+    def randomly_rotate(self, rng=None):
+        "Randomly rotates the branches around all internal nodes in ``self``"
+        if rng is None:
+            rng = GLOBAL_RNG # use the global rng by default
+        internal_nodes = self.internal_nodes()
+        for nd in internal_nodes:
+            c = nd.child_nodes()
+            rng.shuffle(c)
+            nd.set_child_nodes(c)
+
+    def ladderize(self, ascending=True):
+        """
+        Sorts child nodes in ascending (if ``ascending`` is `False`) or
+        descending (if ``ascending`` is `False`) order in terms of the number of
+        children each child node has.
+        """
+        node_desc_counts = {}
+        for nd in self.postorder_node_iter():
+            if len(nd._child_nodes) == 0:
+                node_desc_counts[nd] = 0
+            else:
+                total = 0
+                for child in nd._child_nodes:
+                    total += node_desc_counts[child]
+                total += len(nd._child_nodes)
+                node_desc_counts[nd] = total
+                nd._child_nodes.sort(key=lambda n: node_desc_counts[n], reverse=not ascending)
+
+    def truncate_from_root(self, distance_from_root):
+        """
+        Prunes the tree at depth ``distance_from_root``: every edge crossing
+        that depth (measured as summed edge lengths from the root) is cut and
+        shortened to end exactly at ``distance_from_root``, and all structure
+        below the cut points is removed.
+
+        NOTE(review): depth comparisons use exact float equality on
+        ``root_distance``; nodes lying exactly at the cut depth may be
+        sensitive to floating-point rounding -- confirm intended.
+        """
+        self.calc_node_root_distances()
+        new_terminals = []
+        for nd in self.preorder_node_iter():
+            if not nd._parent_node:
+                # root node
+                # TODO: strictly speaking, this might be a terminal if distance_from_root == 0
+                pass
+            else:
+                if nd.root_distance == distance_from_root:
+                    # node sits exactly at the cut depth: keep as terminal
+                    new_terminals.append(nd)
+                elif nd.root_distance > distance_from_root and nd._parent_node.root_distance < distance_from_root:
+                    # cut above current node
+                    nd.edge.length = distance_from_root - nd._parent_node.root_distance
+                    nd.root_distance = distance_from_root
+                    new_terminals.append(nd)
+        # Detach all children of the new terminals, discarding their subtrees.
+        for nd in new_terminals:
+            for ch in nd.child_nodes():
+                nd.remove_child(ch)
+
+    ###########################################################################
+    ### Ages, depths, branch lengths etc. (mutation)
+
+    def scale_edges(self, edge_len_multiplier):
+        """Multiplies every edge length in ``self`` by ``edge_len_multiplier``"""
+        for e in self.postorder_edge_iter():
+            if e.length is not None:
+                e.length *= edge_len_multiplier
+
+    def set_edge_lengths_from_node_ages(self,
+            minimum_edge_length=0.0,
+            error_on_negative_edge_lengths=False,
+            ):
+        """
+        Sets the edge lengths of the tree so that the path lengths from the
+        tips equal the value of the ``age`` attribute of the nodes.
+
+        Parameters
+        ----------
+        minimum_edge_length : numeric
+            All edge lengths calculated to have a value less than this will be
+            set to this.
+        error_on_negative_edge_lengths : bool
+            If `True`, an inferred edge length that is less than 0 will result
+            in a ValueError.
+        """
+        for nd in self.preorder_node_iter():
+            if nd._parent_node is not None:
+                #if nd._parent_node.age < nd.age:
+                #    nd.edge.length = 0.0
+                #else:
+                #    nd.edge.length = nd._parent_node.age - nd.age
+                edge_length = nd._parent_node.age - nd.age
+                if minimum_edge_length is not None and edge_length < minimum_edge_length:
+                    edge_length = minimum_edge_length
+                if error_on_negative_edge_lengths and edge_length < 0.0:
+                    raise ValueError("Negative edge length: {}".foramt(edge_length))
+                nd.edge.length = edge_length
+
+    ###########################################################################
+    ### Ages, depths, branch lengths etc. (calculation)
+
+    def calc_node_ages(self, ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION, internal_only=False):
+        """
+        Adds an attribute called "age" to  each node, with the value equal to
+        the sum of edge lengths from the node to the tips. If the lengths of
+        different paths to the node differ by more than ``ultrametricity_precision``, then a
+        ValueError exception will be raised indicating deviation from
+        ultrametricity. If ``ultrametricity_precision`` is negative or False, then this check
+        will be skipped.
+
+        Parameters
+        ----------
+        ultrametricity_precision : numeric or `None` or `False`
+            Maximum tolerated difference between path lengths to a node;
+            `None`, `False`, or a negative value disables the check.
+        internal_only : bool
+            If `True`, ages of leaf nodes are excluded from the returned list
+            (the ``age`` attribute is still set on every node).
+
+        Returns
+        -------
+        list
+            The collected node ages, in postorder.
+        """
+        ages = []
+        for node in self.postorder_node_iter():
+            ch = node.child_nodes()
+            if len(ch) == 0:
+               node.age = 0.0
+               if not internal_only:
+                   ages.append(node.age)
+            else:
+                # Age is computed from the first child; remaining children
+                # are only consulted for the ultrametricity check below.
+                first_child = ch[0]
+                node.age = first_child.age + first_child.edge.length
+                if not (ultrametricity_precision is None or ultrametricity_precision is False or ultrametricity_precision < 0):
+                    for nnd in ch[1:]:
+                        try:
+                            ocnd = nnd.age + nnd.edge.length
+                        except TypeError:
+                            # edge length is None: treat it as zero-length
+                            # (and record that) so the check can proceed
+                            nnd.edge.length = 0.0
+                            ocnd = nnd.age
+                        d = abs(node.age - ocnd)
+                        if  d > ultrametricity_precision:
+                            # raise ValueError("Tree is not ultrametric. Node '{}': expecting {}, but found {}".format(node.label, node.age, ocnd))
+                            raise error.UltrametricityError("Tree is not ultrametric within threshold of {threshold}: {deviance}".format(
+                                threshold=ultrametricity_precision,
+                                deviance=d,
+                                ))
+                ages.append(node.age)
+        return ages
+
+    def calc_node_root_distances(self, return_leaf_distances_only=True):
+        """
+        Adds attribute "root_distance" to each node, with value set to the
+        sum of edge lengths from the node to the root. Returns list of
+        distances. If ``return_leaf_distances_only`` is True, then only
+        leaf distances will be true.
+        """
+        dists = []
+        for node in self.preorder_node_iter():
+            if node._parent_node is None:
+                node.root_distance = 0.0
+            else:
+                node.root_distance = node.edge.length + node._parent_node.root_distance
+            if (not return_leaf_distances_only or node.is_leaf()):
+                dists.append(node.root_distance)
+        return dists
+
+    def internal_node_ages(self, ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION):
+        """
+        Returns list of ages of speciation events / coalescence times on tree.
+        """
+        ages = self.calc_node_ages(ultrametricity_precision=ultrametricity_precision, internal_only=True)
+        ages.sort()
+        return ages
+
+    def node_ages(self, ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION, internal_only=False):
+        """
+        Returns list of ages of all nodes on tree.
+        NOTE: Changed from DendroPy3: this function now returns the ages of
+        *ALL* nodes. To get only internal node ages, use
+        `Tree.internal_node_ages`.
+        """
+        ages = self.calc_node_ages(ultrametricity_precision=ultrametricity_precision, internal_only=internal_only)
+        ages.sort()
+        return ages
+
+    def length(self):
+        """
+        Returns sum of edge lengths of self. Edges with no lengths defined
+        (None) will be considered to have a length of 0.
+        Note that we do not overrride ``__len__`` as this requires an integer
+        return value.
+        """
+        total = 0
+        for edge in self.postorder_edge_iter():
+            if edge.length is not None:
+                total += edge.length
+        return total
+
+    def max_distance_from_root(self):
+        """
+        Returns distance of node furthest from root.
+        """
+        dists = self.calc_node_root_distances()
+        return max(dists)
+
+    def minmax_leaf_distance_from_root(self):
+        """
+        Returns pair of values, representing the distance of the leaf closest
+        to a furthest from the root.
+        """
+        dists = self.calc_node_root_distances(return_leaf_distances_only=True)
+        return min(dists), max(dists)
+
+    def coalescence_intervals(self):
+        """
+        Returns list of coalescence intervals of self., i.e., the waiting
+        times between successive coalescence events.
+        """
+        ages = self.node_ages()
+        intervals = []
+        intervals.append(ages[0])
+        for i, d in enumerate(ages[1:]):
+            intervals.append(d - ages[i])
+        return intervals
+
+    def num_lineages_at(self, distance_from_root):
+        """
+        Returns the number of lineages on the tree at a particular distance
+        from the root.
+        """
+        self.calc_node_root_distances()
+        num_lineages = 0
+        for nd in self.preorder_node_iter():
+            if not nd._parent_node:
+                # root node
+                pass
+            else:
+                if nd.root_distance == distance_from_root:
+                    num_lineages += 1
+                elif nd.root_distance >= distance_from_root and nd._parent_node.root_distance < distance_from_root:
+                    num_lineages += 1
+        return num_lineages
+
+    ###########################################################################
+    ### Bipartition Management
+
+    def _compile_mutable_bipartition_for_edge(self, edge):
+        edge.bipartition.compile_split_bitmask(
+                tree_leafset_bitmask=self.seed_node.edge.bipartition._leafset_bitmask,
+                is_mutable=True)
+        return edge.bipartition
+
+    def _compile_immutable_bipartition_for_edge(self, edge):
+        edge.bipartition.compile_split_bitmask(
+                tree_leafset_bitmask=self.seed_node.edge.bipartition._leafset_bitmask,
+                is_mutable=False)
+        return edge.bipartition
+
+    def encode_bipartitions(self,
+            suppress_unifurcations=True,
+            collapse_unrooted_basal_bifurcation=True,
+            suppress_storage=False,
+            is_bipartitions_mutable=False):
+        """
+        Calculates the bipartitions of this tree.
+
+        Parameters
+        ----------
+        suppress_unifurcations : bool
+            If `True`, nodes of outdegree 1 will be deleted as they are
+            encountered.
+        collapse_unrooted_basal_bifurcation: bool
+            If `True`, then a basal bifurcation on an unrooted tree will be
+            collapsed to a trifurcation. This mean that an unrooted tree like
+            '(A,(B,C))' will be changed to '(A,B,C)' after this.
+        suppress_storage : bool
+            By default, the bipartition encoding is stored as a list (assigned
+            to ``self.bipartition_encoding``) and returned. If ``suppress_storage``
+            is `True`, then the list is not created.
+        is_bipartitions_mutable : bool
+            By default, the |Bipartition| instances coded will be locked
+            or frozen, allowing their use in hashing containers such as
+            dictionary (keys) and sets. To allow modification of values, the
+            ``is_mutable`` attribute must be set to `True`.
+
+        Returns
+        -------
+        list[|Bipartition|] or `None`
+            A list of |Bipartition| objects of this |Tree|
+            representing the structure of this tree, or, if ``suppress_storage``
+            is `True`, then `None`.
+
+        """
+        # Any cached bipartition->edge map is invalidated by re-encoding.
+        self._bipartition_edge_map = None
+        taxon_namespace = self._taxon_namespace
+        seed_node = self.seed_node
+        if not seed_node:
+            return
+        if (collapse_unrooted_basal_bifurcation
+                and not self._is_rooted
+                and len(seed_node._child_nodes) == 2):
+            # We do this because an unrooted tree
+            # has no *true* degree-3 internal nodes:
+            #
+            #      \  | |  /
+            #       +-+-+-+
+            #      /       \
+            #
+            # (whereas, with a rooted tree, the basal bipartition is a true
+            # degree-3 node: the edge subtending it does not really
+            # exist in the graph -- it is not a true link connecting
+            # two nodes).
+            self.collapse_basal_bifurcation()
+        tree_edges = []
+        # First pass (postorder): build a raw leafset bitmask for each edge,
+        # splicing out unifurcations along the way if requested.
+        for edge in self.postorder_edge_iter():
+            leafset_bitmask = 0
+            head_node = edge._head_node
+            child_nodes = head_node._child_nodes
+            num_children = len(child_nodes)
+            if num_children == 1 and suppress_unifurcations:
+                # collapsing node: remove, and do not process/add edge
+                if head_node.edge.length is not None:
+                    # fold this edge's length into the single child's edge
+                    if child_nodes[0].edge.length is None:
+                        child_nodes[0].edge.length = head_node.edge.length
+                    else:
+                        child_nodes[0].edge.length += head_node.edge.length
+                if head_node._parent_node is not None:
+                    # splice the child into the parent's child list at the
+                    # same position, preserving sibling order
+                    parent = head_node._parent_node
+                    pos = parent._child_nodes.index(head_node)
+                    parent.remove_child(head_node)
+                    parent.insert_child(index=pos, node=child_nodes[0])
+                    head_node._parent_node = None
+                else:
+                    # unifurcating seed node: promote its only child
+                    self.seed_node = child_nodes[0]
+                    self.seed_node._parent_node = None
+            else:
+                if num_children == 0:
+                    # leaf edge: bitmask is the taxon's own bit (or 0 if the
+                    # leaf carries no taxon)
+                    tree_edges.append(edge)
+                    taxon = edge._head_node.taxon
+                    if taxon:
+                        leafset_bitmask = taxon_namespace.taxon_bitmask(taxon)
+                else:
+                    # internal edge: union of the children's bitmasks, which
+                    # postorder guarantees are already set
+                    tree_edges.append(edge)
+                    for child in child_nodes:
+                        leafset_bitmask |= child.edge.bipartition._leafset_bitmask
+                edge.bipartition = Bipartition(compile_bipartition=False, is_mutable=True)
+                edge.bipartition._leafset_bitmask = leafset_bitmask
+                edge.bipartition._is_rooted = self._is_rooted
+        # Create normalized bitmasks, where the full (self) bipartition mask is *not*
+        # all the taxa, but only those found on the self; this is to handle
+        # cases where we are dealing with selfs with incomplete leaf-sets.
+        # NOTE(review): ``tree_leafset_bitmask`` is assigned but not used
+        # below; the compile helpers re-read the seed bitmask themselves --
+        # confirm whether this local is vestigial.
+        tree_leafset_bitmask = self.seed_node.edge.bipartition._leafset_bitmask
+        if is_bipartitions_mutable:
+            _compile_bipartition = self._compile_mutable_bipartition_for_edge
+        else:
+            _compile_bipartition = self._compile_immutable_bipartition_for_edge
+        if suppress_storage:
+            # compile for side effect only; do not retain a list
+            self.bipartition_encoding = None
+            for x in map(_compile_bipartition, tree_edges):
+                pass
+        else:
+            # self.bipartition_encoding = dict(zip(map(self._compile_bipartition_for_edge, tree_edges), tree_edges))
+            self.bipartition_encoding = list(map(_compile_bipartition, tree_edges))
+        return self.bipartition_encoding
+
+    def update_bipartitions(self, *args, **kwargs):
+        """
+        Recalculates bipartition hashes for tree.
+        """
+        self.encode_bipartitions(*args, **kwargs)
+
+    def encode_splits(self, *args, **kwargs):
+        """
+        Recalculates bipartition hashes for tree.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'Tree.encode_splits()' will no longer be supported in future releases; use 'Tree.encode_bipartitions()' instead",
+                stacklevel=3)
+        return self.encode_bipartitions(*args, **kwargs)
+
+    def update_splits(self, *args, **kwargs):
+        """
+        Recalculates bipartition hashes for tree.
+        """
+        deprecate.dendropy_deprecation_warning(
+                message="Deprecated since DendroPy 4: 'Tree.encode_splits()' will no longer be supported in future releases; use 'Tree.update_bipartitions()' instead",
+                stacklevel=3)
+        return self.encode_bipartitions(*args, **kwargs)
+
+    def _get_bipartition_edge_map(self):
+        if not self._bipartition_edge_map:
+            if not self.bipartition_encoding:
+                self.encode_bipartitions()
+            self._bipartition_edge_map = {}
+            for edge in self.postorder_edge_iter():
+                self._bipartition_edge_map[edge.bipartition] = edge
+        return self._bipartition_edge_map
+    bipartition_edge_map = property(_get_bipartition_edge_map)
+
+    ###########################################################################
+    ### Metrics -- Unary
+
+    def B1(self):
+        """DEPRECATED: Use :func:`dendropy.calculate.treemeasure.B1()`."""
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: Unary statistics on trees are now implemented in the 'dendropy.calculate.treemeasure' module.",
+                old_construct="tree.B1()",
+                new_construct="from dendropy.calculate import treemeasure\ntreemeasure.B1(tree)")
+        from dendropy.calculate import treemeasure
+        return treemeasure.B1(self)
+
+    def colless_tree_imbalance(self, normalize="max"):
+        """DEPRECATED: Use 'dendropy.calculate.treemeasure.colless_tree_imbalance()'."""
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: Unary statistics on trees are now implemented in the 'dendropy.calculate.treemeasure' module.",
+                old_construct="tree.colless_tree_imbalance()",
+                new_construct="from dendropy.calculate import treemeasure\ntreemeasure.colless_tree_imbalance(tree)")
+        from dendropy.calculate import treemeasure
+        return treemeasure.colless_tree_imbalance(self, normalize)
+
+    def pybus_harvey_gamma(self, prec=0.00001):
+        """DEPRECATED: Use 'dendropy.calculate.treemeasure.pybus_harvey_gamma()'."""
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: Unary statistics on trees are now implemented in the 'dendropy.calculate.treemeasure' module.",
+                old_construct="tree.pybus_harvey_gamma()",
+                new_construct="from dendropy.calculate import treemeasure\ntreemeasure.pybus_harvey_gamma(tree)")
+        from dendropy.calculate import treemeasure
+        return treemeasure.pybus_harvey_gamma(self, prec)
+
+    def N_bar(self):
+        """DEPRECATED: Use 'dendropy.calculate.treemeasure.N_bar()'."""
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: Unary statistics on trees are now implemented in the 'dendropy.calculate.treemeasure' module.",
+                old_construct="tree.N_bar()",
+                new_construct="from dendropy.calculate import treemeasure\ntreemeasure.N_bar(tree)")
+        from dendropy.calculate import treemeasure
+        return treemeasure.N_bar(self)
+
+    def sackin_index(self, normalize=True):
+        """DEPRECATED: Use 'dendropy.calculate.treemeasure.sackin_index()'."""
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: Unary statistics on trees are now implemented in the 'dendropy.calculate.treemeasure' module.",
+                old_construct="tree.sackin_index()",
+                new_construct="from dendropy.calculate import treemeasure\ntreemeasure.sackin_index(tree)")
+        from dendropy.calculate import treemeasure
+        return treemeasure.sackin_index(self, normalize)
+
+    def treeness(self):
+        """DEPRECATED: Use 'dendropy.calculate.treemeasure.treeness()'."""
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: Unary statistics on trees are now implemented in the 'dendropy.calculate.treemeasure' module.",
+                old_construct="tree.treeness()",
+                new_construct="from dendropy.calculate import treemeasure\ntreemeasure.treeness(tree)")
+        from dendropy.calculate import treemeasure
+        return treemeasure.treeness(self)
+
+    ###########################################################################
+    ### Comparisons with Another Tree
+
+    def is_compatible_with_bipartition(self, bipartition, is_bipartitions_updated=False):
+        """
+        Returns true if the |Bipartition| ``bipartition`` is compatible
+        with all bipartitions of this tree.
+        """
+        if not is_bipartitions_updated or not self.bipartitions_encoding:
+            self.encode_bipartitions()
+        if bipartition in self.bipartition_encoding:
+            return True
+        else:
+            for b in self.bipartition_encoding:
+                if not b.is_compatible_with(bipartition):
+                    return False
+            return True
+
+    def is_compatible_with_tree(self, other):
+        raise NotImplementedError
+
+    def find_missing_splits(self, other_tree):
+        """DEPRECATED: Use 'dendropy.treecompare.find_missing_bipartitions()'."""
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: Statistics comparing two trees are now implemented in the 'dendropy.calculate.treecompare' module.",
+                old_construct="tree1.find_missing_splits(tree2)",
+                new_construct="from dendropy.calculate import treecompare\ntreecompare.find_missing_bipartitions(tree1, tree2)")
+        from dendropy.calculate import treecompare
+        return treecompare.find_missing_splits(self, other_tree)
+
+    def symmetric_difference(self, other_tree):
+        """DEPRECATED: Use 'dendropy.treecompare.symmetric_difference()'."""
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: Statistics comparing two trees are now implemented in the 'dendropy.calculate.treecompare' module.",
+                old_construct="tree1.symmetric_difference(tree2)",
+                new_construct="from dendropy.calculate import treecompare\ntreecompare.symmetric_difference(tree1, tree2)")
+        from dendropy.calculate import treecompare
+        return treecompare.symmetric_difference(self, other_tree)
+
+    def false_positives_and_negatives(self, other_tree):
+        """DEPRECATED: Use 'dendropy.treecompare.false_positives_and_negatives()'."""
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: Statistics comparing two trees are now implemented in the 'dendropy.calculate.treecompare' module.",
+                old_construct="tree1.false_positives_and_negatives(tree2)",
+                new_construct="from dendropy.calculate import treecompare\ntreecompare.false_positives_and_negatives(tree1, tree2)")
+        from dendropy.calculate import treecompare
+        return treecompare.false_positives_and_negatives(self, other_tree)
+
+    def robinson_foulds_distance(self, other_tree):
+        """DEPRECATED: Use 'dendropy.treecompare.weighted_robinson_foulds_distance()'."""
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: Statistics comparing two trees are now implemented in the 'dendropy.calculate.treecompare' module, and this method's functionality is available through the 'weighted_robinson_foulds_distance()' function. For the *unweighted* RF distance, see 'dendropy.calculate.treecompare.symmetric_difference()'.",
+                old_construct="tree1.robinson_foulds_distance(tree2)",
+                new_construct="from dendropy.calculate import treecompare\ntreecompare.weighted_robinson_foulds_distance(tree1, tree2)")
+        from dendropy.calculate import treecompare
+        return treecompare.weighted_robinson_foulds_distance(self, other_tree)
+
+    def euclidean_distance(self, other_tree):
+        """DEPRECATED: Use 'dendropy.treecompare.euclidean_distance()'."""
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: Statistics comparing two trees are now implemented in the 'dendropy.calculate.treecompare' module.",
+                old_construct="tree1.euclidean_distance(tree2)",
+                new_construct="from dendropy.calculate import treecompare\ntreecompare.euclidean_distance(tree1, tree2)")
+        from dendropy.calculate import treecompare
+        return treecompare.euclidean_distance(self, other_tree)
+
+    ###########################################################################
+    ### Metadata
+
+    def strip_comments(self):
+        """
+        Remove comments from tree/nodes.
+        """
+        self.comments = []
+        for nd in self.postorder_node_iter():
+            nd.comments = []
+            nd.edge.comments = []
+
+    ###########################################################################
+    ### Representation
+
+    def __str__(self):
+        "Dump Newick string."
+        return "%s" % self._as_newick_string()
+
+    def __repr__(self):
+        return "<Tree object at %s>" % (hex(id(self)))
+
    def description(self, depth=1, indent=0, itemize="", output=None):
        """
        Returns description of object, up to level ``depth``.

        Parameters
        ----------
        depth : int
            Verbosity level: 0/None yields nothing; 1 appends the newick
            string; 2 adds node/edge counts and the taxon set; 3+ also
            recurses into per-node and per-edge descriptions.
        indent : int
            Number of leading spaces on the first line.
        itemize : str
            Prefix (e.g., a bullet marker) placed before the description.
        output : file-like object, optional
            If given, the composed description is also written to it.

        Returns
        -------
        str
            The composed description (``None`` if ``depth`` is None/negative).
        """
        if depth is None or depth < 0:
            return
        output_strio = StringIO()
        # Identify the object by id(), plus its label when one is set.
        if self.label is None:
            label = " (%s)" % id(self)
        else:
            label = " (%s: '%s')" % (id(self), self.label)
        output_strio.write('%s%sTree object at %s%s'
                % (indent*' ',
                   itemize,
                   hex(id(self)),
                   label))
        if depth >= 1:
            newick_str = self.as_newick_string()
            if not newick_str:
                # An empty tree still gets a placeholder newick form.
                newick_str = "()"
            if depth == 1:
                output_strio.write(': %s' % newick_str)
            elif depth >= 2:
                num_nodes = len([nd for nd in self.preorder_node_iter()])
                num_edges = len([ed for ed in self.preorder_edge_iter()])
                output_strio.write(': %d Nodes, %d Edges' % (num_nodes, num_edges))
                if self.taxon_namespace is not None:
                    output_strio.write("\n%s[Taxon Set]\n" % (" " * (indent+4)))
                    self.taxon_namespace.description(depth=depth-1, indent=indent+8, itemize="", output=output_strio)
                output_strio.write('\n%s[Tree]' % (" " * (indent+4)))
                output_strio.write('\n%s%s' % (" " * (indent+8), newick_str))
                if depth >= 3:
                    # Recurse into each node and edge with reduced depth.
                    output_strio.write("\n%s[Nodes]" % (" " * (indent+4)))
                    for i, nd in enumerate(self.preorder_node_iter()):
                        output_strio.write('\n')
                        nd.description(depth=depth-3, indent=indent+8, itemize="[%d] " % i, output=output_strio, taxon_namespace=self.taxon_namespace)
                    output_strio.write("\n%s[Edges]" % (" " * (indent+4)))
                    for i, ed in enumerate(self.preorder_edge_iter()):
                        output_strio.write('\n')
                        ed.description(depth=depth-3, indent=indent+8, itemize="[%d] " % i, output=output_strio, taxon_namespace=self.taxon_namespace)

        s = output_strio.getvalue()
        if output is not None:
            output.write(s)
        return s
+
+    def as_python_source(self, tree_obj_name=None, tree_args=None, oids=False):
+        """
+        Returns string that will rebuild this tree in Python.
+        """
+        p = []
+
+        if tree_obj_name is None:
+            tree_obj_name = "tree_%s" % id(self)
+        if self.label is not None:
+            label = "'" + self.label + "'"
+        else:
+            label = "None"
+        if tree_args is None:
+            tree_args = ""
+        else:
+            tree_args = ", " + tree_args
+        p.append("%s = dendropy.Tree(label=%s%s%s)" \
+            % (tree_obj_name,
+               label,
+               oid_str,
+               tree_args))
+
+        taxon_obj_namer = lambda x: "tax_%s" % id(x)
+        for taxon in self.taxon_namespace:
+            tobj_name = taxon_obj_namer(taxon)
+            if taxon.label is not None:
+                label = "'" + taxon.label + "'"
+            else:
+                label = "None"
+            p.append("%s = %s.taxon_namespace.require_taxon(label=%s%s)" \
+                % (tobj_name,
+                   tree_obj_name,
+                   label,
+                   oid_str))
+
+        node_obj_namer = lambda x: "nd_%s" % id(x)
+        for node in self.preorder_node_iter():
+            for child in node.child_nodes():
+                if node is self.seed_node:
+                    nn = "%s.seed_node" % tree_obj_name
+                else:
+                    nn = node_obj_namer(node)
+                if child.label is not None:
+                    label = "'" + child.label + "'"
+                else:
+                    label = "None"
+                if child.taxon is not None:
+                    ct = taxon_obj_namer(child.taxon)
+                else:
+                    ct = "None"
+                p.append("%s = %s.new_child(label=%s, taxon=%s, edge_length=%s%s)" %
+                        (node_obj_namer(child),
+                         nn,
+                         label,
+                         ct,
+                         child.edge.length,
+                         oid_str))
+
+        return "\n".join(p)
+
+    ###########################################################################
+    ### Representation
+
+    def as_ascii_plot(self, **kwargs):
+        """
+        Returns a string representation a graphic of this tree using ASCII
+        characters.
+        """
+        ap = AsciiTreePlot(**kwargs)
+        return ap.compose(self)
+
+    def write_ascii_plot(self, stream, **kwargs):
+        """
+        Writes an ASCII text graphic of this tree to ``stream``.
+        """
+        return stream.write(self.as_ascii_plot(**kwargs))
+
+    def print_plot(self, **kwargs):
+        """
+        Writes an ASCII text graphic of this tree to standard output.
+        """
+        import sys
+        self.write_ascii_plot(sys.stdout, **kwargs)
+        sys.stdout.write("\n")
+
+    def write_as_dot(self, out, **kwargs):
+        """
+        Writes the tree to ``out`` as a DOT formatted digraph
+        """
+        if not kwargs.get("taxon_namespace"):
+            kwargs["taxon_namespace"] = self.taxon_namespace
+        out.write("digraph G {\n")
+
+        nd_id_to_dot_nd = {}
+        for n, nd in enumerate(self.preorder_node_iter()):
+            label = _format_node(nd, **kwargs)
+            if nd is self.seed_node:
+                label = "root %s" % label
+            dot_nd = "n%d" % n
+            out.write(' %s  [label="%s"];\n' % (dot_nd, label))
+            nd_id_to_dot_nd[nd] = dot_nd
+        for nd, dot_nd in nd_id_to_dot_nd.iteritems():
+            try:
+                e = nd.edge
+                par_dot_nd = nd_id_to_dot_nd[e.tail_node]
+            except:
+                pass
+            else:
+                label = _format_edge(e, **kwargs)
+                s = ' %s -> %s [label="%s"];\n' % (par_dot_nd, dot_nd, label)
+                out.write(s)
+        out.write("}\n")
+
+    ###########################################################################
+    ### Debugging/Testing
+
+    def _assign_node_labels_from_taxon(self):
+        for nd in self.postorder_node_iter():
+            if nd.label is not None:
+                continue
+            if nd.taxon is not None:
+                nd.label = nd.taxon.label
+
+    def _get_indented_form(self, **kwargs):
+        out = StringIO()
+        self._write_indented_form(out, **kwargs)
+        return out.getvalue()
+
+    def _write_indented_form(self, out, **kwargs):
+        if kwargs.get("bipartitions"):
+            if not kwargs.get("taxon_namespace"):
+                kwargs["taxon_namespace"] = self.taxon_namespace
+        self.seed_node._write_indented_form(out, **kwargs)
+
    def _debug_check_tree(self, logger_obj=None, **kwargs):
        """
        Debug helper: asserts that the tree structure is valid.

        If ``logger_obj`` has DEBUG enabled, a failed check first logs the
        tree's newick and indented forms; the final assert then re-runs the
        validation and raises for real.
        """
        import logging, inspect
        if logger_obj and logger_obj.isEnabledFor(logging.DEBUG):
            try:
                assert self._debug_tree_is_valid(logger_obj=logger_obj, **kwargs)
            except:
                # Capture where we were called from for diagnostic context.
                calling_frame = inspect.currentframe().f_back
                co = calling_frame.f_code
                emsg = "\nCalled from file %s, line %d, in %s" % (co.co_filename, calling_frame.f_lineno, co.co_name)
                # NOTE(review): 'emsg' is built but never logged or raised
                # below -- presumably it was meant to be part of the debug
                # output; confirm intent before changing.
                _LOG.debug("%s" % str(self))
                _LOG.debug("%s" % self._get_indented_form(**kwargs))
        # Any invalidity is surfaced here (the except above swallows it).
        assert self._debug_tree_is_valid(logger_obj=logger_obj, **kwargs)
+
    def _debug_tree_is_valid(self, **kwargs):
        """Performs sanity-checks of the tree data structure.

        Walks the tree in pre-order verifying node/edge cross-references
        (no duplicates, edge head/tail consistent with parent/child links)
        and, optionally, bipartition bitmask consistency.

        kwargs:
            ``check_bipartitions`` if True specifies that the bipartition attributes are checked.
            ``unique_bipartition_edge_mapping`` if True additionally checks that
                each bipartition maps back to exactly its own edge.
            ``taxon_namespace`` overrides the tree's own namespace for
                taxon bitmask computation.
        """
        check_bipartitions = kwargs.get('check_bipartitions', False)
        unique_bipartition_edge_mapping = kwargs.get('unique_bipartition_edge_mapping', False)
        taxon_namespace = kwargs.get('taxon_namespace')
        if taxon_namespace is None:
            taxon_namespace = self.taxon_namespace
        if check_bipartitions:
            # Full leafset mask of the tree, taken from the root's bipartition.
            taxa_mask = self.seed_node.edge.bipartition._leafset_bitmask
        nodes = {}
        edges = {}
        curr_node = self.seed_node
        assert curr_node._parent_node is None, \
                "{} is seed node, but has non-'None' parent node: {}".format(curr_node, curr_node._parent_node)
        assert curr_node.edge.tail_node is None, \
                "{} is seed node, but edge has non-'None' tail node: {}".format(curr_node, curr_node.edge._parent_node)
        ancestors = []
        siblings = []
        # Manual pre-order traversal via _preorder_list_manip, so that the
        # raw parent/child/edge pointers (not the iterator machinery) are
        # what is being exercised.
        while curr_node:
            assert curr_node not in nodes, \
                    "Node {} seen multiple times".format(curr_node)
            curr_edge = curr_node.edge
            assert curr_edge not in edges, \
                    "Edge of {}, {}, is also an edge of {}".format(curr_node, curr_node.edge, edges[curr_edge])
            edges[curr_edge] = curr_node
            nodes[curr_node] = curr_edge
            assert curr_edge.head_node is curr_node, \
                    "Head node of edge of {}, {}, is {}, not {}".format(curr_node, curr_edge, curr_edge.head_node, curr_node)
            assert curr_edge.tail_node is curr_node._parent_node, \
                    "Tail node of edge of {}, {}, is {}, but parent node is {}".format(curr_node, curr_edge, curr_edge.tail_node, curr_node._parent_node)
            if check_bipartitions:
                cm = 0
                # Every leafset bitmask must be a subset of the full mask.
                assert (curr_edge.bipartition._leafset_bitmask | taxa_mask) == taxa_mask, \
                        "Bipartition mask error: {} | {} == {} (expecting: {})".format(
                                curr_edge.bipartition.leafset_as_bitstring(),
                                self.seed_node.edge.bipartition.leafset_as_bitstring(),
                                self.seed_node.edge.bipartition.bitmask_as_bitstring(curr_edge.bipartition._leafset_bitmask | taxa_mask),
                                self.seed_node.edge.bipartition.leafset_as_bitstring(), )
            c = curr_node._child_nodes
            if c:
                for child in c:
                    assert child._parent_node is curr_node, \
                            "Child of {}, {}, has {} as parent".format(curr_node, child, child._parent_node)
                    if check_bipartitions:
                        # Internal node: mask is the union of child masks.
                        cm |= child.edge.bipartition._leafset_bitmask
            elif check_bipartitions:
                assert curr_node.taxon is not None, \
                        "Cannot check bipartitions: {} is a leaf node, but its 'taxon' attribute is 'None'".format(curr_node)
                # Leaf node: mask is the taxon's own bit.
                cm = taxon_namespace.taxon_bitmask(curr_node.taxon)
            if check_bipartitions:
                assert (cm & taxa_mask) == curr_edge.bipartition._leafset_bitmask, \
                        "Bipartition leafset bitmask error: {} (taxa: {}, leafset: {})".format(
                                curr_edge.bipartition.bitmask_as_bitstring(cm),
                                curr_edge.bipartition.bitmask_as_bitstring(taxa_mask),
                                curr_edge.bipartition.leafset_as_bitstring())
                if unique_bipartition_edge_mapping:
                    assert self.bipartition_edge_map[curr_edge.bipartition] is curr_edge, \
                            "Expecting edge {} for bipartition {}, but instead found {}".format(curr_edge, curr_edge.bipartition, self.bipartition_edge_map[curr_edge.bipartition])
            curr_node, level = _preorder_list_manip(curr_node, siblings, ancestors)
        if check_bipartitions:
            # Reverse direction: every encoded bipartition must map to an
            # edge actually present in the tree.
            for b in self.bipartition_encoding:
                e = self.bipartition_edge_map[b]
                assert e in edges, "{}: {} => {}".format(e, e.tail_node, e.head_node)
                if unique_bipartition_edge_mapping:
                    assert b is e.bipartition
        return True
+
+    def _as_newick_string(self, **kwargs):
+        """
+        This returns the Node as a NEWICK statement according to the given
+        formatting rules. This should be used for debugging purposes only.
+        For production purposes, use the the full-fledged 'as_string()'
+        method of the object.
+        """
+        return self.seed_node._as_newick_string(**kwargs)
+
+    def _print_newick(self, **kwargs):
+        """
+        Convenience method to newick string representation of this tree
+        to the standard output stream.
+        """
+        import sys
+        sys.stdout.write(self._as_newick_string(**kwargs))
+        sys.stdout.write("\n")
+
+    def _write_newick(self, out, **kwargs):
+        """
+        This returns the Node as a NEWICK statement according to the given
+        formatting rules. This should be used for debugging purposes only.  For
+        production purposes, use the the full-fledged 'write_to_stream()'
+        method of the object.
+        """
+        self.seed_node._write_newick(out, **kwargs)
+
+    def _plot_bipartitions_on_tree(self,
+            show_splits=True,
+            show_leafsets=True,
+            show_taxon_labels=False,
+            is_bipartitions_updated=False,
+            width=120):
+        if not is_bipartitions_updated:
+            self.encode_bipartitions()
+        def _print_node(nd):
+            d = []
+            if show_splits:
+                d.append(nd.bipartition.split_as_bitstring())
+            if show_leafsets:
+                d.append(nd.bipartition.leafset_as_bitstring())
+            s = "/".join(d)
+            if show_taxon_labels and nd.taxon is not None:
+                s = s + " ({})".format(nd.taxon.label)
+            return s
+        return self.as_ascii_plot(
+                show_internal_node_labels=True,
+                node_label_compose_fn=_print_node,
+                width=width,
+                )
+
+###############################################################################
+### AsciiTreePlot
+
+class AsciiTreePlot(object):
+
+    class NullEdgeLengthError(ValueError):
+        def __init__(self, *args, **kwargs):
+            ValueError.__init__(self, *args, **kwargs)
+
+    def __init__(self, **kwargs):
+        """
+
+        Keyword Arguments
+        -----------------
+
+        plot_metric : str
+            A string which specifies how branches should be scaled, one of:
+            'age' (distance from tips), 'depth' (distance from root),
+            'level' (number of branches from root) or 'length' (edge
+            length/weights).
+        show_internal_node_labels : bool
+            Whether or not to write out internal node labels.
+        leaf_spacing_factor : int
+            Positive integer: number of rows between each leaf.
+        width : int
+            Force a particular display width, in terms of number of columns.
+        node_label_compose_fn : function object
+            A function that takes a Node object as an argument and returns
+            the string to be used to display it.
+
+        """
+        self.plot_metric = kwargs.pop('plot_metric', 'depth')
+        self.show_internal_node_labels = kwargs.pop('show_internal_node_labels', False)
+        self.show_external_node_labels = kwargs.pop('show_internal_node_labels', True)
+        self.leaf_spacing_factor = kwargs.pop('leaf_spacing_factor', 2)
+#        self.null_edge_length = kwargs.pop('null_edge_length', 0)
+        self.width = kwargs.pop('width', None)
+        self.display_width = kwargs.pop('display_width', self.width) # legacy
+        self.compose_node = kwargs.pop("node_label_compose_fn", None)
+        if self.compose_node is None:
+            self.compose_node = self.default_compose_node
+        if kwargs:
+            raise TypeError("Unrecognized or unsupported arguments: {}".format(kwargs))
+
+    def default_compose_node(self, node):
+        if node.taxon is not None and node.taxon.label is not None:
+            return node.taxon.label
+        elif node.label is not None:
+            return node.label
+        else:
+            return "@"
+
+    def reset(self):
+        self.grid = []
+        self.node_row = {}
+        self.node_col = {}
+        self.node_offset = {}
+        self.current_leaf_row = 0
+        self.node_label_map = {}
+
    def _calc_node_offsets(self, tree):
        """
        Populates ``self.node_offset`` with a horizontal offset for every
        node of ``tree``, according to ``self.plot_metric``:

        - 'age' / 'depth': computed tips-upward in post-order, then flipped
          so the root ends up at offset 0.
        - 'level' / 'length': accumulated root-downward in pre-order.
        """
        if self.plot_metric == 'age' or self.plot_metric == 'depth':

            for nd in tree.postorder_node_iter():
                cnds = nd.child_nodes()
                if self.plot_metric == 'depth': # 'number of branchings from tip'
                    if len(cnds) == 0:
                        curr_node_offset = 0.0
                    else:
                        depths = [self.node_offset[v] for v in cnds]
                        curr_node_offset = max(depths) + 1
                elif self.plot_metric == 'age': # 'sum of edge weights from tip'
                    # note: no enforcement of ultrametricity!
                    if len(cnds) == 0:
                        curr_node_offset = 0.0
                    else:
                        # NOTE(review): if the first child's edge length is
                        # None, 'curr_node_offset' keeps its value from the
                        # previous iteration (or is unbound on the first) --
                        # presumably edge lengths are expected to be set
                        # under this metric; confirm.
                        if cnds[0].edge.length is not None:
                            curr_node_offset = self.node_offset[cnds[0]] + cnds[0].edge.length
                else:
                    raise ValueError("Unrecognized plot metric '%s' (must be one of: 'age', 'depth', 'level', or 'length')" % self.plot_metric)
                self.node_offset[nd] = curr_node_offset
            # Flip so offsets grow from the root (0) out toward the tips.
            flipped_origin = max(self.node_offset.values())
            for nd in self.node_offset:
                self.node_offset[nd] = flipped_origin - self.node_offset[nd]
        else:
            for nd in tree.preorder_node_iter():
                if self.plot_metric == 'level': # 'number of branchings from root'
                    curr_edge_len = 1
                elif self.plot_metric == 'length': # 'sum of edge weights from root'
                    # A missing edge length is treated as zero.
                    if nd.edge.length is not None:
                        curr_edge_len = nd.edge.length
                    else:
                        curr_edge_len = 0
                else:
                    raise ValueError("Unrecognized plot metric '%s' (must be one of: 'age', 'depth', 'level', or 'length')" % self.plot_metric)
                if nd._parent_node is None:
                    self.node_offset[nd] = curr_edge_len
                else:
                    self.node_offset[nd] =  curr_edge_len + self.node_offset[nd._parent_node]
#        print "\n".join([str(k) for k in self.node_offset.values()])
+
+    def draw(self, tree, dest):
+        dest.write(self.compose(tree))
+
+    def get_label_for_node(self, node):
+        try:
+            return self.node_label_map[node]
+        except KeyError:
+            if node._child_nodes and self.show_internal_node_labels:
+                label = self.compose_node(node)
+            elif not node._child_nodes and self.show_external_node_labels:
+                label = self.compose_node(node)
+            else:
+                label = ""
+            self.node_label_map[node] = label
+            return label
+
+    def compose(self, tree):
+        self.reset()
+        if self.display_width is None:
+            display_width = terminal.terminal_width() - 1
+        else:
+            display_width = self.display_width
+        max_label_len = max([len(self.get_label_for_node(i)) for i in tree.leaf_node_iter()])
+        if max_label_len <= 0:
+            max_label_len = 0
+        #effective_display_width = display_width - max_label_len - len(tree.internal_nodes) - 1
+        effective_display_width = display_width - max_label_len - 1
+        self._calc_node_offsets(tree)
+        widths = [self.node_offset[i] for i in tree.leaf_node_iter() if self.node_offset[i] is not None]
+        max_width = float(max(widths))
+        if max_width == 0:
+            raise AsciiTreePlot.NullEdgeLengthError("Tree cannot be plotted under metric '%s' due to zero or null edge lengths: '%s'" % (self.plot_metric, tree.as_newick_string()))
+        edge_scale_factor = float(effective_display_width) / max_width
+        self.calc_plot(tree.seed_node,
+                       edge_scale_factor=edge_scale_factor)
+        for i in range(len(tree.leaf_nodes())*self.leaf_spacing_factor + 1):
+            self.grid.append([' ' for i in range(0, display_width)])
+        self.draw_node(tree.seed_node)
+        display = '\n'.join([''.join(i) for i in self.grid])
+        return display
+
+    def calc_plot(self, node, edge_scale_factor):
+        """
+        First pass through tree, post-order traversal to calculate
+        coordinates of each node.
+        """
+        child_nodes = node.child_nodes()
+        if child_nodes:
+            for n in child_nodes:
+                self.calc_plot(n, edge_scale_factor)
+            ys = [self.node_row[n] for n in child_nodes]
+            self.node_row[node] = int(float((max(ys)-min(ys)) / 2) + min(ys))
+        else:
+            self.node_row[node] = self.current_leaf_row
+            self.current_leaf_row = self.current_leaf_row + self.leaf_spacing_factor
+        if node.edge.length is None:
+            self.node_col[node] = 1
+        else:
+            self.node_col[node] = int(float(self.node_offset[node]) * edge_scale_factor)
+        self.node_col[node] = int(float(self.node_offset[node]) * edge_scale_factor)
+
+    def draw_label(self, label, row, start_col):
+        if label:
+            for i in range(len(label)):
+                if start_col + i < len(self.grid[row]):
+                    self.grid[row][start_col+i] = label[i]
+
    def draw_node(self, node):
        """
        Second pass through tree, plotting nodes onto given self.grid.

        Uses the row/column coordinates computed by ``calc_plot``: each
        child is joined to its parent's column with '/' (first child),
        '\\' (last child) or '+' (middle children), horizontal '-' runs
        for edges, and vertical '|' runs for the connector. Leaf labels
        are drawn to the right of the leaf glyph.
        """
        child_nodes = node.child_nodes()
        if child_nodes:
            for i, child_node in enumerate(child_nodes):
                # Vertical span of the connector between parent and child.
                start_row = min([self.node_row[node], self.node_row[child_node]])
                end_row = max([self.node_row[node], self.node_row[child_node]])
                if i == 0:
                    # Topmost child: corner glyph, and the corner cell itself
                    # is excluded from the vertical '|' run below.
                    self.grid[self.node_row[child_node]][self.node_col[node]] = '/'
                    start_row = start_row+1
                    edge_row = self.node_row[child_node]
                elif i == len(child_nodes)-1:
                    # Bottommost child: closing corner glyph.
                    self.grid[self.node_row[child_node]][self.node_col[node]] = '\\'
                    edge_row = self.node_row[child_node]
                else:
                    # Middle child of a polytomy.
                    self.grid[self.node_row[child_node]][self.node_col[node]] = '+'
                    edge_row = self.node_row[child_node]
                self.draw_node(child_node)
                # Horizontal edge from parent's column to the child.
                for x in range(self.node_col[node]+1, self.node_col[child_node]):
                    self.grid[edge_row][x] = '-'
                # Vertical connector along the parent's column.
                for y in range(start_row, end_row):
                    self.grid[y][self.node_col[node]] = '|'
            label = []
            if self.show_internal_node_labels:
                label = self.get_label_for_node(node)
                self.draw_internal_text(label, self.node_row[node], self.node_col[node])
            else:
                self.grid[self.node_row[node]][self.node_col[node]]='+'
        else:
            # Leaf: draw its label just right of its plotted position.
            label = self.get_label_for_node(node)
            self.draw_label(label, self.node_row[node], self.node_col[node]+1)
+
+    def draw_internal_text(self, label, r, c):
+        row = self.grid[r]
+        try:
+            for n, letter in enumerate(label):
+                row[c + n] = letter
+        except:
+            pass
+
+###############################################################################
+### Helper Functions
+
+def _preorder_list_manip(n, siblings, ancestors):
+    """
+    Helper function for recursion free preorder traversal, that does
+    not rely on attributes of the node other than child_nodes() (thus it
+    is useful for debuggging).
+
+    Returns the next node (or None) and the number of levels toward the
+    root the function "moved".
+    """
+    levels_moved = 0
+    c = n.child_nodes()
+    if c:
+        levels_moved += 1
+        ancestors.append(list(siblings))
+        del siblings[:]
+        siblings.extend(c[1:])
+        return c[0], levels_moved
+    while not siblings:
+        if ancestors:
+            levels_moved -= 1
+            del siblings[:]
+            siblings.extend(ancestors.pop())
+        else:
+            return None, levels_moved
+    return siblings.pop(0), levels_moved
+
+def _format_node(nd, **kwargs):
+    nf = kwargs.get('node_formatter', None)
+    if nf:
+        return nf(nd)
+    if nd.taxon is not None:
+        return str(nd.taxon)
+    if nd.label is not None:
+        return nd.label
+    return ""
+
+def _format_edge(e, **kwargs):
+    ef = kwargs.get('edge_formatter', None)
+    if ef:
+        return ef(e)
+    return str(e)
+
def _format_split(split, length=None, **kwargs):
    """
    Render the split bitmask ``split`` as a fixed-width bitstring; the
    width defaults to the size of the ``taxon_namespace`` keyword argument.
    """
    width = len(kwargs.get("taxon_namespace")) if length is None else length
    return bitprocessing.int_as_bitstring(split, length=width)
+
+def _convert_node_to_root_polytomy(nd):
+    """If ``nd`` has two children and at least on of them is an internal node,
+    then it will be converted to an out-degree three node (with the edge length
+    added as needed).
+
+    Returns a tuple of child nodes that were detached (or() if the tree was not
+    modified). This can be useful for removing the deleted node from the split_edge_map
+    dictionary.
+    """
+    nd_children = nd.child_nodes()
+    if len(nd_children) > 2:
+        return ()
+    try:
+        left_child = nd_children[0]
+    except:
+        return ()
+    if not left_child:
+        return ()
+    if len(nd_children) == 1:
+        right_child = None
+        dest_edge_head = nd
+    else:
+        right_child = nd_children[1]
+        dest_edge_head = right_child
+    curr_add = None
+    if right_child and right_child.is_internal():
+        try:
+            left_child.edge.length += right_child.edge.length
+        except:
+            pass
+        nd.remove_child(right_child)
+        grand_kids = right_child.child_nodes()
+        for gc in grand_kids:
+            nd.add_child(gc)
+        curr_add = right_child
+    elif left_child.is_internal():
+        try:
+            dest_edge_head.edge.length += left_child.edge.length
+        except:
+            pass
+        nd.remove_child(left_child)
+        grand_kids = left_child.child_nodes()
+        for gc in grand_kids:
+            nd.add_child(gc)
+        curr_add = left_child
+    if curr_add:
+        ndl = [curr_add]
+        t = _convert_node_to_root_polytomy(nd)
+        ndl.extend(t)
+        return tuple(ndl)
+    return ()
+
diff --git a/dendropy/interop/__init__.py b/dendropy/interop/__init__.py
new file mode 100644
index 0000000..93fdf74
--- /dev/null
+++ b/dendropy/interop/__init__.py
@@ -0,0 +1,21 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wrappers for interacting with other libraries.
+"""
diff --git a/dendropy/interop/ape.py b/dendropy/interop/ape.py
new file mode 100644
index 0000000..8c93b0d
--- /dev/null
+++ b/dendropy/interop/ape.py
@@ -0,0 +1,189 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wrappers for interacting with the APE library for R.
+"""
+
+import tempfile
+import re
+from dendropy.utility import messaging
+_LOG = messaging.get_logger(__name__)
+import dendropy
+
+DENDROPY_APE_INTEROPERABILITY = False
+try:
+    from rpy2 import robjects
+    from rpy2.rinterface import RRuntimeError
+    _R = robjects.r
+    _R('library(ape)')
+    DENDROPY_APE_INTEROPERABILITY = True
+except ImportError:
+    _LOG.warn("rpy2 not installed: APE interoperability not available")
+except RRuntimeError:
+    _LOG.warn("APE library not installed: APE interoperability not available")
+else:
+
    def as_ape_object(o):
        """
        Returns ``o`` as an ape object.

        Trees and tree lists are round-tripped through newick text via R's
        ``read.tree``; character matrices go through a temporary NEXUS file
        and ``read.nexus.data``; anything else is handed to rpy2's default
        Python-to-R converter.
        """
        kwargs = {}
        if isinstance(o, dendropy.TreeList):
            # keep.multi=TRUE: read.tree returns a multiPhylo collection.
            kwargs['keep.multi'] = True
            text = o.as_string("newick")
            return _R['read.tree'](text=text, **kwargs)
        elif isinstance(o, dendropy.Tree):
            kwargs['keep.multi'] = False
            text = o.as_string("newick")
            return _R['read.tree'](text=text, **kwargs)
#        if isinstance(o, dendropy.Tree) or isinstance(o, dendropy.TreeList):
#            f = tempfile.NamedTemporaryFile()
#            o.write_to_stream(f, "nexus", simple=True, block_titles=False)
#            f.flush()
#            return _R['read.nexus'](f.name)
        elif isinstance(o, dendropy.CharacterMatrix):
            # Character data has no newick form: go via a temp NEXUS file.
            f = tempfile.NamedTemporaryFile()
            o.write_to_stream(f, "nexus", simple=True, block_titles=False)
            f.flush()
            return _R['read.nexus.data'](f.name)
        else:
            return robjects.default_py2ri(o)
+
+    def as_r_vector(o, val_type):
+        if isinstance(o, dict):
+            keys = o.keys()
+            vals = [o[k] for k in keys]
+            robj = as_r_vector(vals, val_type=val_type)
+            robj.setnames(keys)
+        else:
+            if val_type == int:
+                robj = robjects.IntVector(o)
+            elif val_type == float:
+                robj = robjects.FloatVector(o)
+            elif val_type == bool:
+                robj = robjects.BoolVector(o)
+            else:
+                robj = robjects.RVector(o)
+        return robj
+
    def as_dendropy_object(o, taxon_set=None):
        """
        Returns a DendroPy object corresponding to the ape object ``o``. If ``o`` is
        a single tree (i.e., ``phylo``), then a DendroPy Tree is returned. If ``o`` is
        a list of trees (i.e., a ``multiPhylo`` object, or list of ``phylo`` objects),
        then a DendroPy TreeList is returned.

        Conversion goes through a temporary NEXUS file written from R and
        re-read by DendroPy.
        """
        if o.rclass[0] == "multiPhylo":
            f = tempfile.NamedTemporaryFile()
            _R['write.nexus'](o, file=f.name)
            return dendropy.TreeList.get_from_path(f.name, "nexus", taxon_set=taxon_set)
        elif o.rclass[0] == "phylo":
            f = tempfile.NamedTemporaryFile()
            _R['write.nexus'](o, file=f.name)
            return dendropy.Tree.get_from_path(f.name, "nexus", taxon_set=taxon_set)
        elif o.rclass[0] == "list":
            # NOTE(review): an R "list" is assumed here to be character data
            # (e.g. from read.nexus.data) -- confirm for other list payloads.
            f = tempfile.NamedTemporaryFile()
            _R['write.nexus.data'](o, file=f.name)
    #        print open(f.name, "r").read()
            d = dendropy.DataSet.get_from_path(f.name, "nexus", taxon_set=taxon_set)
            if len(d.char_matrices) == 0:
                raise ValueError("No character data found")
            elif len(d.char_matrices) == 1:
                return d.char_matrices[0]
            else:
                raise ValueError("Multiple character matrices returned")
        else:
            return robjects.default_ri2py(o)
+
+    def exec_and_capture(rfunc, *args, **kwargs):
+        stdoutf = tempfile.NamedTemporaryFile()
+        stderrf = tempfile.NamedTemporaryFile()
+        _R('sink("%s")' % stdoutf.name)
+        _R('zz = file("%s", "wt")' % stderrf.name)
+        _R('sink(zz, type="message")')
+        rfunc(*args, **kwargs)
+        _R('sink(type="message")')
+        _R('sink()')
+        i = open(stdoutf.name, "rU")
+        stdout = i.read()
+        i = open(stderrf.name, "rU")
+        stderr = i.read()
+        return stdout, stderr
+
+    def bd_ext(t, num_species_node_attr='num_species'):
+        """
+        This function fits by maximum likelihood a birth-death model to
+        the combined phylogenetic and taxonomic data of a given clade. The
+        phylogenetic data are given by a tree, ``t``, and the taxonomic data by
+        an attribute ``num_species`` of each of the taxa in the tree.
+        Returns dictionary, where keys are param names and values are param
+        values.
+        """
+        taxon_num_species = []
+        for taxon in t.taxon_set:
+            taxon_num_species.append(taxon.num_species)
+    #    taxon_num_species_map = {}
+    #    for taxon in t.taxon_set:
+    #        taxon_num_species_map[taxon.label.replace(" ", "_")] = taxon.num_species
+    #    taxon_num_species_map = [10, 47, 69, 214, 161, 17,
+    #            355, 51, 56, 10, 39, 152,
+    #            6, 143, 358, 103, 319,
+    #            23, 291, 313, 196, 1027, 5712]
+        stdout, stderr = exec_and_capture(_R['bd.ext'], as_ape_object(t), as_r_vector(taxon_num_species, int))
+        patterns = {
+            'deviance' : '\s*Deviance: ([\d\-\.Ee\+]+).*',
+            'log-likelihood' : '\s*Log-likelihood: ([\d\-\.Ee\+]+)',
+            'd/b' : '\s*d / b = ([\d\-\.Ee\+]+)',
+            'd/b s.e.' : '\s*d / b = .* StdErr = ([\d\-\.Ee\+]+)',
+            'b-d' : '\s*b - d = ([\d\-\.Ee\+]+)',
+            'b-d s.e.' : '\s*b - d = .* StdErr = ([\d\-\.Ee\+]+)',
+        }
+        results = {}
+        for k, v in patterns.items():
+            m = re.findall(v, stdout)
+            if m:
+                results[k] = float(m[0])
+        return results
+
    def birthdeath(t):
        """
        This function fits by maximum likelihood a birth-death model to
        the branching times computed from a phylogenetic tree using the
        method of Nee et al. (1994).
        Returns dictionary, where keys are param names and values are param
        values.
        """
        # Temporarily suppress R warnings for the duration of the call.
        _R('options(warn=-99)')
        rval = _R['birthdeath'](as_ape_object(t))
        _R('options(warn=0)')
        results = {}
        # Components of the R result object are located by name, then
        # accessed positionally.
        names = [n for n in rval.names]
        results['deviance'] = float(rval[names.index('dev')][0])
        # Deviance = -2 * log-likelihood.
        results['log-likelihood'] = results['deviance'] / -2
        para = rval[names.index('para')]
        para_names = [n for n in para.names]
        results['d/b'] = float(para[para_names.index('d/b')])
        results['b-d'] = float(para[para_names.index('b-d')])
        se = rval[names.index('se')]
        se_names = [n for n in se.names]
        results['d/b s.e.'] = float(se[se_names.index('d/b')])
        results['b-d s.e.'] = float(se[se_names.index('b-d')])
        return results
+
+
diff --git a/dendropy/interop/biopython.py b/dendropy/interop/biopython.py
new file mode 100644
index 0000000..6756d93
--- /dev/null
+++ b/dendropy/interop/biopython.py
@@ -0,0 +1,53 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wrappers for interacting with the Biopython library.
+"""
+
+import tempfile
+import re
+from dendropy.utility import messaging
+_LOG = messaging.get_logger(__name__)
+import dendropy
+
+DENDROPY_BIOPYTHON_INTEROPERABILITY = False
+try:
+    from Bio.Seq import Seq
+    from Bio.Alphabet import generic_dna, generic_rna, generic_nucleotide, generic_protein
+    DENDROPY_BIOPYTHON_INTEROPERABILITY = True
+except ImportError:
+    _LOG.warn("BioPython could not be imported: BioPython interoperability not available")
+else:
+
    def as_biopython_object(o):
        """
        Returns ``o`` as an biopython object.

        NOTE(review): this conversion appears unfinished -- it only selects
        the Biopython alphabet (``bpa``) matching the matrix type and then
        falls off the end, implicitly returning None; the actual Biopython
        object is never constructed. Confirm before relying on the return
        value.
        """
        if isinstance(o, dendropy.CharacterMatrix):
            if isinstance(o, dendropy.DnaCharacterMatrix):
                bpa = generic_dna
            elif isinstance(o, dendropy.RnaCharacterMatrix):
                bpa = generic_rna
            elif isinstance(o, dendropy.ProteinCharacterMatrix):
                bpa = generic_protein
            else:
                raise ValueError("Character data type not supported in Biopython: '%s'" % type(o))
        else:
            raise ValueError("Invalid object type for conversion to Biopython: '%s'" % type(o))
+
diff --git a/dendropy/interop/entrez.py b/dendropy/interop/entrez.py
new file mode 100644
index 0000000..3c520cf
--- /dev/null
+++ b/dendropy/interop/entrez.py
@@ -0,0 +1,91 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Low-level wrappers around the NCBI E-Utilities. Primarily meant to open
+file-like object handles on responses.
+"""
+
+import sys
+if sys.version_info.major < 3:
+    from urllib import urlencode
+    from urllib import urlopen
+else:
+    from urllib.parse import urlencode
+    from urllib.request import urlopen
+
+ENTREZ_EUTILS_BASE_URL = "http://eutils.ncbi.nlm.nih.gov/entrez/eutils"
+
def efetch(db, ids, rettype, retmode="xml", email=None):
    """
    Raw fetch. Returns file-like object opened for reading on string
    returned by query.

    ``ids`` may be a single identifier string or an iterable of
    identifiers (duplicates are collapsed via a set).
    """
    if isinstance(ids, str):
        id_list = ids
    else:
        id_list = ",".join(str(i) for i in set(ids))
    params = {
        "db": db,
        "id": id_list,
        "rettype": rettype,
        "retmode": retmode,
    }
    if email is not None:
        params["email"] = email
    query_url = ENTREZ_EUTILS_BASE_URL + "/efetch.fcgi?" + urlencode(params)
    return urlopen(query_url)
+
def get_taxonomy(**kwargs):
    """
    Query the NCBI taxonomy database with the given keyword parameters,
    returning the (file-like) HTTP response. ``retmode`` defaults to
    "xml"; a ``None`` email is dropped from the query.
    """
    params = dict(kwargs)
    params["db"] = "taxonomy"
    params.setdefault("retmode", "xml")
    if "email" in params and params["email"] is None:
        del params["email"]
    query_url = "http://www.ncbi.nlm.nih.gov/sites/entrez?" + urlencode(params)
    return urlopen(query_url)
+
+# # def efetch(db, ids, rettype, retmode="xml", email=None):
+# def efetch(db, **kwargs):
+#     """
+#     Raw fetch. Returns file-like object opened for reading on string
+#     returned by query.
+#     """
+#     params = dict(kwargs)
+#     params["db"] = db
+#     if "email" in kwargs and kwargs["email"] is None:
+#         del params["email"]
+#     if "id" in params:
+#         params["ids"] = params["id"]
+#         del params["id"]
+#     if "rettype" not in params:
+#         params["rettype"] = "gbc"
+#     if "retmode" not in params:
+#         params["retmode"] = "xml"
+#     if "ids" in params:
+#         ids = params["ids"]
+#         if isinstance(ids, str):
+#             id_list = ids
+#         else:
+#             id_list = ",".join([str(i) for i in list(ids)])
+#         del params["ids"]
+#         params["id"] = id_list
+#     return entrez_get(**params)
diff --git a/dendropy/interop/ete.py b/dendropy/interop/ete.py
new file mode 100644
index 0000000..42a842b
--- /dev/null
+++ b/dendropy/interop/ete.py
@@ -0,0 +1,68 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wrappers for interacting with the ETE library.
+"""
+
+import tempfile
+import re
+from dendropy.utility import messaging
+_LOG = messaging.get_logger(__name__)
+import dendropy
+
+DENDROPY_ETE_INTEROPERABILITY = False
+try:
+    import ete2
+    DENDROPY_ETE_INTEROPERABILITY = True
+except ImportError:
+    _LOG.warn("ete2 not installed: ETE interoperability not available")
+else:
+
+    def as_ete_object(o):
+        if isinstance(o, ete2.Tree):
+            return o
+        elif isinstance(o, dendropy.Tree) or isinstance(o, dendropy.Node):
+            s = o.as_newick_string() + ";"
+#            _LOG.debug(s)
+            return ete2.Tree(s)
+        elif isinstance(o, list) or isinstance(o, dendropy.TreeList):
+            return [as_ete_object(t) for t in o]
+        else:
+            raise ValueError("Object of type '%s' does not have a native ete2 representation" % type(o))
+
+    def as_dendropy_object(o, taxon_set=None):
+        if isinstance(o, dendropy.Tree) or isinstance(o, dendropy.Node) or isinstance(o, dendropy.TreeList):
+            return o
+        elif isinstance(o, ete2.Tree):
+            s = o.write()
+#            _LOG.debug(s)
+            return dendropy.Tree.get_from_string(s, 'newick', taxon_set=taxon_set)
+        elif isinstance(o, list) or isinstance(o, dendropy.TreeList):
+            return dendropy.TreeList([as_dendropy_object(t, taxon_set=taxon_set) for t in o], taxon_set=taxon_set)
+        else:
+            raise ValueError("Object of type '%s' does not have a DendroPy representation" % type(o))
+
+    def show(o):
+        if not isinstance(o, dendropy.Tree) \
+                and not isinstance(o, dendropy.Node)\
+                and not isinstance(o, ete2.Tree):
+            raise ValueError("Object of type '%s' cannot be rendered by ETE2" % type(o))
+        ete_o = as_ete_object(o)
+        ete_o.show()
+
diff --git a/dendropy/interop/gbif.py b/dendropy/interop/gbif.py
new file mode 100644
index 0000000..dc88852
--- /dev/null
+++ b/dendropy/interop/gbif.py
@@ -0,0 +1,394 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wrappers for interacting with GBIF.
+"""
+
+import sys
+if sys.version_info.major < 3:
+    from urllib import urlencode
+    from urllib import urlopen
+else:
+    from urllib.parse import urlencode
+    from urllib.request import urlopen
+from dendropy.datamodel import basemodel
+from dendropy.dataio import xmlprocessing
+
class GbifXmlElement(xmlprocessing.XmlElement):
    """
    XML element wrapper providing namespace-aware accessors for the
    fields of a GBIF web-service response.
    """

    GBIF_NAMESPACE = "http://portal.gbif.org/ws/response/gbif"
    TAXON_OCCURRENCE_NAMESPACE = "http://rs.tdwg.org/ontology/voc/TaxonOccurrence#"
    TAXON_CONCEPT_NAMESPACE = "http://rs.tdwg.org/ontology/voc/TaxonConcept#"
    TAXON_NAME_NAMESPACE = "http://rs.tdwg.org/ontology/voc/TaxonName#"
    RDF_NAMESPACE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"

    def __init__(self, element, default_namespace=None):
        if default_namespace is None:
            default_namespace = GbifXmlElement.GBIF_NAMESPACE
        xmlprocessing.XmlElement.__init__(self,
                element=element,
                default_namespace=default_namespace)

    def get_about_attr(self):
        """Return the value of the ``rdf:about`` attribute (typically a URI)."""
        return self._element.get("{%s}about" % self.RDF_NAMESPACE)

    # /gbifResponse/dataProviders/dataProvider/dataResources/dataResource/occurrenceRecords/occurrenceRecord
    def iter_taxon_occurrence(self):
        """Iterate over the ``TaxonOccurrence`` subelements of this element."""
        return self.namespaced_getiterator("TaxonOccurrence",
                namespace=self.TAXON_OCCURRENCE_NAMESPACE)

    def _process_ret_val(self, element, text_only=False):
        """Return ``element`` itself, or its text content when ``text_only``."""
        if text_only:
            # Must compare against None explicitly: ElementTree elements
            # with no children are falsy even when they carry text, so a
            # plain truth test would wrongly return None for leaf elements.
            if element is not None:
                return element.text
            else:
                return None
        else:
            return element

    def find_institution_code(self, text_only=False):
        """Find the ``institutionCode`` element (or its text)."""
        e = self.namespaced_find("institutionCode", namespace=self.TAXON_OCCURRENCE_NAMESPACE)
        return self._process_ret_val(e, text_only)

    def find_collection_code(self, text_only=False):
        """Find the ``collectionCode`` element (or its text)."""
        e = self.namespaced_find("collectionCode", namespace=self.TAXON_OCCURRENCE_NAMESPACE)
        return self._process_ret_val(e, text_only)

    def find_catalog_number(self, text_only=False):
        """Find the ``catalogNumber`` element (or its text)."""
        e = self.namespaced_find("catalogNumber", namespace=self.TAXON_OCCURRENCE_NAMESPACE)
        return self._process_ret_val(e, text_only)

    def find_longitude(self, text_only=False):
        """Find the ``decimalLongitude`` element (or its text)."""
        e = self.namespaced_find("decimalLongitude", namespace=self.TAXON_OCCURRENCE_NAMESPACE)
        return self._process_ret_val(e, text_only)

    def find_latitude(self, text_only=False):
        """Find the ``decimalLatitude`` element (or its text)."""
        e = self.namespaced_find("decimalLatitude", namespace=self.TAXON_OCCURRENCE_NAMESPACE)
        return self._process_ret_val(e, text_only)

    def find_taxon_name(self, text_only=False):
        """Find the identified taxon name element (or its text)."""
        # path = "{%(ns)s}identifiedTo/{%(ns)s}Identification/{%(ns)s}taxon_name" % {"ns": self.TAXON_OCCURRENCE_NAMESPACE}
        path = ["identifiedTo", "Identification", "taxonName"]
        e = self.namespaced_find(path, namespace=self.TAXON_OCCURRENCE_NAMESPACE)
        return self._process_ret_val(e, text_only)
+
class GbifDataProvenance(object):
    """
    Provenance metadata (data provider / data resource) extracted from a
    GBIF web-service response element.
    """

    def __init__(self, xml=None):
        """Initialize empty fields, populating them from ``xml`` if given."""
        self.name = None
        self.gbif_key = None
        self.uri = None
        self.rights = None
        self.citation = None
        if xml:
            self.parse_xml(xml)

    def parse_xml(self, xml):
        """Populate this object's fields from a GbifXmlElement-like ``xml``."""
        self.gbif_key = xml.get("gbifKey")
        self.uri = xml.get_about_attr()
        for field in ("name", "rights", "citation"):
            setattr(self, field, xml.namespaced_find(field).text)
+
+class GbifOccurrenceRecord(object):
+
+    def parse_from_stream(stream):
+        xml_doc = xmlprocessing.XmlDocument(file_obj=stream,
+                subelement_factory=GbifXmlElement)
+        gb_recs = []
+        for txo in xml_doc.root.iter_taxon_occurrence():
+            gbo = GbifOccurrenceRecord()
+            gbo.parse_taxon_occurrence_xml(txo)
+            gb_recs.append(gbo)
+        return gb_recs
+    parse_from_stream = staticmethod(parse_from_stream)
+
    def __init__(self):
        """Initialize an empty occurrence record."""
        self.gbif_key = None          # GBIF occurrence key ('gbifKey' attribute)
        self.uri = None               # rdf:about URI of the record
        self.institution_code = None  # owning institution code
        self.collection_code = None   # collection within the institution
        self.catalog_number = None    # catalog number within the collection
        self.taxon_name = None        # identified scientific name
        self.data_provider = None     # provenance: data provider
        self.data_resource = None     # provenance: data resource
        self._longitude = None        # backing store for 'longitude' property
        self._latitude = None         # backing store for 'latitude' property
+
    def subelement_factory(self, element):
        """Wrap a raw XML ``element`` in a GbifXmlElement."""
        return GbifXmlElement(element)
+
    def parse_taxon_occurrence_xml(self, txo):
        """
        Populate this record from the ``TaxonOccurrence`` element ``txo``.

        Longitude and latitude are assigned via the coercing property
        setters defined below (string -> float where possible).
        """
        self.gbif_key = txo.get("gbifKey")
        self.uri = txo.get_about_attr()
        self.institution_code = txo.find_institution_code(text_only=True)
        self.collection_code = txo.find_collection_code(text_only=True)
        self.catalog_number = txo.find_catalog_number(text_only=True)
        self.longitude = txo.find_longitude(text_only=True)
        self.latitude = txo.find_latitude(text_only=True)
        self.taxon_name = txo.find_taxon_name(text_only=True)
+
    def _get_longitude(self):
        return self._longitude
    def _set_longitude(self, value):
        # Coerce to float when possible; non-numeric strings are kept as-is.
        # NOTE(review): a None value is silently ignored (the previous value
        # is retained) -- confirm this is intended.
        if value is not None:
            try:
                self._longitude = float(value)
            except ValueError:
                self._longitude = value
    # Longitude in decimal degrees (float when parseable, else raw value).
    longitude = property(_get_longitude, _set_longitude)
+
    def _get_latitude(self):
        return self._latitude
    def _set_latitude(self, value):
        # Coerce to float when possible; non-numeric strings are kept as-is.
        # NOTE(review): a None value is silently ignored (the previous value
        # is retained) -- confirm this is intended.
        if value is not None:
            try:
                self._latitude = float(value)
            except ValueError:
                self._latitude = value
    # Latitude in decimal degrees (float when parseable, else raw value).
    latitude = property(_get_latitude, _set_latitude)
+
+    def __str__(self):
+        return "%s (%s) %s: %s [%s %s]" % (
+                self.institution_code,
+                self.collection_code,
+                self.catalog_number,
+                self.taxon_name,
+                self.longitude,
+                self.latitude)
+
    def _get_coordinates_as_string(self, sep=" "):
        # Renders "longitude<sep>latitude", e.g. "-116.02004 34.67338".
        return "%s%s%s" % (self.longitude, sep, self.latitude)
    # Read-only convenience rendering of the coordinate pair.
    coordinates_as_string = property(_get_coordinates_as_string)
+
+    # def as_coordinate_annotation(self,
+    #         name=None,
+    #         name_prefix=None,
+    #         namespace=None,
+    #         name_is_prefixed=False,
+    #         include_gbif_reference=True,
+    #         include_metadata=True,
+    #         dynamic=False):
+    #     if name is None:
+    #         name = "coordinates"
+    #     if name_prefix is None or namespace is None:
+    #         # name_prefix = "kml"
+    #         # namespace = "http://earth.google.com/kml/2.2"
+    #         # name_prefix = "ogckml"
+    #         # namespace = "http://www.opengis.net/kml/2.2"
+    #         name_prefix = "gml"
+    #         namespace = "http://www.opengis.net/gml"
+    #     if dynamic:
+    #         is_attribute = True
+    #         value = (self, "coordinates_as_string")
+    #     else:
+    #         is_attribute = False
+    #         value = self.coordinates_as_string
+    #     annote = basemodel.Annotation(
+    #             name="pos",
+    #             value=value,
+    #             name_prefix=name_prefix,
+    #             namespace=namespace,
+    #             name_is_prefixed=name_is_prefixed,
+    #             is_attribute=is_attribute,
+    #             annotate_as_reference=False,
+    #             )
+    #     if include_gbif_reference:
+    #         if dynamic:
+    #             value = (self, "uri")
+    #         else:
+    #             value = self.uri
+    #         subannote = basemodel.Annotation(
+    #                 name="source",
+    #                 value=value,
+    #                 # name_prefix="dc",
+    #                 # namespace="http://purl.org/dc/elements/1.1/",
+    #                 name_prefix="dcterms",
+    #                 namespace="http://purl.org/dc/terms/",
+    #                 name_is_prefixed=False,
+    #                 is_attribute=is_attribute,
+    #                 annotate_as_reference=True,
+    #                 )
+    #         annote.annotations.add(subannote)
+    #     if include_metadata:
+    #         for attr in [
+    #             ("institution_code", "institutionCode"),
+    #             ("collection_code", "collectionCode"),
+    #             ("catalog_number", "catalogNumber"),
+    #             ("taxon_name", "scientificName"),
+    #             ]:
+    #             if dynamic:
+    #                 value = (self, attr[0])
+    #             else:
+    #                 value = getattr(self, attr[0])
+    #             subannote = basemodel.Annotation(
+    #                     name=attr[1],
+    #                     value=value,
+    #                     name_prefix="dwc",
+    #                     namespace="http://rs.tdwg.org/dwc/terms/",
+    #                     name_is_prefixed=False,
+    #                     is_attribute=is_attribute,
+    #                     annotate_as_reference=False,
+    #                     )
+    #             annote.annotations.add(subannote)
+    #     return annote
+
+    def as_annotation(self,
+            name="TaxonOccurrence",
+            name_prefix="to",
+            namespace="http://rs.tdwg.org/ontology/voc/TaxonOccurrence#",
+            include_gbif_reference=True,
+            dynamic=False):
+        """
+        Sample output (NeXML)::
+
+            <meta xsi:type="nex:ResourceMeta" rel="to:TaxonOccurrence" id="d4324014736" >
+                <meta xsi:type="nex:ResourceMeta" rel="dcterms:source" href="http://data.gbif.org/ws/rest/occurrence/get/44726287" id="d4324014800" />
+                <meta xsi:type="nex:LiteralMeta" property="to:decimalLongitude" content="-116.02004" datatype="xsd:float" id="d4324014928" />
+                <meta xsi:type="nex:LiteralMeta" property="to:decimalLatitude" content="34.67338" datatype="xsd:float" id="d4324014992" />
+                <meta xsi:type="nex:LiteralMeta" property="to:institutionCode" content="ROM" datatype="xsd:string" id="d4324015056" />
+                <meta xsi:type="nex:LiteralMeta" property="to:collectionCode" content="Herps" datatype="xsd:string" id="d4324015120" />
+                <meta xsi:type="nex:LiteralMeta" property="to:catalogNumber" content="14584" datatype="xsd:string" id="d4324015184" />
+                <meta xsi:type="nex:LiteralMeta" property="to:scientificName" content="Crotaphytus bicinctores" datatype="xsd:string" id="d4324015248" />
+            </meta>
+
+        """
+        # name_prefix="dwc",
+        # namespace="http://rs.tdwg.org/dwc/terms/",
+        top_node = basemodel.Annotation(
+                name=name,
+                value=None,
+                name_prefix=name_prefix,
+                namespace=namespace,
+                name_is_prefixed=False,
+                is_attribute=False,
+                annotate_as_reference=True,
+                )
+        if dynamic:
+            is_attribute=True
+        else:
+            is_attribute=False
+        if include_gbif_reference:
+            if dynamic:
+                value = (self, "uri")
+            else:
+                value = self.uri
+            subannote = basemodel.Annotation(
+                    name="source",
+                    value=value,
+                    # name_prefix="dc",
+                    # namespace="http://purl.org/dc/elements/1.1/",
+                    name_prefix="dcterms",
+                    namespace="http://purl.org/dc/terms/",
+                    name_is_prefixed=False,
+                    is_attribute=is_attribute,
+                    annotate_as_reference=True,
+                    )
+            top_node.annotations.add(subannote)
+        for attr in [
+            ("longitude", "decimalLongitude", "xsd:float"),
+            ("latitude", "decimalLatitude", "xsd:float"),
+            ("institution_code", "institutionCode", "xsd:string"),
+            ("collection_code", "collectionCode", "xsd:string"),
+            ("catalog_number", "catalogNumber", "xsd:string"),
+            ("taxon_name", "scientificName", "xsd:string"),
+            ]:
+            if dynamic:
+                value = (self, attr[0])
+                is_attribute=True
+            else:
+                value = getattr(self, attr[0])
+                is_attribute=False
+            subannote = basemodel.Annotation(
+                    name=attr[1],
+                    value=value,
+                    datatype_hint=attr[2],
+                    name_prefix=name_prefix,
+                    namespace=namespace,
+                    name_is_prefixed=False,
+                    is_attribute=is_attribute,
+                    annotate_as_reference=False,
+                    )
+            top_node.annotations.add(subannote)
+        return top_node
+
class GbifDb(object):
    """
    Base class for GBIF web-service wrappers: holds the service base URL
    and assembles REST query URLs.
    """

    def __init__(self):
        # Concrete subclasses set this to the service endpoint root.
        self.base_url = None

    def compose_query_url(self, action, query_dict):
        """
        Return the full GET URL for ``action`` with ``query_dict``
        rendered as the query string.
        """
        query_string = "&".join("%s=%s" % (key, value)
                for key, value in query_dict.items())
        return self.base_url + action + "?" + query_string
+
class GbifOccurrenceDb(GbifDb):
    """
    Wrapper around the GBIF occurrence-record REST service.
    """

    def __init__(self):
        GbifDb.__init__(self)
        self.base_url = "http://data.gbif.org/ws/rest/occurrence/"

    def fetch_keys(self, **kwargs):
        """
        Query the 'list' action with ``kwargs`` as filter criteria and
        return the GBIF keys of the matching occurrence records.
        """
        list_url = self.compose_query_url(action="list", query_dict=kwargs)
        return self.parse_list_keys(urlopen(list_url))

    def fetch_occurrences(self, **kwargs):
        """
        Return occurrence records matching the filter criteria in
        ``kwargs``: first fetch the matching keys via the 'list' action,
        then retrieve each record individually via the 'get' action.
        """
        results = []
        for gbif_key in self.fetch_keys(**kwargs):
            get_url = self.compose_query_url(
                    action="get",
                    query_dict={"key": gbif_key})
            results.extend(self.parse_occurrence_records(urlopen(get_url)))
        return results

    def parse_list_keys(self, stream):
        """
        Extract the ``gbifKey`` attribute of each TaxonOccurrence element
        in a 'list' action response.
        """
        xml_doc = xmlprocessing.XmlDocument(file_obj=stream,
                subelement_factory=GbifXmlElement)
        return [occurrence.get("gbifKey")
                for occurrence in xml_doc.root.iter_taxon_occurrence()]

    def parse_occurrence_records(self, stream):
        """
        Parse a 'get' action response into GbifOccurrenceRecord objects.
        """
        return GbifOccurrenceRecord.parse_from_stream(stream)
+    #     xml_doc = xmlprocessing.XmlDocument(file_obj=stream,
+    #             subelement_factory=GbifXmlElement)
+    #     xml_root = xml_doc.root
+    #     gbif_recs = []
+    #     for dps in xml_root.namespaced_findall("dataProviders", namespace=self.GBIF_NAMESPACE):
+    #         for dp in dps.namespaced_findall("dataProvider", namespace=self.GBIF_NAMESPACE):
+    #             data_provider = GbifDataProvenance(dp)
+    #             for drs in dp.namespaced_findall("dataResources", namespace=self.GBIF_NAMESPACE):
+    #                 for dr in drs.namespaced_findall("dataResource", namespace=self.GBIF_NAMESPACE):
+    #                     data_resource = GbifDataProvenance(dr)
+    #                     for occurs in dr.namedspaced_findall("occurrenceRecords", namespace=self.GBIF_NAMESPACE):
+    #                         for occur in occurs.namespaced_findall("TaxonOccurrence", namespace=self.TAXON_OCCURRENCE_NAMESPACE):
+    #                             pass
+
diff --git a/dendropy/interop/genbank.py b/dendropy/interop/genbank.py
new file mode 100644
index 0000000..ca9eb7a
--- /dev/null
+++ b/dendropy/interop/genbank.py
@@ -0,0 +1,1014 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wrappers for retrieving data from GenBank.
+"""
+
+import sys
+import re
+import urllib
+import dendropy
+from dendropy.interop import entrez
+from xml.etree import ElementTree
+from dendropy.utility import container
+from dendropy.utility import error
+
+GENBANK_ANNOTATION_PREFIX = "genbank"
+GENBANK_ANNOTATION_NAMESPACE = "http://www.ncbi.nlm.nih.gov/dtd/INSD_INSDSeq.mod.dtd"
+
+##############################################################################
+## GenBank Resources
+
class GenBankResourceStore(object):
    """
    Base GenBank data acquisition and manager class.

    Maintains an ordered collection of downloaded GenBank records,
    indexed by primary accession, accession.version, and GI number so
    that duplicate records are silently skipped on insertion.
    """

    def parse_xml(**kwargs):
        """
        Parse an INSD XML document and return a list of
        GenBankAccessionRecord objects, one per INSDSeq element found.

        Exactly one of the keyword arguments ``stream`` (a file-like
        object) or ``string`` (the XML text) must be supplied; a
        TypeError is raised otherwise.
        """
        if "stream" in kwargs and "string" in kwargs:
            raise TypeError("Cannot specify both 'stream' and 'string'")
        elif "stream" in kwargs:
            tree = ElementTree.parse(kwargs["stream"])
            root = tree.getroot()
        elif "string" in kwargs:
            s = kwargs["string"]
            try:
                root = ElementTree.fromstring(s)
            except:
                # Dump the unparseable document to stderr for diagnosis,
                # then re-raise the original error unchanged.
                sys.stderr.write(s)
                raise
        else:
            raise TypeError("Must specify exactly one of 'stream' or 'string'")
        gb_recs = []
        for seq_set in root.iter("INSDSet"):
            for seq in seq_set.iter("INSDSeq"):
                gb_rec = GenBankAccessionRecord(xml=seq)
                gb_recs.append(gb_rec)
        return gb_recs
    parse_xml = staticmethod(parse_xml)

    def fetch_xml(db, ids, prefix=None, email=None, as_stream=False):
        """
        Download records from GenBank via Entrez efetch in INSD XML
        ('gbc') format, returning either the open response stream
        (``as_stream=True``) or its full text.

        NOTE(review): ``prefix`` is accepted but not applied here --
        prepend prefixes with ``prepare_ids`` before calling; the
        parameter is retained for signature compatibility.
        """
        stream = entrez.efetch(db=db,
                ids=ids,
                rettype='gbc',
                retmode='xml',
                email=email)
        if as_stream:
            return stream
        else:
            return stream.read()
    fetch_xml = staticmethod(fetch_xml)

    def prepare_ids(ids, prefix=None):
        """
        Return a new list with ``prefix`` (if given) prepended to each
        identifier in ``ids``; the input sequence is not modified.
        """
        if prefix is None:
            prefix = ""
        ids = ["%s%s" % (prefix, i) for i in ids]
        return ids
    prepare_ids = staticmethod(prepare_ids)

    class AccessionFetchError(Exception):
        """
        Raised when one or more requested accessions could not be
        retrieved from GenBank.
        """
        def __init__(self, missing, response=None):
            if response is None:
                response = ""
            else:
                response = "\n\nServer response was:\n\n%s" % response
            missing_desc = ", ".join([str(s) for s in missing])
            Exception.__init__(self, "\n\nFailed to retrieve accessions: %s%s" % (missing_desc, response))

    def __init__(self,
            db,
            ids=None,
            id_range=None,
            prefix=None,
            verify=True,
            email=None):
        """
        Initializes a GenBank data resource manager class, and optionally
        populates it with data from GenBank.

            - ``db``: database (e.g. "nucleotide" or "protein")
            - ``ids``: sequence of GenBank accession identifiers or GI numbers.
            - ``id_range``: tuple of integers indicating first and last (inclusive) range of identifiers to download.
            - ``prefix``: string to be prepended to values in - ``ids``: or generated from - ``id_range``:
            - ``verify``: check for one-to-one correspondence between requested id's and downloaded records

        """
        self.db = db
        self.email = email
        # Ordered record list plus three lookup indexes, all kept in
        # sync by ``add``.
        self._recs = []
        self._accession_recs = {}
        self._version_recs = {}
        self._gi_recs = {}
        if ids is not None:
            self.acquire(ids=ids,
                    prefix=prefix,
                    verify=verify)
        if id_range is not None:
            self.acquire_range(
                    first=id_range[0],
                    last=id_range[1],
                    prefix=prefix,
                    verify=verify)

    def __len__(self):
        return len(self._recs)

    def __getitem__(self, key):
        return self._recs[key]

    def __setitem__(self, key, value):
        # Records may only enter the store through ``add``/``update``,
        # which maintain the lookup indexes.
        raise TypeError("%s elements cannot be reassigned" % self.__class__)

    def __delitem__(self, key):
        raise TypeError("%s elements cannot be deleted" % self.__class__)

    def __iter__(self):
        return iter(self._recs)

    def __reversed__(self):
        return reversed(self._recs)

    def __contains__(self, item):
        return item in self._recs

    def add(self, rec):
        """
        Append ``rec`` to the store unless an equal record, or one
        sharing its primary accession, accession.version, or GI number,
        is already present (in which case ``rec`` is silently skipped).
        Records with no database assigned inherit this store's ``db``.
        """
        if rec.primary_accession in self._accession_recs:
            return None
        if rec.accession_version in self._version_recs:
            return None
        if rec.gi in self._gi_recs:
            return None
        if rec in self._recs:
            return None
        if rec.db is None:
            rec.db = self.db
        self._recs.append(rec)
        self._accession_recs[rec.primary_accession] = rec
        self._version_recs[rec.accession_version] = rec
        self._gi_recs[rec.gi] = rec

    def update(self, recs):
        """
        Add each record in ``recs`` (duplicates skipped; see ``add``).
        """
        for rec in recs:
            self.add(rec)

    def read_xml_string(self, xml_string):
        """
        Parse ``xml_string`` (INSD XML) and add the resulting records.
        """
        gb_recs = GenBankResourceStore.parse_xml(string=xml_string)
        self.update(gb_recs)

    def read_xml_stream(self, xml_stream):
        """
        Parse the INSD XML document in ``xml_stream`` and add the
        resulting records.
        """
        gb_recs = GenBankResourceStore.parse_xml(stream=xml_stream)
        self.update(gb_recs)

    def acquire(self, ids, prefix=None, verify=True):
        """
        Adds more data from GenBank.

            - ``ids``: sequence of GenBank accession identifiers or GI numbers.
            - ``prefix``: string to be prepended to values in - ``ids`` or generated from - ``id_range``.
            - ``verify``: check for one-to-one correspondence between requested id's and downloaded records

        """
        ids = GenBankResourceStore.prepare_ids(ids=ids, prefix=prefix)
        xml_string = GenBankResourceStore.fetch_xml(db=self.db,
                ids=ids,
                prefix=prefix,
                email=self.email,
                as_stream=False)
        gb_recs = GenBankResourceStore.parse_xml(string=xml_string)
        # Index the downloaded records under every identifier form that
        # a requested id might match.
        accession_recs = {}
        accession_version_recs = {}
        gi_recs = {}
        for gb_rec in gb_recs:
            accession_recs[gb_rec.primary_accession] = gb_rec
            accession_version_recs[gb_rec.accession_version] = gb_rec
            gi_recs[gb_rec.gi] = gb_rec
        added = []
        missing = []
        # (Fixed: previously iterated with enumerate() but never used
        # the index.)
        for gbid in ids:
            sgbid = str(gbid)
            gb_rec = None
            if sgbid in accession_version_recs:
                gb_rec = accession_version_recs[sgbid]
            elif sgbid in accession_recs:
                gb_rec = accession_recs[sgbid]
            elif sgbid in gi_recs:
                gb_rec = gi_recs[sgbid]
            elif verify:
                missing.append(sgbid)
            if gb_rec is not None:
                gb_rec.db = self.db
                # Remember the identifier the caller used to request
                # this record (used, e.g., as a fallback taxon label).
                gb_rec.request_key = sgbid
                added.append(gb_rec)
        if len(added) == 0 and missing:
            # Nothing matched at all: include the raw server response to
            # help diagnose the failure.
            raise GenBankResourceStore.AccessionFetchError(missing=missing, response=xml_string)
        elif missing:
            raise GenBankResourceStore.AccessionFetchError(missing=missing, response=None)
        self.update(added)
        return added

    def acquire_range(self,
            first,
            last,
            prefix=None,
            verify=True):
        """
        Adds more data from GenBank.

            - ``first``: integer specifying the start (inclusive) of the range of id's to download.
            - ``last``: integer specifying the start (inclusive) of the range of id's to download.
            - ``prefix``: string to be prepended to values given range.
            - ``verify``: check for one-to-one correspondence between requested id's and downloaded records

        """
        ids = range(first, last+1)
        return self.acquire(
                ids=ids,
                prefix=prefix,
                verify=verify)

    def generate_char_matrix(self,
            label_components=None,
            label_component_separator=" ",
            taxon_namespace=None,
            gb_to_taxon_fn=None,
            add_full_annotation_to_taxa=False,
            add_ref_annotation_to_taxa=False,
            add_full_annotation_to_seqs=False,
            add_ref_annotation_to_seqs=False,
            set_taxon_attr=None,
            set_seq_attr=None,
            matrix_label=None):
        """
        Generates a CharacterMatrix of current sequences.

            - ``label_components``: list of strings giving names of GenBankAccessionRecord attributes to be used to compose label.
            - ``label_component_separator``: a string used to separate label components.
            - ``taxon_namespace``: TaxonNamespace object to be used as the ``taxon_namespace`` of the resulting CharacterMatrix.
            - ``gb_to_taxon_fn``: Function to be used to assign a Taxon object to sequence. Should take a GenBankAccessionRecord object as an argument and return a Taxon object.
            - ``add_full_annotation_to_taxa``: If True, add link to record as metadata annotation to Taxon objects.
            - ``add_ref_annotation_to_taxa``: If True, add full GenBank record as metadata annotations to Taxon objects.
            - ``add_full_annotation_to_seqs``: If True, add link to record as metadata annotation to sequence (CharacterDataSequence) objects.
            - ``add_ref_annotation_to_seqs``: If True, add full GenBank record as metadata annotations to sequence (CharacterDataSequence) objects.
            - ``set_taxon_attr``: Name of attribute (string) to create on Taxon objects pointing to GenBank record object (GenBankAccessionRecord).
            - ``set_seq_attr``: Name of attribute (string) to create on sequence objects pointing to GenBank record object (GenBankAccessionRecord).
            - ``matrix_label``: Label of character matrix.

        """
        if gb_to_taxon_fn is not None and taxon_namespace is None:
            raise TypeError("Cannot specify 'gb_to_taxon_fn' without 'taxon_namespace'")
        if taxon_namespace is None:
            taxon_namespace = dendropy.TaxonNamespace()
        # (Fixed: removed unused local ``data_str``.)
        char_matrix = self.char_matrix_type(label=matrix_label, taxon_namespace=taxon_namespace)
        for gb_idx, gb_rec in enumerate(self._recs):
            taxon = None
            if gb_to_taxon_fn is not None:
                taxon = gb_to_taxon_fn(gb_rec)
            else:
                if label_components is not None:
                    label = gb_rec.compose_taxon_label(
                            components=label_components,
                            separator=label_component_separator)
                else:
                    label = gb_rec.request_key
                assert label is not None
                assert str(label) != "None"
                taxon = taxon_namespace.require_taxon(label=label)
            assert taxon is not None
            if add_ref_annotation_to_taxa:
                taxon.annotations.add(gb_rec.as_reference_annotation())
            if add_full_annotation_to_taxa:
                taxon.annotations.add(gb_rec.as_annotation())
            if set_taxon_attr is not None:
                setattr(taxon, set_taxon_attr, gb_rec)
            curr_vec = char_matrix.new_sequence(taxon=taxon)
            char_matrix[taxon] = curr_vec
            if add_ref_annotation_to_seqs:
                curr_vec.annotations.add(gb_rec.as_reference_annotation())
            if add_full_annotation_to_seqs:
                curr_vec.annotations.add(gb_rec.as_annotation())
            if set_seq_attr is not None:
                setattr(curr_vec, set_seq_attr, gb_rec)
            seq_text = gb_rec.sequence_text.upper()
            for col_ind, c in enumerate(seq_text):
                c = c.strip()
                if not c:
                    # Skip whitespace embedded in the sequence text.
                    continue
                try:
                    state = char_matrix.default_state_alphabet[c]
                except KeyError:
                    raise ValueError('Accession %d of %d (%s, GI %s, acquired using key: %s): Unrecognized sequence symbol "%s" in position %d: %s'
                            % (gb_idx+1, len(self._recs), gb_rec.primary_accession, gb_rec.gi, gb_rec.request_key, c, col_ind+1, seq_text))
                curr_vec.append(state)
        return char_matrix
+
class GenBankNucleotide(GenBankResourceStore):
    """
    Manages a collection of records from the GenBank 'nucleotide'
    database.
    """

    def __init__(self,
            ids=None,
            id_range=None,
            prefix=None,
            verify=True,
            char_matrix_type=None,
            email=None):
        """
        See ``GenBankResourceStore.__init__`` for the acquisition
        arguments; ``char_matrix_type`` is the CharacterMatrix class
        used when realizing sequences as character data.
        """
        base_kwargs = dict(
                db="nucleotide",
                ids=ids,
                id_range=id_range,
                prefix=prefix,
                verify=verify,
                email=email,
                )
        GenBankResourceStore.__init__(self, **base_kwargs)
        self.char_matrix_type = char_matrix_type
+
class GenBankDna(GenBankNucleotide):
    """
    Manages a collection of DNA sequence records from the GenBank
    'nucleotide' database.
    """

    def __init__(self,
            ids=None,
            id_range=None,
            prefix=None,
            verify=True,
            email=None):
        """
        See ``GenBankResourceStore.__init__`` for argument details;
        sequences are realized as DnaCharacterMatrix data.
        """
        base_kwargs = dict(
                ids=ids,
                id_range=id_range,
                prefix=prefix,
                verify=verify,
                email=email,
                char_matrix_type=dendropy.DnaCharacterMatrix,
                )
        GenBankNucleotide.__init__(self, **base_kwargs)
+
class GenBankRna(GenBankNucleotide):
    """
    Manages a collection of RNA sequence records from the GenBank
    'nucleotide' database.
    """

    def __init__(self,
            ids=None,
            id_range=None,
            prefix=None,
            verify=True,
            email=None):
        """
        See ``GenBankResourceStore.__init__`` for argument details;
        sequences are realized as RnaCharacterMatrix data.
        """
        base_kwargs = dict(
                ids=ids,
                id_range=id_range,
                prefix=prefix,
                verify=verify,
                email=email,
                char_matrix_type=dendropy.RnaCharacterMatrix,
                )
        GenBankNucleotide.__init__(self, **base_kwargs)
+
class GenBankProtein(GenBankResourceStore):
    """
    Manages a collection of records from the GenBank 'protein'
    database.
    """

    def __init__(self,
            ids=None,
            id_range=None,
            prefix=None,
            verify=True,
            email=None):
        """
        See ``GenBankResourceStore.__init__`` for argument details;
        sequences are realized as ProteinCharacterMatrix data.
        """
        base_kwargs = dict(
                db="protein",
                ids=ids,
                id_range=id_range,
                prefix=prefix,
                verify=verify,
                email=email,
                )
        GenBankResourceStore.__init__(self, **base_kwargs)
        self.char_matrix_type = dendropy.ProteinCharacterMatrix
+
+##############################################################################
+## GenBank Data Parsing to Python Objects
+
class GenBankAccessionReference(object):
    """
    A GenBank reference (bibliographic citation) record.
    """
    def __init__(self, xml=None):
        self.number = None
        self.position = None
        self.authors = []
        self.consrtm = None
        self.title = None
        self.journal = None
        self.medline_id = None
        self.pubmed_id = None
        self.remark = None
        # Cross-reference database name -> identifier mappings.
        self.db_ids = container.OrderedCaselessDict()
        if xml is not None:
            self.parse_xml(xml)

    def parse_xml(self, xml):
        """
        Populate this record from an INSDReference XML element.
        """
        self.number = xml.findtext("INSDReference_reference")
        self.position = xml.findtext("INSDReference_position")
        for authors in xml.findall("INSDReference_authors"):
            # NOTE(review): the INSD DTD nests INSDAuthor elements
            # directly under INSDReference_authors; confirm that the
            # intermediate "INSDReference_author" lookup here matches
            # real server responses.
            for author_xml in authors.findall("INSDReference_author"):
                author = author_xml.findtext("INSDAuthor")
                self.authors.append(author)
        self.title = xml.findtext("INSDReference_title")
        self.journal = xml.findtext("INSDReference_journal")
        for xrefs in xml.findall("INSDReference_xref"):
            for xref_xml in xrefs.findall("INSDXref"):
                dbname = xref_xml.findtext("INSDXref_dbname")
                dbid = xref_xml.findtext("INSDXref_id")
                self.db_ids[dbname] = dbid
        # Fixed: these fields are read from the INSDReference element
        # itself, so parse them unconditionally; previously these two
        # assignments were nested inside the xref loop and were skipped
        # whenever no INSDReference_xref element was present.
        self.pubmed_id = xml.findtext("INSDReference_pubmed")
        self.medline_id = xml.findtext("INSDReference_medline")

    def as_annotation(self):
        """
        Render this reference as a composite metadata annotation, with
        one sub-annotation per populated bibliographic field.
        """
        top = dendropy.Annotation(
                name="INSDReference_reference",
                value=None,
                datatype_hint=None,
                name_prefix=GENBANK_ANNOTATION_PREFIX,
                namespace=GENBANK_ANNOTATION_NAMESPACE,
                name_is_prefixed=False,
                is_attribute=False,
                annotate_as_reference=True,
                is_hidden=False,
                label=None,
                oid=None)
        for item in [
                ("number", "INSDReference_reference"),
                ("position", "INSDReference_position"),
                ("title", "INSDReference_title"),
                ("journal", "INSDReference_journal"),
                ("pubmed_id", "INSDReference_pubmed"),
                ("medline_id", "INSDReference_medline"),
                ]:
            value = getattr(self, item[0])
            if value is None:
                # Unpopulated fields are omitted from the annotation.
                continue
            sub = dendropy.Annotation(
                name=item[1],
                value=value,
                datatype_hint=None,
                name_prefix=GENBANK_ANNOTATION_PREFIX,
                namespace=GENBANK_ANNOTATION_NAMESPACE,
                name_is_prefixed=False,
                is_attribute=False,
                annotate_as_reference=False,
                is_hidden=False,
                label=None,
                oid=None)
            top.annotations.add(sub)
        return top
+
+
class GenBankAccessionReferences(list):
    """
    Collection of GenBankAccessionReference objects, with
    lookup-by-key helpers.

    NOTE(review): GenBankAccessionReference does not define ``key`` or
    ``value`` attributes; the lookup helpers below appear copied from
    the feature/qualifier collections -- confirm intended usage.
    """

    def __init__(self, *args):
        list.__init__(self, *args)

    def findall(self, key):
        """
        Return all elements whose ``key`` attribute equals ``key``, as
        a new collection of the same type.
        """
        results = []
        for a in self:
            if a.key == key:
                results.append(a)
        # Fixed: previously wrapped the results in
        # GenBankAccessionFeatures, the wrong collection type for
        # references.
        results = GenBankAccessionReferences(results)
        return results

    def find(self, key, default=None):
        """
        Return the first element whose ``key`` equals ``key``, or
        ``default`` if none matches.
        """
        for a in self:
            if a.key == key:
                return a
        return default

    def get_value(self, key, default=None):
        """
        Return the ``value`` of the first element whose ``key`` equals
        ``key``, or ``default`` if none matches.
        """
        for a in self:
            if a.key == key:
                return a.value
        return default

    def as_annotation(self):
        """
        Render the collection as a composite metadata annotation, one
        sub-annotation per contained reference.
        """
        top = dendropy.Annotation(
                name="INSDSeq_references",
                value=None,
                datatype_hint=None,
                name_prefix=GENBANK_ANNOTATION_PREFIX,
                namespace=GENBANK_ANNOTATION_NAMESPACE,
                name_is_prefixed=False,
                is_attribute=False,
                annotate_as_reference=True,
                is_hidden=False,
                label=None,
                oid=None)
        for reference in self:
            top.annotations.add(reference.as_annotation())
        return top
+
class GenBankAccessionInterval(object):
    """
    A GenBank interval (location span) record.
    """
    def __init__(self, xml=None):
        # Start/end coordinates (kept as the XML text values) and the
        # accession of the sequence on which the interval lies.
        self.begin = None
        self.end = None
        self.accession = None
        if xml is not None:
            self.parse_xml(xml)

    def parse_xml(self, xml):
        """
        Populate this interval from an INSDInterval XML element.
        """
        for attr_name, tag in (
                ("begin", "INSDInterval_from"),
                ("end", "INSDInterval_to"),
                ("accession", "INSDInterval_accession"),
                ):
            setattr(self, attr_name, xml.findtext(tag))
+
class GenBankAccessionQualifier(object):
    """
    A GenBank qualifier (name/value pair) record.
    """
    def __init__(self, xml=None):
        self.name = None
        self.value = None
        if xml is not None:
            self.parse_xml(xml)

    def parse_xml(self, xml):
        """
        Populate this qualifier from an INSDQualifier XML element.
        """
        self.name = xml.findtext("INSDQualifier_name")
        self.value = xml.findtext("INSDQualifier_value")

    def as_annotation(self):
        """
        Render this qualifier as a metadata annotation.
        """
        return dendropy.Annotation(
            name=self.name,
            value=self.value,
            datatype_hint=None,
            name_prefix=GENBANK_ANNOTATION_PREFIX,
            namespace=GENBANK_ANNOTATION_NAMESPACE,
            name_is_prefixed=False,
            is_attribute=False,
            annotate_as_reference=False,
            is_hidden=False,
            label=None,
            oid=None)
+
class GenBankAccessionQualifiers(list):
    """
    Collection of GenBankAccessionQualifier objects, with
    lookup-by-name helpers.
    """

    def __init__(self, *args):
        list.__init__(self, *args)

    def findall(self, name):
        """
        Return all qualifiers named ``name`` as a new collection.
        """
        return GenBankAccessionQualifiers(
                [qualifier for qualifier in self if qualifier.name == name])

    def find(self, name, default=None):
        """
        Return the first qualifier named ``name``, or ``default`` if
        none matches.
        """
        for qualifier in self:
            if qualifier.name == name:
                return qualifier
        return default

    def get_value(self, name, default=None):
        """
        Return the value of the first qualifier named ``name``, or
        ``default`` if none matches.
        """
        match = self.find(name)
        if match is None:
            return default
        return match.value

    def as_annotation(self):
        """
        Render the collection as a composite metadata annotation, one
        sub-annotation per contained qualifier.
        """
        top = dendropy.Annotation(
                name="INSDFeature_quals",
                value=None,
                datatype_hint=None,
                name_prefix=GENBANK_ANNOTATION_PREFIX,
                namespace=GENBANK_ANNOTATION_NAMESPACE,
                name_is_prefixed=False,
                is_attribute=False,
                annotate_as_reference=True,
                is_hidden=False,
                label=None,
                oid=None)
        for qualifier in self:
            top.annotations.add(qualifier.as_annotation())
        return top
+
+class GenBankAccessionFeature(object):
+    """
+    A GenBank Feature record.
+    """
+    def __init__(self, xml=None):
+        self.key = None
+        self.location = None
+        self.qualifiers = GenBankAccessionQualifiers()
+        self.intervals = []
+        if xml is not None:
+            self.parse_xml(xml)
+
+    def parse_xml(self, xml):
+        self.key = xml.findtext("INSDFeature_key")
+        self.location = xml.findtext("INSDFeature_location")
+        for intervals in xml.findall("INSDFeature_intervals"):
+            for interval_xml in xml.findall("INSDInterval"):
+                interval = GenBankAccessionInterval(interval_xml)
+                self.intervals.append(interval)
+        for qualifiers in xml.findall("INSDFeature_quals"):
+            for qualifier_xml in qualifiers.findall("INSDQualifier"):
+                qualifier = GenBankAccessionQualifier(qualifier_xml)
+                self.qualifiers.append(qualifier)
+
+    def as_annotation(self):
+        top = dendropy.Annotation(
+                name="INSDSeq_feature",
+                value=None,
+                datatype_hint=None,
+                name_prefix=GENBANK_ANNOTATION_PREFIX,
+                namespace=GENBANK_ANNOTATION_NAMESPACE,
+                name_is_prefixed=False,
+                is_attribute=False,
+                annotate_as_reference=True,
+                is_hidden=False,
+                label=None,
+                oid=None)
+        for item in [
+                ("key", "INSDFeature_key"),
+                ("location", "INSDFeature_location"),
+                ]:
+            value = getattr(self, item[0])
+            if not value:
+                continue
+            sub = dendropy.Annotation(
+                name=item[1],
+                value=value,
+                datatype_hint=None,
+                name_prefix=GENBANK_ANNOTATION_PREFIX,
+                namespace=GENBANK_ANNOTATION_NAMESPACE,
+                name_is_prefixed=False,
+                is_attribute=False,
+                annotate_as_reference=False,
+                is_hidden=False,
+                label=None,
+                oid=None)
+            top.annotations.add(sub)
+        if self.intervals:
+            intervals_annote = dendropy.Annotation(
+                name="INSDSeq_intervals",
+                value=None,
+                datatype_hint=None,
+                name_prefix=GENBANK_ANNOTATION_PREFIX,
+                namespace=GENBANK_ANNOTATION_NAMESPACE,
+                name_is_prefixed=False,
+                is_attribute=False,
+                annotate_as_reference=True,
+                is_hidden=False,
+                label=None,
+                oid=None)
+            top.annotations.add(intervals_annote)
+            for interval in self.intervals:
+                interval_annote = dendropy.Annotation(
+                    name="INSDInterval",
+                    value=None,
+                    datatype_hint=None,
+                    name_prefix=GENBANK_ANNOTATION_PREFIX,
+                    namespace=GENBANK_ANNOTATION_NAMESPACE,
+                    name_is_prefixed=False,
+                    is_attribute=False,
+                    annotate_as_reference=True,
+                    is_hidden=False,
+                    label=None,
+                    oid=None)
+                intervals_annote.annotations.add(interval_annote)
+                for item in [
+                        ("begin", "INSDInterval_from"),
+                        ("end", "INSDInterval_to"),
+                        ("accession", "INSDInterval_accession"),
+                        ]:
+                    value = getattr(self, item[0])
+                    if not value:
+                        continue
+                    sub = dendropy.Annotation(
+                        name=item[1],
+                        value=value,
+                        datatype_hint=None,
+                        name_prefix=GENBANK_ANNOTATION_PREFIX,
+                        namespace=GENBANK_ANNOTATION_NAMESPACE,
+                        name_is_prefixed=False,
+                        is_attribute=False,
+                        annotate_as_reference=False,
+                        is_hidden=False,
+                        label=None,
+                        oid=None)
+                    interval_annote.annotations.add(interval_annote)
+        if self.qualifiers:
+            top.annotations.add(self.qualifiers.as_annotation())
+        return top
+
+class GenBankAccessionFeatures(list):
+
+    def __init__(self, *args):
+        list.__init__(self, *args)
+
+    def findall(self, key):
+        results = []
+        for a in self:
+            if a.key == key:
+                results.append(a)
+        results = GenBankAccessionFeatures(results)
+        return results
+
+    def find(self, key, default=None):
+        for a in self:
+            if a.key == key:
+                return a
+        return default
+
+    def get_value(self, key, default=None):
+        for a in self:
+            if a.key == key:
+                return a.value
+        return default
+
+    def as_annotation(self):
+        top = dendropy.Annotation(
+                name="INSDSeq_feature-table",
+                value=None,
+                datatype_hint=None,
+                name_prefix=GENBANK_ANNOTATION_PREFIX,
+                namespace=GENBANK_ANNOTATION_NAMESPACE,
+                name_is_prefixed=False,
+                is_attribute=False,
+                annotate_as_reference=True,
+                is_hidden=False,
+                label=None,
+                oid=None)
+        for feature in self:
+            top.annotations.add(feature.as_annotation())
+        return top
+
+class GenBankAccessionOtherSeqIds(dict):
+
+    def __init__(self, *args):
+        dict.__init__(self, *args)
+
+    def as_annotation(self):
+        top = dendropy.Annotation(
+                name="otherSeqIds",
+                value=None,
+                datatype_hint=None,
+                name_prefix=GENBANK_ANNOTATION_PREFIX,
+                namespace=GENBANK_ANNOTATION_NAMESPACE,
+                name_is_prefixed=False,
+                is_attribute=False,
+                annotate_as_reference=True,
+                is_hidden=False,
+                label=None,
+                oid=None)
+        for key, value in self.items():
+            sub = dendropy.Annotation(
+                name=key,
+                value=value,
+                datatype_hint=None,
+                name_prefix=GENBANK_ANNOTATION_PREFIX,
+                namespace=GENBANK_ANNOTATION_NAMESPACE,
+                name_is_prefixed=False,
+                is_attribute=False,
+                annotate_as_reference=False,
+                is_hidden=False,
+                label=None,
+                oid=None)
+            top.annotations.add(sub)
+        return top
+
+class GenBankAccessionRecord(object):
+    """
+    A GenBank record.
+    """
+
+    def __init__(self, db=None, xml=None):
+        self.db = db
+        self._request_key = None
+        self._defline = None
+        self.locus = None
+        self.length = None
+        self.moltype = None
+        self.topology = None
+        self.strandedness = None
+        self.division = None
+        self.update_date = None
+        self.create_date = None
+        self.definition = None
+        self.primary_accession = None
+        self.accession_version = None
+        self.other_seq_ids = GenBankAccessionOtherSeqIds()
+        self.source = None
+        self.organism = None
+        self.taxonomy = None
+        self.references = GenBankAccessionReferences()
+        self.feature_table = GenBankAccessionFeatures()
+        self.sequence_text = None
+        if xml is not None:
+            self.parse_xml(xml)
+
+    def __str__(self):
+        return self.defline
+
+    def _get_defline(self):
+        if self._defline is None:
+            self._defline = self.compose_fasta_defline()
+        return self._defline
+    defline = property(_get_defline)
+
+    def _get_request_key(self):
+        if self._request_key is None:
+            return self.gi
+        else:
+            return self._request_key
+    def _set_request_key(self, rid):
+        self._request_key = rid
+    request_key = property(_get_request_key, _set_request_key)
+
+    def _has_request_key(self):
+        return self._request_key is not None
+    has_request_key = property(_has_request_key)
+
+    def _get_accession(self):
+        return self.primary_accession
+    accession = property(_get_accession)
+
+    def _get_gi(self):
+        return self.other_seq_ids.get("gi", None)
+    gi = property(_get_gi)
+
+    def _get_gb(self):
+        return self.other_seq_ids.get("gb", None)
+    gb = property(_get_gb)
+
+    def parse_xml(self, xml):
+        self.locus = xml.findtext("INSDSeq_locus")
+        self.length = xml.findtext("INSDSeq_length")
+        self.moltype = xml.findtext("INSDSeq_moltype")
+        self.topology = xml.findtext("INSDSeq_topology")
+        self.strandedness = xml.findtext("INSDSeq_strandedness")
+        self.division = xml.findtext("INSDSeq_division")
+        self.update_date = xml.findtext("INSDSeq_update-date")
+        self.create_date = xml.findtext("INSDSeq_create-date")
+        self.definition = xml.findtext("INSDSeq_definition")
+        self.primary_accession = xml.findtext("INSDSeq_primary-accession")
+        self.accession_version = xml.findtext("INSDSeq_accession-version")
+        for other_seqids in xml.findall("INSDSeq_other-seqids"):
+            for other_seqid in other_seqids.findall("INSDSeqid"):
+                seqid = other_seqid.text
+                parts = seqid.split("|")
+                if len(parts) == 1:
+                    self.other_seq_ids[""] = seqid
+                else:
+                    self.other_seq_ids[parts[0]] = parts[1]
+        self.source = xml.findtext("INSDSeq_source")
+        self.organism = xml.findtext("INSDSeq_organism")
+        self.taxonomy = xml.findtext("INSDSeq_taxonomy")
+        for references in xml.findall("INSDSeq_references"):
+            for reference_xml in references.findall("INSDReference"):
+                reference = GenBankAccessionReference(reference_xml)
+                self.references.append(reference)
+        for features in xml.findall("INSDSeq_feature-table"):
+            for feature_xml in features.findall("INSDFeature"):
+                feature = GenBankAccessionFeature(feature_xml)
+                self.feature_table.append(feature)
+        self.sequence_text = xml.findtext("INSDSeq_sequence")
+
+    def compose_fasta_defline(self):
+        return "gi|%s|gb|%s| %s" % (
+                self.gi,
+                self.accession_version,
+                self.definition)
+
+    def compose_taxon_label(self, components=None, separator=" "):
+        label = []
+        if components is None:
+            components = ["accession", "organism"]
+        for component in components:
+            component = str(getattr(self, component))
+            if component is not None:
+                if separator is not None and " " in component and separator != " ":
+                    component = component.replace(" ", separator)
+                label.append(component)
+        if separator is None:
+            separator = ""
+        return separator.join(label)
+
+    def as_fasta(self,
+            generate_new_label=False,
+            label_components=None,
+            label_component_separator=" "):
+        if generate_new_label:
+            label = self.compose_taxon_label(components=label_components,
+                    separator=label_component_separator)
+        else:
+            label = self.compose_fasta_defline()
+        sequence_text = self.sequence_text.upper()
+        return ">%s\n%s" % (label, sequence_text)
+
+    def _get_uri(self):
+        uri = ["http://www.ncbi.nlm.nih.gov"]
+        uri.append(str(self.db))
+        uri.append(str(self.request_key))
+        return "/".join(uri)
+    uri = property(_get_uri)
+
+    def as_reference_annotation(self):
+        annote = dendropy.Annotation(
+                name="source",
+                value=self.uri,
+                datatype_hint=None,
+                name_prefix="dcterms",
+                namespace="http://purl.org/dc/terms/",
+                name_is_prefixed=False,
+                is_attribute=False,
+                annotate_as_reference=True,
+                is_hidden=False,
+                label=None,
+                oid=None)
+        return annote
+
+    def as_annotation(self):
+        top = self.as_reference_annotation()
+        # top = dendropy.Annotation(
+        #         name="source",
+        #         value=None,
+        #         datatype_hint=None,
+        #         name_prefix="dcterms",
+        #         namespace="http://purl.org/dc/terms/",
+        #         name_is_prefixed=False,
+        #         is_attribute=False,
+        #         annotate_as_reference=True,
+        #         is_hidden=False,
+        #         label=None,
+        #         oid=None)
+        # for item in [
+        #         ("db", "db"),
+        #         ("request_key", "requestKey")
+        #         ]:
+        #     value = getattr(self, item[0])
+        #     if value is None:
+        #         continue
+        #     a = dendropy.Annotation(
+        #         name=item[1],
+        #         value=value,
+        #         datatype_hint=None,
+        #         name_prefix="dendropy",
+        #         namespace="http://packages.python.org/DendroPy/",
+        #         name_is_prefixed=False,
+        #         is_attribute=False,
+        #         annotate_as_reference=False,
+        #         is_hidden=False,
+        #         label=None,
+        #         oid=None)
+        #     top.annotations.add(a)
+        for item in [
+                ("locus", "INSDSeq_locus"),
+                ("length", "INSDSeq_length"),
+                ("moltype", "INSDSeq_moltype"),
+                ("topology", "INSDSeq_topology"),
+                ("strandedness", "INSDSeq_strandedness"),
+                ("division", "INSDSeq_division"),
+                ("update_date", "INSDSeq_update-date"),
+                ("create_date", "INSDSeq_create-date"),
+                ("definition", "INSDSeq_definition"),
+                ("primary_accession", "INSDSeq_primary-accesison"),
+                ("accession_version", "INSDSeq_accession-version"),
+                ("other_seq_ids",  "INSDSeq_hter-seqids"),
+                ("source", "INSDSeq_source"),
+                ("organism", "INSDSeq_organism"),
+                ("taxonomy", "INSDSeq_taxonomy"),
+                ("references", "INSDSeq_references"),
+                ("feature_table", "INSDSeq_feature-table"),
+                ]:
+            value = getattr(self, item[0])
+            if hasattr(value, "as_annotation") and value:
+                a = value.as_annotation()
+            elif value is not None:
+                a = dendropy.Annotation(
+                    name=item[1],
+                    value=value,
+                    datatype_hint=None,
+                    name_prefix=GENBANK_ANNOTATION_PREFIX,
+                    namespace=GENBANK_ANNOTATION_NAMESPACE,
+                    name_is_prefixed=False,
+                    is_attribute=False,
+                    annotate_as_reference=False,
+                    is_hidden=False,
+                    label=None,
+                    oid=None)
+            top.annotations.add(a)
+        return top
+
diff --git a/dendropy/interop/muscle.py b/dendropy/interop/muscle.py
new file mode 100644
index 0000000..25e4c34
--- /dev/null
+++ b/dendropy/interop/muscle.py
@@ -0,0 +1,43 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wrapper around calls to MUSCLE
+"""
+
+import dendropy
+import subprocess
+from dendropy.utility import processio
+
+def muscle_align(char_matrix, muscle_args=None, muscle_path='muscle'):
+    cmd = [muscle_path]
+    if muscle_args:
+        cmd = cmd + muscle_args
+    p = subprocess.Popen(cmd,
+            stdin=subprocess.PIPE,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE)
+    stdout, stderr = processio.communicate(p, char_matrix.as_string("fasta"))
+    if p.returncode:
+        raise Exception(stderr)
+    d = char_matrix.__class__.get_from_string(stdout,
+            "fasta",
+            taxon_namespace=char_matrix.taxon_namespace)
+    return d
+
+
diff --git a/dendropy/interop/paup.py b/dendropy/interop/paup.py
new file mode 100644
index 0000000..98c98a6
--- /dev/null
+++ b/dendropy/interop/paup.py
@@ -0,0 +1,954 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wrapper around calls to PAUP*, mainly for testing purposes rather than analysis.
+"""
+
+import os
+import sys
+import subprocess
+import tempfile
+import re
+import csv
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+
+import dendropy
+from dendropy.utility import error
+from dendropy.utility import metavar
+from dendropy.utility import container
+from dendropy.utility import messaging
+from dendropy.utility import filesys
+from dendropy.utility import processio
+from dendropy.dataio import nexuswriter
+_LOG = messaging.get_logger(__name__)
+
+import dendropy
+
+PAUP_PATH = os.environ.get(metavar.DENDROPY_PAUP_PATH_ENVAR, "paup")
+if PAUP_PATH == "NONE":
+    DENDROPY_PAUP_INTEROPERABILITY = False
+else:
+    DENDROPY_PAUP_INTEROPERABILITY = True
+
+STANDARD_PREAMBLE = "set warnreset=no increase=auto warnroot=no warnReset=no warnTree=no warnTSave=no warnBlkName=no errorStop=no errorBeep=no queryBeep=no"
+
+class PaupService(object):
+
+    @staticmethod
+    def call(
+            paup_commands,
+            suppress_standard_preamble=False,
+            ignore_error_returncode=False,
+            ignore_nonempty_stderr=False,
+            strip_extraneous_prompts_from_stdout=True,
+            strip_extraneous_prompts_from_stderr=True,
+            cwd=None,
+            env=None,
+            paup_path=PAUP_PATH
+            ):
+        """
+        Executes a sequence of commands in PAUP* and returns the results.
+
+        Parameters
+        ----------
+        paup_commands : iterable of strings
+            A list or some other iterable of strings representing PAUP
+            commands.
+        suppress_standard_preamble : bool
+            If `True`, then the command sequence will not be prefaced by the
+            standard preamble.
+        ignore_error_returncode : bool
+            If `True`, then a non-0 return code from the PAUP process will not
+            result in an exception being raised.
+        ignore_nonempty_stderr : bool
+            If `True`, then the PAUP process writing to standard error will not
+            result in an exception being raised.
+        strip_extraneous_prompts_from_stdout : bool
+            If `True`, then all occurrences of 'paup>' will be removed from the
+            standard output contents.
+        strip_extraneous_prompts_from_stderr : bool
+            If `True`, then all occurrences of 'paup>' will be removed from the
+            standard error contents.
+        cwd : string
+            Set the working directory of the PAUP* process to this directory.
+        env : dictionary
+            Environmental variables to set for the PAUP* process.
+        paup_path : string
+            Path to the PAUP* executable.
+
+        Returns
+        -------
+        returncode : exit value of PAUP process.
+        stdout : string
+            Contents of the PAUP process standard output.
+        stderr : string
+            Contents of the PAUP process standard error.
+        """
+        if isinstance(paup_commands, str):
+            commands = [paup_commands]
+        else:
+            commands = list(paup_commands)
+        if not suppress_standard_preamble:
+            commands.insert(0, STANDARD_PREAMBLE)
+        commands.append("quit")
+        paup_block = ";\n".join(commands) + ";\n"
+        invocation_command = [paup_path, "-n", "-u"]
+        p = subprocess.Popen(
+                invocation_command,
+                stdin=subprocess.PIPE,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                cwd=cwd,
+                env=env,
+                )
+        raw_stdout, raw_stderr = processio.communicate(p, paup_block)
+        stdout = raw_stdout
+        stderr = raw_stderr
+        if strip_extraneous_prompts_from_stdout:
+            # weird dev/paup error ... lots or prompts spring up
+            stdout = stdout.replace("paup>", "")
+        if strip_extraneous_prompts_from_stderr:
+            # weird dev/paup error ... lots or prompts spring up
+            stderr = stderr.replace("paup>", "")
+            chk_stderr = stderr
+        else:
+            chk_stderr = stderr.replace("paup>", "")
+        if (p.returncode != 0 and not ignore_error_returncode) or (chk_stderr != "" and not ignore_nonempty_stderr):
+            raise error.ExternalServiceError(
+                    service_name="PAUP*",
+                    invocation_command=invocation_command,
+                    service_input=paup_block,
+                    returncode = p.returncode,
+                    stdout=raw_stdout,
+                    stderr=raw_stderr)
+        return p.returncode, stdout, stderr
+
+    @staticmethod
+    def bipartition_groups_to_split_bitmask(group_string, normalized=None):
+        """
+        This converts a PAUP* group representation (i.e. a string of askterisks
+        and periods, where the asterisks denote the taxon index counting from
+        left to right) to a mask representation:
+            - a clade mask, where 1's represent descendents of the split/edge
+                (with taxon index counting from right to left, i.e., first taxon
+                is right-most bit)
+            - a split mask, an unrooted normalized version of the above, where
+                if the right most bit is not 1 the clade mask is complemented
+                (and not changed otherwise).
+        """
+        group_string = group_string[::-1] # flip to get correct orientation
+        split_bitmask = int(group_string.replace("*", "1").replace(".", "0"), 2)
+        if normalized:
+            mask=((2 ** len(group_string)) -1)
+            return container.NormalizedBitmaskDict.normalize(split_bitmask, mask, 1)
+        else:
+            return split_bitmask
+
+    def __init__(self,
+            suppress_standard_preamble=False,
+            ignore_error_returncode=False,
+            strip_extraneous_prompts_from_stderr=True,
+            strip_extraneous_prompts_from_stdout=True,
+            cwd=None,
+            env=None,
+            paup_path=PAUP_PATH):
+        self.suppress_standard_preamble = suppress_standard_preamble
+        self.ignore_error_returncode = ignore_error_returncode
+        self.strip_extraneous_prompts_from_stderr = strip_extraneous_prompts_from_stderr
+        self.strip_extraneous_prompts_from_stdout = strip_extraneous_prompts_from_stdout
+        self.cwd = cwd
+        self.env = env
+        self.paup_path = paup_path
+        self._nexus_writer = nexuswriter.NexusWriter()
+        self.commands = []
+
+    def count_splits_from_files(self,
+            tree_filepaths=None,
+            is_rooted=None,
+            use_tree_weights=None,
+            burnin=None,
+            taxa_definition_filepath=None,
+            taxon_namespace=None):
+        """
+        Counts splits (bipartitions) in trees from files and returns the results.
+
+        Parameters
+        ----------
+        tree_filepaths : iterable of strings
+            A list or some other iterable of file paths containing trees in
+            NEXUS format.
+        is_rooted : bool
+            If `True` then trees will be treated as rooted. If `False`, then
+            rooting follows that specified in the tree statements, defaulting
+            to unrooted if not specified.
+        use_tree_weights : bool
+            If `False` then tree weighting statements are disregarded.
+            Otherwise, they will be regarded.
+        burnin : integer
+            Skip these many trees (from beginning of each source).
+        taxa_definition_filepath : str
+            Path of file containing TAXA block to execute. This is crucial to
+            getting the taxon order (and hence, indexes, and hence, split
+            bitmasks) correct. If not given, will use the first file
+            given in ``tree_filepaths``.
+        taxon_namespace : |TaxonNamespace|
+            The |TaxonNamespace| object to populate.
+
+        Returns
+        -------
+        d : dictionary
+            A dictionary with the following keys and values:
+
+                -   "bipartition_counts" : dictionary with split bitmasks as keys
+                    and (weighted) counts of occurrences as values
+                -   "bipartition_freqs" : dictionary with split bitmasks as keys
+                    and (weighted) proportional frequencies of occurrences as values
+                -   "num_trees" : number of trees counted
+                -   "taxon_namespace" : |TaxonNamespace| instance
+                    corresponding to the taxa <=> split bitmask mapping
+                -   "is_rooted" : indicates whether the trees were rooted or not
+        """
+        # discard any previously staged commands before building a new run
+        self.commands = []
+        if taxa_definition_filepath is not None:
+            self.stage_execute_file(
+                    taxa_definition_filepath,
+                    clear_trees=True)
+        self.stage_load_trees(
+            tree_filepaths=tree_filepaths,
+            is_rooted=is_rooted,
+            use_tree_weights=use_tree_weights,
+            burnin=burnin,
+            mode=7)
+        self.stage_list_taxa()
+        self.stage_tree_info()
+        self.stage_count_splits(use_tree_weights=use_tree_weights)
+        # print("\n".join(self.commands))
+        returncode, stdout, stderr = self._execute_command_sequence()
+        # print("\n".join(stdout))
+        # parse taxa first so split bitmask indexes line up with taxon order
+        taxon_namespace = self.parse_taxon_namespace(stdout,
+                taxon_namespace=taxon_namespace)
+        # rooting state as actually reported by PAUP* (may differ from the
+        # ``is_rooted`` argument when that was None)
+        is_rooted = self.parse_is_tree_rooted(stdout)
+        tree_count, bipartition_counts, bipartition_freqs = self.parse_group_freqs(stdout, is_rooted=is_rooted)
+        d = {
+            "num_trees" : tree_count,
+            "bipartition_counts" : bipartition_counts,
+            "bipartition_freqs" : bipartition_freqs,
+            "taxon_namespace" : taxon_namespace,
+            "is_rooted" : is_rooted,
+            }
+        return d
+
+    def get_split_distribution_from_files(self,
+            tree_filepaths=None,
+            is_rooted=None,
+            use_tree_weights=None,
+            burnin=None,
+            taxa_definition_filepath=None,
+            taxon_namespace=None,
+            split_distribution=None):
+        """
+        Returns a SplitDistribution object based on splits given in
+        tree files.
+
+        tree_filepaths : iterable of strings
+            A list or some other iterable of file paths containing trees in
+            NEXUS format.
+        is_rooted : bool
+            If `True` then trees will be treated as rooted. If `False`, then
+            rooting follows that specified in the tree statements, defaulting
+            to unrooted if not specified.
+        use_tree_weights : bool
+            If `False` then tree weighting statements are disregarded.
+            Otherwise, they will be regarded.
+        burnin : integer
+            Skip these many trees (from beginning of each source).
+        taxa_definition_filepath : str
+            Path of file containing TAXA block to execute. This is crucial to
+            getting the taxon order (and hence, indexes, and hence, split
+            bitmasks) correct. If not given, will use the first file
+            given in ``tree_filepaths``.
+        taxon_namespace : |TaxonNamespace|
+            |TaxonNamespace| object to use.
+        split_distribution : `SplitDistribution`
+            `SplitDistribution` object to use.
+        """
+        # reconcile the taxon namespace with the (optional) given
+        # split distribution: they must refer to the same namespace
+        if split_distribution is None:
+            split_distribution = dendropy.SplitDistribution(taxon_namespace=taxon_namespace)
+            taxon_namespace = split_distribution.taxon_namespace
+        else:
+            if taxon_namespace is None:
+                taxon_namespace = split_distribution.taxon_namespace
+            else:
+                assert split_distribution.taxon_namespace is taxon_namespace
+        result = self.count_splits_from_files(
+            tree_filepaths=tree_filepaths,
+            is_rooted=is_rooted,
+            use_tree_weights=use_tree_weights,
+            burnin=burnin,
+            taxa_definition_filepath=taxa_definition_filepath,
+            taxon_namespace=taxon_namespace)
+        for split in result["bipartition_counts"]:
+            # NOTE(review): normalization keys off the *argument*
+            # ``is_rooted`` (possibly None), not the rooting state PAUP*
+            # actually reported in result["is_rooted"] -- confirm intended
+            if not is_rooted:
+                sd_split_key = split_distribution.normalize_bitmask(split)
+            else:
+                sd_split_key = split
+            split_distribution.add_split_count(sd_split_key, result["bipartition_counts"][split])
+        split_distribution.total_trees_counted = result["num_trees"]
+        return split_distribution
+
+    def stage_execute_file(self,
+            filepath,
+            clear_trees=False):
+        """Executes file, optionally clearing trees from file if requested"""
+        self.commands.append("execute {}".format(filepath))
+        if clear_trees:
+            self.commands.append("cleartrees")
+        return commands
+
+    def stage_load_trees(self,
+            tree_filepaths,
+            is_rooted=None,
+            use_tree_weights=None,
+            burnin=None,
+            mode=7): # keep trees in memory, specify 3 to clear
+        """
+        Composes commands to load a set of trees into PAUP*, with the specified
+        number of burnin dropped.
+        """
+        if isinstance(tree_filepaths, str):
+            raise Exception("expecting list of filepaths, not string")
+        if is_rooted is None:
+            rooting = ""
+        elif is_rooted:
+            rooting = "rooted=yes"
+        else:
+            rooting = "unrooted=yes"
+        if use_tree_weights is None:
+            treewts = ""
+        elif use_tree_weights:
+            treewts = "storetreewts=yes"
+        else:
+            treewts = "storetreewts=no"
+        if burnin is None:
+            burnin = 0
+        gettree_template = "gett file= '{{tree_filepath}}' storebrlens=yes warntree=no {rooting} {treewts} from={burnin} mode={mode};".format(
+                rooting=rooting,
+                treewts=treewts,
+                burnin=burnin+1,
+                mode=mode)
+        for tree_filepath in tree_filepaths:
+            # self.commands.append(gettree_template.format(tree_filepath=tree_filepath))
+            # using relpath because of a bug in PAUP* 4.0b10 with long paths passed to gettrees
+            self.commands.append(gettree_template.format(tree_filepath=os.path.relpath(tree_filepath)))
+        return self.commands
+
+    def stage_list_taxa(self):
+        """
+        Given a data file in memory, this gets PAUP* to print a list of
+        taxa that can be used to build a TaxaBlock later.
+        """
+        # self.commands.append("[!TAXON LIST BEGIN]\ntstatus / full;\n[!TAXON LIST END]\n")
+        self.commands.append("[!TAXON LIST BEGIN]\ntstatus / full;\n[!TAXON LIST END]\n")
+        return self.commands
+
+    def stage_tree_info(self):
+        self.commands.append("[!TREE INFO BEGIN]treeinfo;\n[!TREE INFO END]\n")
+        return self.commands
+
+    def stage_count_splits(self,
+            use_tree_weights=None,
+            majrule_filepath=None,
+            majrule_freq=0.5):
+        """
+        Given trees in memory, this composes a command to count the split
+        frequencies across the trees as well as a save the majority-rule
+        consensus tree if a path is given.
+        """
+        percent = int(100 * majrule_freq)
+        if majrule_filepath is None:
+            treefile = ""
+        else:
+            treefile = " treefile={filepath} replace=yes "
+        if use_tree_weights is None:
+            treewts = ""
+        elif use_tree_weights:
+            treewts = "usetreewts=yes"
+        else:
+            treewts = "usetreewts=no"
+        commands = []
+        commands.append("[!SPLITS COUNT BEGIN]")
+        commands.append("contree / strict=no {treefile} showtree=no grpfreq=yes majrule=yes percent={percent} {treewts}".format(
+            treefile=treefile,
+            percent=percent,
+            treewts=treewts))
+        commands.append("[!SPLITS COUNT END]")
+        self.commands.extend(commands)
+        return self.commands
+
+    def stage_execute_file(self, filepath, clear_trees=False):
+        """Executes file, optionally clearing trees from file if requested"""
+        self.commands.append("execute '{}'".format(filepath))
+        if clear_trees:
+            self.commands.append("cleartrees")
+        return self.commands
+
+    ##############################################################################
+    ## Processing of Output
+
+    def parse_taxon_namespace(self, paup_output, taxon_namespace=None):
+        """
+        Given PAUP* output that includes a taxon listing as produced by
+        ``stage_list_taxa``, this parses out and returns a taxon block.
+        """
+        taxlabels = []
+        taxinfo_pattern = re.compile('\s*(\d+) (.*)\s+\-')
+        idx = 0
+        for line in paup_output:
+            idx += 1
+            if line == "TAXON LIST BEGIN":
+                break
+        for line in paup_output[idx:]:
+            if line == "TAXON LIST END":
+                break
+            ti_match = taxinfo_pattern.match(line)
+            if ti_match:
+                label = ti_match.group(2).strip()
+                taxlabels.append(label)
+        if taxon_namespace is None:
+            taxon_namespace = dendropy.TaxonNamespace()
+        for taxlabel in taxlabels:
+            taxon_namespace.require_taxon(label=taxlabel)
+        return taxon_namespace
+
+    def parse_is_tree_rooted(self, paup_output):
+        """
+        Given PAUP* output that includes a information produced by
+        ``stage_tree_info``, this parses out and returns the rooting
+        state of trees in memory
+        """
+        pattern = re.compile(r'\d+ (\w+) trees in memory')
+        for line in paup_output:
+            if line == "TREE INFO END":
+                break
+            match = pattern.match(line)
+            if match:
+                s = match.groups(1)[0]
+                if s == "unrooted":
+                    return False
+                elif s == "rooted":
+                    return True
+                else:
+                    return None
+        raise Exception("Unable to find tree information")
+
+    def parse_group_freqs(self, paup_output, is_rooted=None):
+        """
+        Given PAUP* output that includes a split counting procedure, this
+        collects the splits and returns a tuple of (number of trees
+        counted, dictionary mapping split bitmasks to counts, dictionary
+        mapping split bitmasks to frequencies).
+        """
+        bipartitions = []
+        bipartition_freqs = {}
+        bipartition_counts = {}
+        tree_count = None
+        # Line reporting how many trees went into the majority-rule consensus.
+        tree_count_pattern = re.compile('.*Majority-rule consensus of ([\d]*) tree.*', re.I)
+
+        bipartition_section = re.compile('Bipartitions found in one or more trees and frequency of occurrence:')
+        # A complete table row: a group pattern of '.'/'*' characters, a count
+        # column, and (optionally) a percentage column terminated by '%'.
+        bp_full_row_with_perc_col = re.compile('([\.|\*]+)\s+([\d\.]+)\s+([\d\.]*)%')
+        bp_full_row_with_no_perc_col = re.compile('([\.|\*]+)\s+([\d\.]+)')
+        # Any row starting with a group pattern (may be a wrapped partial row).
+        bp_row = re.compile('([\.|\*]+).*')
+
+        # find tree count
+        for idx, line in enumerate(paup_output):
+            tp_match = tree_count_pattern.match(line)
+            if tp_match:
+                break
+        if not tp_match:
+            raise Exception("Failed to find tree count in PAUP* output")
+        tree_count = int(tp_match.group(1))
+
+        # Skip forward to the first row of the bipartition table.
+        while not bp_row.match(paup_output[idx]):
+            idx += 1
+
+        # With many taxa, PAUP* wraps each group pattern across multiple
+        # table sections; split_reps accumulates the partial pattern for
+        # each row index until the final (count-bearing) section is reached.
+        split_idx = 0
+        split_reps = {}
+        for line in paup_output[idx:]:
+            if line == "SPLITS COUNT END":
+                    break
+            bp_match = bp_full_row_with_perc_col.match(line)
+            if not bp_match:
+                bp_match = bp_full_row_with_no_perc_col.match(line)
+            if bp_match:
+                # full row, or end of partial rows
+                if len(split_reps) == 0:
+                    split_rep = bp_match.group(1)
+                else:
+                    split_rep = split_reps[split_idx] + bp_match.group(1)
+                split_bitmask = PaupService.bipartition_groups_to_split_bitmask(split_rep, normalized=not is_rooted)
+                bipartition_counts[split_bitmask] = float(bp_match.group(2))
+                try:
+                    bipartition_freqs[split_bitmask] = float(bp_match.group(3)) / 100
+                except IndexError:
+                    # NOTE(review): no percent column matched — dividing the
+                    # count by 100 assumes that column already holds a
+                    # percentage; confirm against actual PAUP* output.
+                    bipartition_freqs[split_bitmask] = bipartition_counts[split_bitmask] / 100
+                split_idx += 1
+            else:
+                # either (1) partial row or (2) break between sections
+                bp_match = bp_row.match(line)
+                if not bp_match:
+                    # blank/heading line: next section restarts at row 0
+                    split_idx = 0
+                else:
+                    if split_idx in split_reps:
+                        split_reps[split_idx] += bp_match.group(1)
+                    else:
+                        split_reps[split_idx] = bp_match.group(1)
+                    split_idx += 1
+        return tree_count, bipartition_counts, bipartition_freqs
+
+    ##############################################################################
+    ## Support
+
+    def _execute_command_sequence(self):
+        returncode, stdout, stderr = PaupService.call(self.commands)
+        self.commands = []
+        stdout = stdout.split("\n")
+        stderr = stderr.split("\n")
+        return returncode, stdout, stderr
+
+##############################################################################
+## Wrappers for PAUP* Services
+
+def call(*args, **kwargs):
+    return PaupService.call(*args, **kwargs)
+
+def symmetric_difference(tree1, tree2):
+    if tree1.taxon_namespace is not tree2.taxon_namespace:
+        trees = dendropy.TreeList([dendropy.Tree(tree1), dendropy.Tree(tree2)])
+    else:
+        trees = dendropy.TreeList([tree1, tree2], taxon_namespace=tree1.taxon_namespace)
+    tf = tempfile.NamedTemporaryFile("w", delete=True)
+    trees.write_to_stream(tf, schema='nexus')
+    tf.flush()
+    assert tree1.is_rooted == tree2.is_rooted
+    sd = get_split_distribution(
+            tree_filepaths=[tf.name],
+            taxa_filepath=tf.name,
+            is_rooted=tree1.is_rooted,
+            use_tree_weights=True,
+            burnin=0)
+    sf = sd.split_frequencies
+    conflicts = 0
+    for k, v in sf.items():
+        if v < 1.0:
+            conflicts += 1
+    return conflicts
+
+def pscore_trees(
+        trees,
+        char_matrix,
+        pset_option_list=None,
+        pscore_option_list=None,
+        paup_path=PAUP_PATH):
+
+    if pset_option_list is not None:
+        pset = "pset " + " ".join(pset_option_list)
+    else:
+        pset = ""
+
+    scorefile = tempfile.NamedTemporaryFile("w+", delete=True)
+    pscore_command = "pscore / scorefile={}".format(scorefile.name)
+    if pscore_option_list is not None:
+        pscore_command = pscore_command + " ".join(pscore_option_list)
+    else:
+        pscore_command = pscore_command
+
+    post_est_commands = """\
+    set crit=parsimony;
+    {pset}
+    {pscore_command}
+    """.format(pset=pset, pscore_command=pscore_command)
+
+    paup_block = """\
+    set warnreset=no;
+    exe '{data_file}';
+    gettrees file= '{intree_file}' warntree=no;
+    {post_est_commands};
+    """
+
+    cf = tempfile.NamedTemporaryFile("w", delete=True)
+    char_matrix.write_to_stream(cf, schema='nexus')
+    cf.flush()
+    input_tree_file_handle = tempfile.NamedTemporaryFile("w", delete=True)
+    input_tree_filepath = input_tree_file_handle.name
+    trees.write_to_stream(input_tree_file_handle, schema="nexus")
+    input_tree_file_handle.flush()
+    paup_args = {}
+    paup_args["data_file"] = cf.name
+    paup_args["intree_file"] = input_tree_filepath
+    paup_args["post_est_commands"] = post_est_commands
+    paup_block = paup_block.format(**paup_args)
+    paup_run = subprocess.Popen(['%s -n' % paup_path],
+                                shell=True,
+                                stdin=subprocess.PIPE,
+                                stdout=subprocess.PIPE)
+    stdout, stderr = processio.communicate(paup_run, paup_block)
+    if stderr:
+        sys.stderr.write("\n*** ERROR FROM PAUP ***")
+        sys.stderr.write(stderr)
+        sys.exit(1)
+    scores_str = open(scorefile.name, "r").read()
+    score_rows = [r for r in scores_str.split("\n")[1:] if r != ""]
+    assert len(score_rows) == len(trees)
+    scores = [int(s.split()[1]) for s in score_rows]
+    assert len(scores) == len(trees)
+    cf.close()
+    input_tree_file_handle.close()
+    scorefile.close()
+    return scores
+
+def estimate_ultrametric_tree(
+        char_matrix,
+        topology_tree=None,
+        paup_path=PAUP_PATH):
+    """
+    Uses PAUP* to estimate an ultrametric (clock-constrained) tree from
+    ``char_matrix`` under a likelihood model with nst=1, equal base
+    frequencies and rates, a molecular clock, and midpoint rooting.
+
+    If ``topology_tree`` is None, the topology is first estimated by
+    neighbor-joining via ``estimate_tree``; otherwise branch lengths are
+    re-optimized on the supplied topology. Returns the resulting Tree.
+    """
+    # Commands run after the tree is in memory: switch to likelihood,
+    # midpoint-root, enforce a clock, and score (which optimizes branch
+    # lengths since userbr=no).
+    post_est_commands = """\
+    set crit=likelihood;
+    root rootmethod=midpoint;
+    lset userbr=no nst = 1 basefreq = eq rates = eq clock =yes;
+    lscore;
+    """
+    if topology_tree is None:
+        ultrametric_tree = estimate_tree(char_matrix,
+                tree_est_criterion="nj",
+                num_states=2,
+                unequal_base_freqs=False,
+                gamma_rates=False,
+                prop_invar=False,
+                extra_post_est_commands=post_est_commands)
+        return ultrametric_tree
+    else:
+        paup_block = """\
+        set warnreset=no;
+        exe '%(data_file)s';
+        gettrees file= '%(intree_file)s' warntree=no;
+        %(post_est_commands)s;
+        savetrees file=%(outtree_file)s format=nexus root=yes brlens=yes taxablk=yes maxdecimals=20;
+        """
+        # Data and starting topology are passed to PAUP* via temp files.
+        cf = tempfile.NamedTemporaryFile("w", delete=True)
+        char_matrix.write_to_stream(cf, schema='nexus')
+        cf.flush()
+        input_tree_file_handle = tempfile.NamedTemporaryFile("w", delete=True)
+        input_tree_filepath = input_tree_file_handle.name
+        topology_tree.write_to_stream(input_tree_file_handle, schema="nexus")
+        input_tree_file_handle.flush()
+        # output_tree_file_handle, output_tree_filepath = tempfile.mkstemp(text=True)
+        output_tree_file_handle = tempfile.NamedTemporaryFile("w+", delete=True)
+        output_tree_filepath = output_tree_file_handle.name
+        paup_args = {}
+        paup_args["data_file"] = cf.name
+        paup_args["intree_file"] = input_tree_filepath
+        paup_args["post_est_commands"] = post_est_commands
+        paup_args["outtree_file"] = output_tree_filepath
+        paup_block = paup_block % paup_args
+        paup_run = subprocess.Popen(['%s -n' % paup_path],
+                                    shell=True,
+                                    stdin=subprocess.PIPE,
+                                    stdout=subprocess.PIPE)
+        stdout, stderr = processio.communicate(paup_run, paup_block)
+        # Read the re-optimized tree back in against the original namespace.
+        t = dendropy.Tree.get_from_path(output_tree_filepath, "nexus", taxon_namespace=char_matrix.taxon_namespace)
+        cf.close()
+        input_tree_file_handle.close()
+        output_tree_file_handle.close()
+        return t
+
+def estimate_tree(char_matrix,
+                    tree_est_criterion="likelihood",
+                    num_states=6,
+                    unequal_base_freqs=True,
+                    gamma_rates=True,
+                    prop_invar=True,
+                    extra_pre_est_commands=None,
+                    extra_post_est_commands=None,
+                    paup_path='paup'):
+    """
+    Given a dataset, ``char_matrix``, estimates a tree using the given criterion.
+    """
+    paup_args = {
+        'nst': num_states,
+        'basefreq' : unequal_base_freqs and 'estimate' or 'equal',
+        'rates' : gamma_rates and 'gamma' or 'equal',
+        'pinvar' : prop_invar and 'estimate' or '0',
+    }
+    cf = tempfile.NamedTemporaryFile("w", delete=True)
+    char_matrix.write_to_stream(cf, schema='nexus')
+    cf.flush()
+    paup_args['datafile'] = cf.name
+    # output_tree_file_handle, output_tree_filepath = tempfile.mkstemp(text=True)
+    output_tree_file_handle = tempfile.NamedTemporaryFile("w+", delete=True)
+    output_tree_filepath = output_tree_file_handle.name
+    paup_args['est_tree_file'] = output_tree_filepath
+    if extra_pre_est_commands:
+        if isinstance(extra_pre_est_commands, str):
+            extra_pre_est_commands = [extra_pre_est_commands]
+        paup_args["pre_est_commands"] = ";\n".join(extra_pre_est_commands)
+    else:
+        paup_args["pre_est_commands"] = ""
+    if extra_post_est_commands:
+        if isinstance(extra_post_est_commands, str):
+            extra_post_est_commands = [extra_post_est_commands]
+        paup_args["post_est_commands"] = ";\n".join(extra_post_est_commands)
+    else:
+        paup_args["post_est_commands"] = ""
+    paup_template = """\
+    set warnreset=no;
+    exe %(datafile)s;
+    """
+    if tree_est_criterion.startswith("like"):
+        paup_template += """\
+    lset tratio=estimate rmatrix=estimate nst=%(nst)s basefreq=%(basefreq)s rates=%(rates)s shape=estimate pinvar=%(pinvar)s userbrlens=yes;
+    """
+    if tree_est_criterion not in ["nj", "upgma"] :
+        paup_template += """\
+        set crit=%s;
+        """ % tree_est_criterion
+    paup_template += """\
+    %(pre_est_commands)s;
+    """
+
+    if tree_est_criterion in ["nj", "upgma"] :
+        paup_template += tree_est_criterion + ";"
+    else:
+        paup_template += "hsearch;"
+
+    paup_template += """\
+    %(post_est_commands)s;
+    savetrees file=%(est_tree_file)s format=nexus root=yes brlens=yes taxablk=yes maxdecimals=20;
+    """
+    paup_run = subprocess.Popen(['%s -n' % paup_path],
+                                shell=True,
+                                stdin=subprocess.PIPE,
+                                stdout=subprocess.PIPE)
+    stdout, stderr = processio.communicate(paup_run, paup_template % paup_args)
+    t = dendropy.Tree.get_from_path(output_tree_filepath, "nexus", taxon_namespace=char_matrix.taxon_namespace)
+    cf.close()
+    output_tree_file_handle.close()
+    return t
+
+def estimate_model(char_matrix,
+                    tree_model=None,
+                    num_states=6,
+                    unequal_base_freqs=True,
+                    gamma_rates=True,
+                    prop_invar=True,
+                    tree_est_criterion="likelihood",
+                    tree_user_brlens=True,
+                    paup_path='paup'):
+    """
+    Given a dataset, ``char_matrix``, uses client-supplied tree or estimates a
+    tree, and character substitution model for the data.
+    Returns a tuple, consisting of a trees block with the tree(s) used for the
+    estimated character model, and a dictionary with estimates of rates, kappa,
+    base_frequencies, alpha, prop_invar, etc. as well as likelihood.
+    """
+    paup_args = {
+        'nst': num_states,
+        'basefreq' : unequal_base_freqs and 'estimate' or 'equal',
+        'rates' : gamma_rates and 'gamma' or 'equal',
+        'pinvar' : prop_invar and 'estimate' or '0',
+    }
+    if tree_model is not None:
+        # Client-supplied tree: write it out and have PAUP* load it.
+        assert tree_model.taxon_namespace is char_matrix.taxon_namespace
+        tf = tempfile.NamedTemporaryFile("w", delete=True)
+        tree_model.write_to_stream(tf, 'nexus')
+        tf.flush()
+        paup_args['tree'] = "gettrees file=%s storebrlens=yes;" % tf.name
+    else:
+        # No tree given: estimate one under the requested criterion first.
+        if tree_est_criterion in ["nj", "upgma"] :
+            paup_args['tree'] = tree_est_criterion
+        else:
+            paup_args['tree'] = "set crit=%s; hsearch; set crit=like;" % tree_est_criterion
+    if tree_user_brlens:
+        paup_args['userbrlens'] = 'yes'
+    else:
+        paup_args['userbrlens'] = 'no'
+
+    cf = tempfile.NamedTemporaryFile("w", delete=True)
+    char_matrix.write_to_stream(cf, schema='nexus')
+    cf.flush()
+    paup_args['datafile'] = cf.name
+    # output_tree_file_handle, output_tree_filepath = tempfile.mkstemp(text=True)
+    output_tree_file_handle = tempfile.NamedTemporaryFile("w+", delete=True)
+    output_tree_filepath = output_tree_file_handle.name
+    paup_args['est_tree_file'] = output_tree_filepath
+    paup_template = """\
+    set warnreset=no;
+    exe %(datafile)s;
+    set crit=like;
+    lset tratio=estimate rmatrix=estimate nst=%(nst)s basefreq=%(basefreq)s rates=%(rates)s shape=estimate pinvar=%(pinvar)s userbrlens=%(userbrlens)s;
+    %(tree)s;
+    lscore 1 / userbrlens=%(userbrlens)s;
+    savetrees file=%(est_tree_file)s format=nexus root=yes brlens=yes taxablk=yes maxdecimals=20;
+"""
+    paup_run = subprocess.Popen(['%s -n' % paup_path],
+                                shell=True,
+                                stdin=subprocess.PIPE,
+                                stdout=subprocess.PIPE)
+    stdout, stderr = processio.communicate(paup_run, paup_template % paup_args)
+    results = {}
+    # Regexes to scrape parameter estimates out of PAUP*'s lscore report;
+    # the leading double spaces anchor on PAUP*'s table formatting.
+    patterns = {
+        'likelihood' : re.compile('-ln L\s+([\d\.]+)'),
+        'rAC' : re.compile('  AC\s+([\d\.]+)'),
+        'rAG' : re.compile('  AG\s+([\d\.]+)'),
+        'rAT' : re.compile('  AT\s+([\d\.]+)'),
+        'rCG' : re.compile('  CG\s+([\d\.]+)'),
+        'rCT' : re.compile('  CT\s+([\d\.]+)'),
+        'rGT' : re.compile('  GT\s+([\d\.]+)'),
+        'kappa': re.compile('  kappa\s+([\d\.]+)'),
+        'prop_invar' : re.compile('P_inv\s+([\d\.]+)'),
+        'alpha' : re.compile('Shape\s+([\S]+)'),
+        'pA' : re.compile('  A\s+([\d\.]+)'),
+        'pC' : re.compile('  C\s+([\d\.]+)'),
+        'pG' : re.compile('  G\s+([\d\.]+)'),
+        'pT' : re.compile('  T\s+([\d\.]+)'),
+    }
+    for value_name in patterns:
+        results[value_name] = None
+    for line in stdout.split('\n'):
+        for value_name in patterns:
+            m = patterns[value_name].match(line)
+            if m:
+                results[value_name] = m.group(1)
+    for value_name in results.keys():
+        if value_name == 'likelihood':
+            # PAUP* reports -ln L; negate so the result is the log-likelihood.
+            # NOTE(review): if the '-ln L' line was never matched this is
+            # float(None) and raises TypeError — confirm intended behavior.
+            results[value_name] = -1 * float(results[value_name])
+            results["log_likelihood"] = results[value_name]
+        elif results[value_name] is not None:
+            try:
+                results[value_name] = float(results[value_name])
+            except:
+                pass
+    t = dendropy.Tree.get_from_path(output_tree_filepath, "nexus", taxon_namespace=char_matrix.taxon_namespace)
+    cf.close()
+    output_tree_file_handle.close()
+    return t, results
+
+def prune_taxa_from_trees(trees, taxa, paup_path='paup'):
+    """
+    Drops Taxon objects given in container ``taxa`` from TreeList ``trees``
+    """
+    tf = tempfile.NamedTemporaryFile("w", delete=True)
+    trees.write_to_stream(tf, schema='nexus')
+    tf.flush()
+    output_tree_file_handle = tempfile.NamedTemporaryFile("w+", delete=True)
+    output_tree_filepath = output_tree_file_handle.name
+    tax_idxs = [ str(trees.taxon_namespace.index(t)+1) for t in taxa ]
+    tax_idxs = " ".join(tax_idxs)
+    paup_template = """\
+    set warnreset=no;
+    exe %s;
+    gett file=%s storebrlens=yes;
+    delete %s / prune;
+    savetrees file=%s format=nexus brlens=user taxablk=yes maxdecimals=20;
+    """ % (tf.name,
+            tf.name,
+            tax_idxs,
+            output_tree_filepath)
+    paup_run = subprocess.Popen(['%s -n' % paup_path],
+                                shell=True,
+                                stdin=subprocess.PIPE,
+                                stdout=subprocess.PIPE)
+    stdout, stderr = processio.communicate(paup_run, paup_template)
+    t = dendropy.TreeList.get_from_path(output_tree_filepath,
+            "nexus",
+            taxon_namespace=trees.taxon_namespace)
+    output_tree_file_handle.close()
+    return t
+
+###############################################################################
+## PAUP* WRAPPERS
+
+class PaupSession(processio.Session):
+    """
+    Starts a PAUP* session, which remains active until explicitly closed.
+    Various commands can get executed and results returned.
+    """
+
+    # Sentinel echoed by PAUP* (via a [!...] print-comment) after each
+    # command, used to detect where one command's output ends.
+    EOC_FLAG = "@@@END-OF-COMMAND@@@"
+    # Matches a line consisting only of the sentinel.
+    FLAG_DETECT = re.compile(r'^\s*%s\s*$' % EOC_FLAG, re.MULTILINE)
+    # Strips echoed sentinel lines (with optional "paup>" prompt and the
+    # surrounding [! ... ] comment delimiters) from captured output.
+    EOC_FLAG_STRIP = re.compile(r"^(paup>)*\s*(\[!)*" + EOC_FLAG + "(\])*\s*$", re.MULTILINE)
+    # FLAG_DETECT = re.compile(r'[^\[]\s*%s\s*[^\]]' % EOC_FLAG, re.MULTILINE)
+
+    def __init__(self, paup_path=None):
+        # stderr is kept separate from stdout so both can be returned
+        # individually by send_command().
+        processio.Session.__init__(self, join_err_to_out=False)
+        if paup_path is None:
+            self.paup_path = PAUP_PATH
+        else:
+            self.paup_path = paup_path
+        self.start([self.paup_path])
+
+    def __del__(self):
+        self.stop()
+
+    def stop(self):
+        """Terminates the underlying PAUP* process, if still running."""
+        if self.process:
+            try:
+                self.process.terminate()
+            except:
+                pass
+        self.process = None
+
+    def send_command(self, command):
+        """
+        Sends ``command`` to the running PAUP* process, followed by the
+        end-of-command sentinel, and blocks until the sentinel is echoed
+        back. Returns (stdout_block, stderr_block) with sentinel lines
+        stripped from stdout.
+        """
+        command = command + ";\n"
+        command = command + "[!" + self.EOC_FLAG + "]\n"
+        self.process.stdin.write(command)
+        self.process.stdin.flush()
+        stdout_block = ""
+        while True:
+            stdout = self._stdout_reader.read()
+            if stdout is not None:
+                stdout_block = stdout_block + stdout
+            if self.FLAG_DETECT.search(stdout_block):
+                stdout_block = self.EOC_FLAG_STRIP.sub("", stdout_block)
+                break
+            # else:
+            #     print stdout_block
+        stderr_block = ""
+        while True:
+                stderr = self._stderr_reader.read()
+                if stderr is not None:
+                    stderr_block += stderr
+                else:
+                    break
+        return stdout_block, stderr_block
+
+    def execute_file(self, filepath):
+        """Has the PAUP* session execute the file at ``filepath``."""
+        return self.send_command("set warnreset=no; execute %s;\n" % filepath)
+
+    def read_data(self, data):
+        """
+        Writes ``data`` as NEXUS-formatted file and
+        executes file within processio.
+        """
+        cf = tempfile.NamedTemporaryFile("w", delete=True)
+        data.write_to_stream(cf, "nexus")
+        cf.flush()
+        stdout, stderr = self.execute_file(cf.name)
+        return stdout, stderr
+
diff --git a/dendropy/interop/raxml.py b/dendropy/interop/raxml.py
new file mode 100644
index 0000000..9e7ad8a
--- /dev/null
+++ b/dendropy/interop/raxml.py
@@ -0,0 +1,678 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wrapper around calls to RAxML.
+"""
+
+import sys
+import os
+import subprocess
+import tempfile
+import dendropy
+import random
+
+from dendropy.utility.messaging import ConsoleMessenger
+from dendropy.utility import processio
+
+def get_messenger(verbosity=1):
+    if verbosity == 0:
+        messaging_level = ConsoleMessenger.ERROR_MESSAGING_LEVEL
+    else:
+        messaging_level = ConsoleMessenger.INFO_MESSAGING_LEVEL
+    messenger = ConsoleMessenger(name="raxml-map-bipartitions",
+                    messaging_level=messaging_level)
+    return messenger
+
+###############################################################################
+## RAXML WRAPPER
+# raxmlHPC[-SSE3|-PTHREADS|-PTHREADS-SSE3|-HYBRID|-HYBRID-SSE3]
+#   -s sequenceFileName -n outputFileName -m substitutionModel
+#   [-a weightFileName] [-A secondaryStructureSubstModel]
+#   [-b bootstrapRandomNumberSeed] [-B wcCriterionThreshold]
+#   [-c numberOfCategories] [-C] [-d] [-D]
+#   [-e likelihoodEpsilon] [-E excludeFileName]
+#   [-f a|A|b|c|d|e|E|F|g|h|i|I|j|J|m|n|o|p|r|s|S|t|u|v|w|x|y] [-F]
+#   [-g groupingFileName] [-G placementThreshold] [-h]
+#   [-i initialRearrangementSetting] [-I autoFC|autoMR|autoMRE|autoMRE_IGN]
+#   [-j] [-J MR|MR_DROP|MRE|STRICT|STRICT_DROP] [-k] [-K] [-M]
+#   [-o outGroupName1[,outGroupName2[,...]]]
+#   [-p parsimonyRandomSeed] [-P proteinModel]
+#   [-q multipleModelFileName] [-r binaryConstraintTree]
+#   [-R binaryModelParamFile] [-S secondaryStructureFile] [-t userStartingTree]
+#   [-T numberOfThreads] [-U] [-v] [-w outputDirectory] [-W slidingWindowSize]
+#   [-x rapidBootstrapRandomNumberSeed] [-X] [-y]
+#   [-z multipleTreesFile] [-#|-N numberOfRuns|autoFC|autoMR|autoMRE|autoMRE_IGN]
+#   -a      Specify a column weight file name to assign individual weights to each column of
+#           the alignment. Those weights must be integers separated by any type and number
+#           of whitespaces within a separate file, see file "example_weights" for an example.
+#   -A      Specify one of the secondary structure substitution models implemented in RAxML.
+#           The same nomenclature as in the PHASE manual is used, available models:
+#           S6A, S6B, S6C, S6D, S6E, S7A, S7B, S7C, S7D, S7E, S7F, S16, S16A, S16B
+#           DEFAULT: 16-state GTR model (S16)
+#   -b      Specify an integer number (random seed) and turn on bootstrapping
+#           DEFAULT: OFF
+#   -B      specify a floating point number between 0.0 and 1.0 that will be used as cutoff threshold
+#           for the MR-based bootstopping criteria. The recommended setting is 0.03.
+#           DEFAULT: 0.03 (recommended empirically determined setting)
+#   -c      Specify number of distinct rate categories for RAxML when modelOfEvolution
+#           is set to GTRCAT or GTRMIX
+#           Individual per-site rates are categorized into numberOfCategories rate
+#           categories to accelerate computations.
+#           DEFAULT: 25
+#   -C      Conduct model parameter optimization on gappy, partitioned multi-gene alignments with per-partition
+#           branch length estimates (-M enabled) using the fast method with pointer meshes described in:
+#           Stamatakis and Ott: "Efficient computation of the phylogenetic likelihood function on multi-gene alignments and multi-core processors"
+#           WARNING: We can not conduct useful tree searches using this method yet! Does not work with Pthreads version.
+#   -d      start ML optimization from random starting tree
+#           DEFAULT: OFF
+#   -D      ML search convergence criterion. This will break off ML searches if the relative
+#           Robinson-Foulds distance between the trees obtained from two consecutive lazy SPR cycles
+#           is smaller or equal to 1%. Usage recommended for very large datasets in terms of taxa.
+#           On trees with more than 500 taxa this will yield execution time improvements of approximately 50%
+#           While yielding only slightly worse trees.
+#           DEFAULT: OFF
+#   -e      set model optimization precision in log likelihood units for final
+#           optimization of tree topology under MIX/MIXI or GAMMA/GAMMAI
+#           DEFAULT: 0.1   for models not using proportion of invariant sites estimate
+#                    0.001 for models using proportion of invariant sites estimate
+#   -E      specify an exclude file name, that contains a specification of alignment positions you wish to exclude.
+#           Format is similar to Nexus, the file shall contain entries like "100-200 300-400", to exclude a
+#           single column write, e.g., "100-100", if you use a mixed model, an appropriatly adapted model file
+#           will be written.
+#   -f      select algorithm:
+#           "-f a": rapid Bootstrap analysis and search for best-scoring ML tree in one program run
+#           "-f A": compute marginal ancestral states on a ROOTED reference tree provided with "t"
+#           "-f b": draw bipartition information on a tree provided with "-t" based on multiple trees
+#                   (e.g., from a bootstrap) in a file specifed by "-z"
+#           "-f c": check if the alignment can be properly read by RAxML
+#           "-f d": new rapid hill-climbing
+#                   DEFAULT: ON
+#           "-f e": optimize model+branch lengths for given input tree under GAMMA/GAMMAI only
+#           "-f E": execute very fast experimental tree search, at present only for testing
+#           "-f F": execute fast experimental tree search, at present only for testing
+#           "-f g": compute per site log Likelihoods for one ore more trees passed via
+#                   "-z" and write them to a file that can be read by CONSEL
+#           "-f h": compute log likelihood test (SH-test) between best tree passed via "-t"
+#                   and a bunch of other trees passed via "-z"
+#           "-f i": EXPERIMENTAL do not use for real tree inferences: conducts a single cycle of fast lazy SPR moves
+#                   on a given input tree, to be used in combination with -C and -M
+#           "-f I": EXPERIMENTAL do not use for real tree inferences: conducts a single cycle of thorough lazy SPR moves
+#                   on a given input tree, to be used in combination with -C and -M
+#           "-f j": generate a bunch of bootstrapped alignment files from an original alignemnt file.
+#                   You need to specify a seed with "-b" and the number of replicates with "-#"
+#           "-f J": Compute SH-like support values on a given tree passed via "-t".
+#           "-f m": compare bipartitions between two bunches of trees passed via "-t" and "-z"
+#                   respectively. This will return the Pearson correlation between all bipartitions found
+#                   in the two tree files. A file called RAxML_bipartitionFrequencies.outpuFileName
+#                   will be printed that contains the pair-wise bipartition frequencies of the two sets
+#           "-f n": compute the log likelihood score of all trees contained in a tree file provided by
+#                   "-z" under GAMMA or GAMMA+P-Invar
+#           "-f o": old and slower rapid hill-climbing without heuristic cutoff
+#           "-f p": perform pure stepwise MP addition of new sequences to an incomplete starting tree and exit
+#           "-f r": compute pairwise Robinson-Foulds (RF) distances between all pairs of trees in a tree file passed via "-z"
+#                   if the trees have node labales represented as integer support values the program will also compute two flavors of
+#                   the weighted Robinson-Foulds (WRF) distance
+#           "-f s": split up a multi-gene partitioned alignment into the respective subalignments
+#           "-f S": compute site-specific placement bias using a leave one out test inspired by the evolutionary placement algorithm
+#           "-f t": do randomized tree searches on one fixed starting tree
+#           "-f u": execute morphological weight calibration using maximum likelihood, this will return a weight vector.
+#                   you need to provide a morphological alignment and a reference tree via "-t"
+#           "-f v": classify a bunch of environmental sequences into a reference tree using thorough read insertions
+#                   you will need to start RAxML with a non-comprehensive reference tree and an alignment containing all sequences (reference + query)
+#           "-f w": compute ELW test on a bunch of trees passed via "-z"
+#           "-f x": compute pair-wise ML distances, ML model parameters will be estimated on an MP
+#                   starting tree or a user-defined tree passed via "-t", only allowed for GAMMA-based
+#                   models of rate heterogeneity
+#           "-f y": classify a bunch of environmental sequences into a reference tree using parsimony
+#                   you will need to start RAxML with a non-comprehensive reference tree and an alignment containing all sequences (reference + query)
+#           DEFAULT for "-f": new rapid hill climbing
+#   -F      enable ML tree searches under CAT model for very large trees without switching to
+#           GAMMA in the end (saves memory).
+#           This option can also be used with the GAMMA models in order to avoid the thorough optimization
+#           of the best-scoring ML tree in the end.
+#           DEFAULT: OFF
+#   -g      specify the file name of a multifurcating constraint tree
+#           this tree does not need to be comprehensive, i.e. must not contain all taxa
+#   -G      enable the ML-based evolutionary placement algorithm heuristics
+#           by specifying a threshold value (fraction of insertion branches to be evaluated
+#           using slow insertions under ML).
+#   -h      Display this help message.
+#   -i      Initial rearrangement setting for the subsequent application of topological
+#           changes phase
+#   -I      a posteriori bootstopping analysis. Use:
+#          "-I autoFC" for the frequency-based criterion
+#          "-I autoMR" for the majority-rule consensus tree criterion
+#          "-I autoMRE" for the extended majority-rule consensus tree criterion
+#          "-I autoMRE_IGN" for metrics similar to MRE, but include bipartitions under the threshold whether they are compatible
+#                           or not. This emulates MRE but is faster to compute.
+#           You also need to pass a tree file containg several bootstrap replicates via "-z"
+#   -j      Specifies that intermediate tree files shall be written to file during the standard ML and BS tree searches.
+#           DEFAULT: OFF
+#   -J      Compute majority rule consensus tree with "-J MR" or extended majority rule consensus tree with "-J MRE"
+#           or strict consensus tree with "-J STRICT".
+#           Options "-J STRICT_DROP" and "-J MR_DROP" will execute an algorithm that identifies dropsets which contain
+#           rogue taxa as proposed by Pattengale et al. in the paper "Uncovering hidden phylogenetic consensus".
+#           You will also need to provide a tree file containing several UNROOTED trees via "-z"
+#   -k      Specifies that bootstrapped trees should be printed with branch lengths.
+#           The bootstraps will run a bit longer, because model parameters will be optimized
+#           at the end of each run under GAMMA or GAMMA+P-Invar respectively.
+#           DEFAULT: OFF
+#   -K      Specify one of the multi-state substitution models (max 32 states) implemented in RAxML.
+#           Available models are: ORDERED, MK, GTR
+#           DEFAULT: GTR model
+#   -m      Model of Binary (Morphological), Nucleotide, Multi-State, or Amino Acid Substitution:
+#           BINARY:
+#             "-m BINCAT"         : Optimization of site-specific
+#                                   evolutionary rates which are categorized into numberOfCategories distinct
+#                                   rate categories for greater computational efficiency. Final tree might be evaluated
+#                                   automatically under BINGAMMA, depending on the tree search option
+#             "-m BINCATI"        : Optimization of site-specific
+#                                   evolutionary rates which are categorized into numberOfCategories distinct
+#                                   rate categories for greater computational efficiency. Final tree might be evaluated
+#                                   automatically under BINGAMMAI, depending on the tree search option
+#             "-m BINGAMMA"       : GAMMA model of rate
+#                                   heterogeneity (alpha parameter will be estimated)
+#             "-m BINGAMMAI"      : Same as BINGAMMA, but with estimate of proportion of invariable sites
+#           NUCLEOTIDES:
+#             "-m GTRCAT"         : GTR + Optimization of substitution rates + Optimization of site-specific
+#                                   evolutionary rates which are categorized into numberOfCategories distinct
+#                                   rate categories for greater computational efficiency.  Final tree might be evaluated
+#                                   under GTRGAMMA, depending on the tree search option
+#             "-m GTRCATI"        : GTR + Optimization of substitution rates + Optimization of site-specific
+#                                   evolutionary rates which are categorized into numberOfCategories distinct
+#                                   rate categories for greater computational efficiency.  Final tree might be evaluated
+#                                   under GTRGAMMAI, depending on the tree search option
+#             "-m GTRGAMMA"       : GTR + Optimization of substitution rates + GAMMA model of rate
+#                                   heterogeneity (alpha parameter will be estimated)
+#             "-m GTRGAMMAI"      : Same as GTRGAMMA, but with estimate of proportion of invariable sites
+#           MULTI-STATE:
+#             "-m MULTICAT"         : Optimization of site-specific
+#                                   evolutionary rates which are categorized into numberOfCategories distinct
+#                                   rate categories for greater computational efficiency. Final tree might be evaluated
+#                                   automatically under MULTIGAMMA, depending on the tree search option
+#             "-m MULTICATI"        : Optimization of site-specific
+#                                   evolutionary rates which are categorized into numberOfCategories distinct
+#                                   rate categories for greater computational efficiency. Final tree might be evaluated
+#                                   automatically under MULTIGAMMAI, depending on the tree search option
+#             "-m MULTIGAMMA"       : GAMMA model of rate
+#                                   heterogeneity (alpha parameter will be estimated)
+#             "-m MULTIGAMMAI"      : Same as MULTIGAMMA, but with estimate of proportion of invariable sites
+#             You can use up to 32 distinct character states to encode multi-state regions, they must be used in the following order:
+#             0, 1, 2, 3, 4, 5, 6, 7, 8, 9, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V
+#             i.e., if you have 6 distinct character states you would use 0, 1, 2, 3, 4, 5 to encode these.
+#             The substitution model for the multi-state regions can be selected via the "-K" option
+#           AMINO ACIDS:
+#             "-m PROTCATmatrixName[F]"         : specified AA matrix + Optimization of substitution rates + Optimization of site-specific
+#                                                 evolutionary rates which are categorized into numberOfCategories distinct
+#                                                 rate categories for greater computational efficiency.   Final tree might be evaluated
+#                                                 automatically under PROTGAMMAmatrixName[f], depending on the tree search option
+#             "-m PROTCATImatrixName[F]"        : specified AA matrix + Optimization of substitution rates + Optimization of site-specific
+#                                                 evolutionary rates which are categorized into numberOfCategories distinct
+#                                                 rate categories for greater computational efficiency.   Final tree might be evaluated
+#                                                 automatically under PROTGAMMAImatrixName[f], depending on the tree search option
+#             "-m PROTGAMMAmatrixName[F]"       : specified AA matrix + Optimization of substitution rates + GAMMA model of rate
+#                                                 heterogeneity (alpha parameter will be estimated)
+#             "-m PROTGAMMAImatrixName[F]"      : Same as PROTGAMMAmatrixName[F], but with estimate of proportion of invariable sites
+#             Available AA substitution models: DAYHOFF, DCMUT, JTT, MTREV, WAG, RTREV, CPREV, VT, BLOSUM62, MTMAM, LG, MTART, MTZOA, PMB, HIVB, HIVW, JTTDCMUT, FLU, GTR
+#             With the optional "F" appendix you can specify if you want to use empirical base frequencies
+#             Please note that for mixed models you can in addition specify the per-gene AA model in
+#             the mixed model file (see manual for details). Also note that if you estimate AA GTR parameters on a partitioned
+#             dataset, they will be linked (estimated jointly) across all partitions to avoid over-parametrization
+#   -M      Switch on estimation of individual per-partition branch lengths. Only has effect when used in combination with "-q"
+#           Branch lengths for individual partitions will be printed to separate files
+#           A weighted average of the branch lengths is computed by using the respective partition lengths
+#           DEFAULT: OFF
+#   -n      Specifies the name of the output file.
+#   -o      Specify the name of a single outgroup or a comma-separated list of outgroups, eg "-o Rat"
+#           or "-o Rat,Mouse", in case that multiple outgroups are not monophyletic the first name
+#           in the list will be selected as outgroup, don't leave spaces between taxon names!
+#   -p      Specify a random number seed for the parsimony inferences. This allows you to reproduce your results
+#           and will help me debug the program.
+#   -P      Specify the file name of a user-defined AA (Protein) substitution model. This file must contain
+#           420 entries, the first 400 being the AA substitution rates (this must be a symmetric matrix) and the
+#           last 20 are the empirical base frequencies
+#   -q      Specify the file name which contains the assignment of models to alignment
+#           partitions for multiple models of substitution. For the syntax of this file
+#           please consult the manual.
+#   -r      Specify the file name of a binary constraint tree.
+#           this tree does not need to be comprehensive, i.e. must not contain all taxa
+#   -R      Specify the file name of a binary model parameter file that has previously been generated
+#           with RAxML using the -f e tree evaluation option. The file name should be:
+#           RAxML_binaryModelParameters.runID
+#   -s      Specify the name of the alignment data file in PHYLIP format
+#   -S      Specify the name of a secondary structure file. The file can contain "." for
+#           alignment columns that do not form part of a stem and characters "()<>[]{}" to define
+#           stem regions and pseudoknots
+#   -t      Specify a user starting tree file name in Newick format
+#   -T      PTHREADS VERSION ONLY! Specify the number of threads you want to run.
+#           Make sure to set "-T" to at most the number of CPUs you have on your machine,
+#           otherwise, there will be a huge performance decrease!
+#   -U      Try to save memory by using SEV-based implementation for gap columns on large gappy alignments
+#           WARNING: this will only work for DNA under GTRGAMMA and is still in an experimental state.
+#   -v      Display version information
+#   -w      FULL (!) path to the directory into which RAxML shall write its output files
+#           DEFAULT: current directory
+#   -W      Sliding window size for leave-one-out site-specific placement bias algorithm
+#           only effective when used in combination with "-f S"
+#           DEFAULT: 100 sites
+#   -x      Specify an integer number (random seed) and turn on rapid bootstrapping
+#           CAUTION: unlike in version 7.0.4 RAxML will conduct rapid BS replicates under
+#           the model of rate heterogeneity you specified via "-m" and not by default under CAT
+#   -X      EXPERIMENTAL OPTION: This option will do a per-site estimate of protein substitution models
+#           by looping over all given, fixed models LG, WAG, JTT, etc and using their respective base frequencies to independently
+#           assign a prot subst. model to each site via ML optimization
+#           At present this option only works with the GTR+GAMMA model, unpartitioned datasets, and in the sequential
+#           version only.
+#           DEFAULT: OFF
+#   -y      If you want to only compute a parsimony starting tree with RAxML specify "-y",
+#           the program will exit after computation of the starting tree
+#           DEFAULT: OFF
+#   -z      Specify the file name of a file containing multiple trees e.g. from a bootstrap
+#           that shall be used to draw bipartition values onto a tree provided with "-t",
+#           It can also be used to compute per site log likelihoods in combination with "-f g"
+#           and to read a bunch of trees for a couple of other options ("-f h", "-f m", "-f n").
+#   -#|-N   Specify the number of alternative runs on distinct starting trees
+#           In combination with the "-b" option, this will invoke a multiple bootstrap analysis
+#           Note that "-N" has been added as an alternative since "-#" sometimes caused problems
+#           with certain MPI job submission systems, since "-#" is often used to start comments.
+#           If you want to use the bootstopping criteria specify "-# autoMR" or "-# autoMRE" or "-# autoMRE_IGN"
+#           for the majority-rule tree based criteria (see -I option) or "-# autoFC" for the frequency-based criterion.
+#           Bootstopping will only work in combination with "-x" or "-b"
+#           DEFAULT: 1 single analysis
+
class RaxmlRunner(object):
    """
    Wrapper around the RAxML executable.

    Supports (i) estimating a maximum-likelihood tree from a character
    matrix (``estimate_tree``) and (ii) mapping bootstrap support values
    onto a target tree (``map_bipartitions``). All intermediate files are
    written to ``working_dir_path``; if no directory is given, a temporary
    one is created and (by default) removed when the run completes.
    """

    def __init__(self,
            working_dir_path=None,
            replace=None,
            postclean=None,
            name=None,
            verbosity=1,
            raxml_path="raxmlHPC"):
        # Files/directories created during a run, scheduled for removal by
        # _postclean_working_dir().
        self.dirs_to_clean = []
        self.files_to_clean = []
        if working_dir_path is None:
            # Throw-away temporary directory: nothing to overwrite, and
            # clean up afterwards unless explicitly told otherwise.
            self.working_dir_path = None
            self.replace = False
            self.postclean = postclean if postclean is not None else True
        else:
            # User-specified directory: overwrite by default, but leave
            # the output files in place unless explicitly told otherwise.
            self.working_dir_path = working_dir_path
            self.replace = replace if replace is not None else True
            self.postclean = postclean if postclean is not None else False
        self._name = name
        self.verbosity = verbosity
        self.messenger = get_messenger(self.verbosity)
        self.input_format = "nexus"
        self.output_format = "nexus"
        self.raxml_path = raxml_path
        # Maps sanitized taxon labels (e.g. "T0", "T1", ...) back to the
        # original labels; populated by _remap_taxon_labels().
        self.taxon_label_map = {}

    @property
    def name(self):
        # Run identifier used to compose all input/output file names;
        # lazily defaults to "dendropy_raxml".
        if self._name is None:
            self._name = "dendropy_raxml"
        return self._name

    def _compose_fname(self, s):
        # RAxML names its output files "RAxML_<type>.<run name>".
        return "RAxML_%s.%s" % (s, self.name)

    @property
    def input_seq_fname(self):
        # Name of the input sequence (alignment) file.
        return "%s.seqs" % self.name

    @property
    def best_tree_fname(self):
        return self._compose_fname("bestTree")

    @property
    def bipartitions_fname(self):
        return self._compose_fname("bipartitions")

    @property
    def info_fname(self):
        return self._compose_fname("info")

    @property
    def log_fname(self):
        return self._compose_fname("log")

    @property
    def parsimony_tree_fname(self):
        return self._compose_fname("parsimonyTree")

    @property
    def result_fname(self):
        return self._compose_fname("result")

    def _raxml_output_filenames(self):
        # All file names a RAxML run may produce (or that we create for it).
        return [self.input_seq_fname,
                self.best_tree_fname,
                self.bipartitions_fname,
                self.info_fname,
                self.log_fname,
                self.parsimony_tree_fname,
                self.result_fname]

    def _raxml_output_filepaths(self):
        # Full paths of all (potential) RAxML output files in the working
        # directory.
        return [os.path.join(self.working_dir_path, fp)
                for fp in self._raxml_output_filenames()]

    def _get_trees(self, tree_filepath, tree_list=None, **kwargs):
        # Read trees from `tree_filepath` (in `self.input_format`) into
        # `tree_list`, creating a new TreeList if none is given.
        if tree_list is None:
            tree_list = dendropy.TreeList()
        tree_list.read_from_path(tree_filepath,
                self.input_format,
                **kwargs)
        return tree_list

    def _expand_path(self, path):
        # Expand "~" and environment variables in a user-supplied path.
        return os.path.expanduser(os.path.expandvars(path))

    def _check_overwrite(self, path):
        # Return True if `path` may be (over)written: either it does not
        # exist, `self.replace` is set, or the user consents interactively.
        # Answering "all" enables `self.replace` for the rest of the run.
        if os.path.exists(path) and not self.replace:
            ok = input("Overwrite existing file '{}'? (y/n/all [n])? ".format(path))
            if not ok:
                return False
            ok = ok[0].lower()
            if ok == "a":
                self.replace = True
                return True
            if ok == "y":
                return True
            return False
        else:
            return True

    def _send_error(self, msg):
        # Report an error message on standard error.
        # (Bug fix: estimate_tree() and map_bipartitions() call this
        # method, but it previously existed only as commented-out code,
        # so every error path raised AttributeError instead of reporting.)
        sys.stderr.write("{}\n".format(msg))

    def _write_dummy_seqs(self, taxon_namespace, out):
        # Write a minimal random PHYLIP alignment for `taxon_namespace` to
        # `out`: RAxML requires sequence data even for operations (such as
        # bipartition mapping) that do not actually use it.
        nchar = 19
        out.write("{} {}\n".format(len(taxon_namespace), nchar))
        bases = ["A", "C", "G", "T"]
        for idx, taxon in enumerate(taxon_namespace):
            base_seq = [random.choice(bases) for x in range(nchar)]
            out.write("{}    {}\n".format(taxon.label, "".join(base_seq)))

    def _remap_taxon_labels(self, taxa):
        # Replace taxon labels with safe surrogates ("T0", "T1", ...) so
        # RAxML does not choke on special characters; record the original
        # labels in `self.taxon_label_map` for later restoration.
        for idx, taxon in enumerate(taxa):
            label = "T{}".format(idx)
            self.taxon_label_map[label] = taxon.label
            taxon.label = label

    def _create_working_dir(self):
        # Ensure the working directory exists, creating a temporary one
        # (scheduled for post-run removal) if none was specified.
        if self.working_dir_path is None:
            self.working_dir_path = tempfile.mkdtemp()
            self.dirs_to_clean.append(self.working_dir_path)
        if not os.path.exists(self.working_dir_path):
            os.makedirs(self.working_dir_path)

    def _clean_working_files(self):
        # Remove output files left over from previous runs, asking for
        # confirmation where required; abort the program if refused.
        for fpath in self._raxml_output_filepaths():
            if not self._check_overwrite(fpath):
                sys.exit(0)
            if os.path.exists(fpath):
                os.remove(fpath)

    def _preclean_working_dir(self):
        self._clean_working_files()

    def _postclean_working_dir(self):
        # Best-effort removal of run files and temporary directories;
        # failures (e.g. already-removed or non-empty paths) are ignored.
        if self.postclean:
            for fpath in self.files_to_clean:
                try:
                    os.remove(fpath)
                except OSError:
                    pass
            for dir_path in self.dirs_to_clean:
                try:
                    os.rmdir(dir_path)
                except OSError:
                    pass

    def estimate_tree(self,
            char_matrix,
            raxml_args=None):
        """
        Estimate a maximum-likelihood tree for `char_matrix` by running
        RAxML (GTRCAT model) and return the best tree found.

        Parameters
        ----------
        char_matrix : dendropy.CharacterMatrix
            Input character data; its taxon labels are temporarily remapped
            to RAxML-safe surrogates and restored on the result tree.
        raxml_args : list of strings
            Additional command-line arguments passed through to RAxML.

        Returns
        -------
        best_tree : dendropy.Tree
            The best-scoring ML tree, with original taxon labels restored.
        """
        # Set up taxa.
        taxa = char_matrix.taxon_namespace

        # Create the working directory.
        self._create_working_dir()

        # Remap taxon labels to RAxML-safe surrogates.
        self.taxon_label_map = {}
        self._remap_taxon_labels(taxa)

        # Clean the working directory of files from previous runs.
        self._preclean_working_dir()

        # Write the input sequences.
        raxml_seqs_filepath = os.path.join(self.working_dir_path, self.input_seq_fname)
        raxml_seqs_filepath_out = open(raxml_seqs_filepath, "w")
        char_matrix.write_to_stream(raxml_seqs_filepath_out, "phylip")
        raxml_seqs_filepath_out.flush()
        raxml_seqs_filepath_out.close()
        self.files_to_clean.append(raxml_seqs_filepath)
        # RAxML may also write a ".reduced" copy of the alignment.
        self.files_to_clean.append(raxml_seqs_filepath + ".reduced")

        # Run RAxML.
        if raxml_args is None:
            raxml_args = []
        cmd = [self.raxml_path,
                '-m',
                'GTRCAT',
                '-s', raxml_seqs_filepath,
                '-n', self.name,
                '-p', str(random.randint(0, sys.maxsize))] + raxml_args
        if self.verbosity >= 2:
            # Let RAxML write directly to this process's stdout/stderr.
            stdout_pipe = None
            stderr_pipe = None
        else:
            stdout_pipe = subprocess.PIPE
            stderr_pipe = subprocess.PIPE
        p = subprocess.Popen(cmd,
            stdout=stdout_pipe,
            stderr=stderr_pipe,
            cwd=self.working_dir_path)
        stdout, stderr = processio.communicate(p)
        if p.returncode != 0:
            sys.stderr.write("[RAxML run failed]:\n\n%s\n\n" % (" ".join(cmd)))
            sys.stdout.write(stdout)
            sys.stderr.write(stderr)
            sys.exit(p.returncode)

        # Read the result.
        raxml_best_tree_fpath = os.path.join(self.working_dir_path, self.best_tree_fname)
        if not os.path.exists(raxml_best_tree_fpath):
            self._send_error("RAxML result not found: {}".format(raxml_best_tree_fpath))
            sys.exit(1)
        best_tree = dendropy.Tree.get_from_path(raxml_best_tree_fpath,
                "newick",
                taxon_namespace=taxa)

        # Restore the original taxon labels.
        for taxon in best_tree.taxon_namespace:
            taxon.label = self.taxon_label_map[taxon.label]

        # Clean up.
        self._postclean_working_dir()

        return best_tree

    def map_bipartitions(self, target_tree_fpath, bootstrap_trees_fpaths):
        """
        Use RAxML ("-f b") to annotate the target tree with the frequencies
        of its bipartitions among a set of bootstrap trees.

        Parameters
        ----------
        target_tree_fpath : string
            Path to the file containing the target tree.
        bootstrap_trees_fpaths : iterable of strings
            Paths to files containing the bootstrap trees.

        Returns
        -------
        mapped_tree : dendropy.Tree
            The target tree with bipartition support mapped onto it and
            original taxon labels restored.
        """
        # Common taxon namespace shared by target and bootstrap trees.
        taxa = dendropy.TaxonNamespace()

        # Read the target tree.
        target_tree_fpath = self._expand_path(target_tree_fpath)
        target_tree = self._get_trees(target_tree_fpath, taxon_namespace=taxa)[0]

        # Read the bootstrap trees.
        boot_trees = dendropy.TreeList()
        for fpath in bootstrap_trees_fpaths:
            fpath = self._expand_path(fpath)
            self._get_trees(tree_filepath=fpath, tree_list=boot_trees, taxon_namespace=taxa)

        # Create the working directory.
        self._create_working_dir()

        # Remap taxon labels to RAxML-safe surrogates.
        self.taxon_label_map = {}
        self._remap_taxon_labels(taxa)

        # Write the input target tree.
        raxml_target_tree_filepath = os.path.join(self.working_dir_path, "{}.target_tree".format(self.name))
        if not self._check_overwrite(raxml_target_tree_filepath):
            sys.exit(0)
        target_tree.write_to_path(raxml_target_tree_filepath, "newick")
        self.files_to_clean.append(raxml_target_tree_filepath)

        # Write the input bootstrap trees.
        raxml_bootstrap_trees_filepath = os.path.join(self.working_dir_path, "{}.boot_trees".format(self.name))
        if not self._check_overwrite(raxml_bootstrap_trees_filepath):
            sys.exit(0)
        boot_trees.write_to_path(raxml_bootstrap_trees_filepath, "newick")
        self.files_to_clean.append(raxml_bootstrap_trees_filepath)

        # Write (dummy) input sequences: RAxML requires an alignment even
        # though bipartition mapping does not use the sequence data.
        raxml_seqs_filepath = os.path.join(self.working_dir_path, "{}.seqs".format(self.name))
        if not self._check_overwrite(raxml_seqs_filepath):
            sys.exit(0)
        raxml_seqs_filepath_out = open(raxml_seqs_filepath, "w")
        self._write_dummy_seqs(taxa, raxml_seqs_filepath_out)
        raxml_seqs_filepath_out.flush()
        raxml_seqs_filepath_out.close()
        self.files_to_clean.append(raxml_seqs_filepath)

        # Clean the working directory of files from previous runs.
        self._preclean_working_dir()

        # Run RAxML.
        cmd = [self.raxml_path, '-f', 'b',
                '-t', os.path.basename(raxml_target_tree_filepath),
                '-z', os.path.basename(raxml_bootstrap_trees_filepath),
                '-s', os.path.basename(raxml_seqs_filepath),
                '-m', 'GTRCAT',
                '-n', self.name]
        if self.verbosity >= 2:
            stdout_pipe = None
            stderr_pipe = None
        else:
            stdout_pipe = subprocess.PIPE
            stderr_pipe = subprocess.PIPE
        p = subprocess.Popen(cmd,
            stdout=stdout_pipe,
            stderr=stderr_pipe,
            cwd=self.working_dir_path)
        stdout, stderr = processio.communicate(p)
        if p.returncode != 0:
            self._send_error("RAxML run failed")
            if self.verbosity < 2:
                sys.stdout.write(stdout)
                sys.stderr.write(stderr)
            sys.exit(p.returncode)

        # Read the result.
        raxml_mapped_tree_fpath = os.path.join(self.working_dir_path, self.bipartitions_fname)
        if not os.path.exists(raxml_mapped_tree_fpath):
            self._send_error("RAxML result not found: {}".format(raxml_mapped_tree_fpath))
            sys.exit(1)
        mapped_tree = dendropy.Tree.get_from_path(raxml_mapped_tree_fpath, "newick")

        # Restore the original taxon labels. (Bug fix: the original code
        # consulted a local, always-empty `taxon_label_map` here instead of
        # `self.taxon_label_map`, raising KeyError on every successful run.)
        for taxon in mapped_tree.taxon_namespace:
            taxon.label = self.taxon_label_map[taxon.label]

        # Clean up.
        self.files_to_clean.append(raxml_mapped_tree_fpath)
        self.files_to_clean.append(self.info_fname)
        self._postclean_working_dir()

        return mapped_tree
+
diff --git a/dendropy/interop/rstats.py b/dendropy/interop/rstats.py
new file mode 100644
index 0000000..3d24aa2
--- /dev/null
+++ b/dendropy/interop/rstats.py
@@ -0,0 +1,99 @@
+#! /usr/bin/env python
+
+import os
+import sys
+import subprocess
+from dendropy.utility import error
+from dendropy.utility import metavar
+from dendropy.utility import libexec
+from dendropy.utility import processio
+
# Path to the Rscript executable, taken from the environment variable named
# by metavar.DENDROPY_RSCRIPT_PATH_ENVAR (defaulting to "Rscript" on $PATH).
# The special value "NONE" disables R interoperability.
RSCRIPT_EXECUTABLE = os.environ.get(metavar.DENDROPY_RSCRIPT_PATH_ENVAR, "Rscript")
if RSCRIPT_EXECUTABLE == "NONE":
    DENDROPY_RSCRIPT_INTEROPERABILITY = False
else:
    DENDROPY_RSCRIPT_INTEROPERABILITY = True
# Bundled "rsubprocess.R" helper script that RService.call() executes;
# it reads the R commands to run from standard input.
rsubprocess_pipe_path = libexec.filepath("rsubprocess.R")
+
class RService(object):

    @staticmethod
    def call(r_commands,
            ignore_error_returncode=False,
            cwd=None,
            env=None,
            rscript_path=RSCRIPT_EXECUTABLE,
            ):
        """
        Executes a sequence of commands in R and returns the results.

        Note that newlines ('\n') and other special characters will be
        converted before being passed to the R interpreter, so need to
        be escaped or entered as raw string expressions.

        That is, instead of, e.g.:

            returncode, stdout, stderr = RService.call([
                "cat('hello, world\n')",
            ])

        use this:

            returncode, stdout, stderr = RService.call([
                "cat('hello, world\\n')",
            ])

        or:

            returncode, stdout, stderr = RService.call([
                r"cat('hello, world\n')",
            ])

        Parameters
        ----------
        r_commands : iterable of strings
            A list or some other iterable of strings of R commands.
        ignore_error_returncode : bool
            If `True`, then a non-0 return code from the R process will not
            result in an exception being raised.
        cwd : string
            Set the working directory of the R process to this directory.
        env : dictionary
            Environmental variables to set for the R process.
        rscript_path : string
            Path to the Rscript executable.

        Returns
        -------
        returncode : exit value of the R process
        stdout : string
            Contents of the R process standard output.
        stderr : string
            Contents of the R process standard error.
        """
        if not isinstance(r_commands, str):
            r_commands = "\n".join(r_commands)
        # Trailing newline ensures the final command is executed.
        r_commands += "\n"
        # Bug fix: honor the `rscript_path` argument (previously the
        # module-level RSCRIPT_EXECUTABLE was used unconditionally,
        # silently ignoring a caller-supplied executable path).
        invocation_command = [rscript_path, rsubprocess_pipe_path]
        p = subprocess.Popen(
                invocation_command,
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=cwd,
                env=env,
                )
        # The commands are fed to the helper script via standard input.
        stdout, stderr = processio.communicate(p, r_commands)
        if (p.returncode != 0 and not ignore_error_returncode):
            raise error.ExternalServiceError(
                    service_name="Rscript",
                    invocation_command=invocation_command,
                    service_input=r_commands,
                    returncode = p.returncode,
                    stdout=stdout,
                    stderr=stderr)
        return p.returncode, stdout, stderr
+
def call(*args, **kwargs):
    """Module-level convenience wrapper: delegates directly to :meth:`RService.call`."""
    result = RService.call(*args, **kwargs)
    return result
+
diff --git a/dendropy/interop/seqgen.py b/dendropy/interop/seqgen.py
new file mode 100644
index 0000000..8e43135
--- /dev/null
+++ b/dendropy/interop/seqgen.py
@@ -0,0 +1,212 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wrappers for interacting with SeqGen. Originally part of PySeqGen.
+"""
+
+import subprocess
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+import uuid
+import tempfile
+import socket
+import random
+import os
+import sys
+
+from optparse import OptionGroup
+from optparse import OptionParser
+
+import dendropy
+from dendropy.utility.messaging import get_logger
+from dendropy.utility import processio
+_LOG = get_logger("interop.seqgen")
+
+HOSTNAME = socket.gethostname()
+PID = os.getpid()
+
+def _get_strongly_unique_tempfile(dir=None):
+    # Embed hostname, PID, and a fresh UUID in the filename prefix so temp
+    # files collide neither across processes nor across hosts that share a
+    # filesystem (e.g. cluster runs).
+    return tempfile.NamedTemporaryFile(dir=dir, prefix="dendropy_tempfile-{0}-{1}-{2}".format(HOSTNAME, PID, uuid.uuid4()))
+
+def _get_tempfile(dir=None):
+    # Plain named tempfile; sufficient when runs are confined to one host.
+    return tempfile.NamedTemporaryFile(dir=dir)
+
+class SeqGen(object):
+    """
+    This class wraps all attributes and input needed to make a call to SeqGen.
+
+    Attribute values map (mostly) one-to-one onto Seq-Gen command-line
+    options; see ``_compose_arguments()`` for the exact mapping.
+    """
+
+    class SubstitutionModel(object):
+        # Simple named token identifying a Seq-Gen substitution model
+        # (passed to seq-gen via the "-m" option).
+        def __init__(self, idstr):
+            self.idstr = idstr
+        def __str__(self):
+            return self.idstr
+
+    F84 = SubstitutionModel("F84")
+    HKY = SubstitutionModel("HKY")
+    GTR = SubstitutionModel("GTR")
+    JTT = SubstitutionModel("JTT")
+    WAG = SubstitutionModel("WAG")
+    PAM = SubstitutionModel("PAM")
+    BLOSUM = SubstitutionModel("BLOSUM")
+    MTREV = SubstitutionModel("MTREV")
+    CPREV = SubstitutionModel("CPREV")
+    GENERAL = SubstitutionModel("GENERAL")
+    MODELS = [F84, HKY, GTR, JTT, WAG, PAM, BLOSUM, MTREV, CPREV, GENERAL]
+    MODEL_IDS = [str(m) for m in MODELS]
+
+    def get_model(idstr):
+        # Case-insensitive lookup of a model token by name; returns None
+        # when ``idstr`` names no known model.
+        for model in SeqGen.MODELS:
+            if idstr.upper() == model.idstr.upper():
+                return model
+        return None
+    get_model = staticmethod(get_model)
+
+    def __init__(self, strongly_unique_tempfiles=False):
+        """
+        Sets up all properties, which (generally) map directly to command
+        parameters of Seq-Gen.
+
+        If ``strongly_unique_tempfiles`` is True, tree input tempfiles get
+        hostname/PID/UUID-tagged names (safe for shared filesystems).
+        """
+
+        # control tempfile generation
+        if strongly_unique_tempfiles:
+            self.get_tempfile = _get_strongly_unique_tempfile
+        else:
+            self.get_tempfile = _get_tempfile
+
+        # python object specific attributes
+        self.seqgen_path = 'seq-gen'
+        self.rng_seed = None
+        self._rng = None
+
+        # following are passed to seq-gen in one form or another
+        self.char_model = 'HKY'
+        self.seq_len = None
+        self.num_partitions = None
+        self.scale_branch_lens = None
+        self.scale_tree_len = None
+        self.codon_pos_rates = None
+        self.gamma_shape = None
+        self.gamma_cats = None
+        self.prop_invar = None
+        self.state_freqs = None
+        self.ti_tv = 0.5 # = kappa of 1.0, i.e. JC
+        self.general_rates = None
+        self.ancestral_seq = None
+        self.output_text_append = None
+        self.write_ancestral_seqs = False
+        self.write_site_rates = False
+
+    def _get_rng(self):
+        # Lazily construct the RNG so a seed assigned after __init__ (but
+        # before first use) still takes effect.
+        if self._rng is None:
+            self._rng = random.Random(self.rng_seed)
+        return self._rng
+    def _set_rng(self, rng):
+        self._rng = rng
+    rng = property(_get_rng, _set_rng)
+
+    def _get_kappa(self):
+        # NOTE(review): the comment on ``self.ti_tv`` above implies
+        # kappa = 2 * ti_tv (ti_tv of 0.5 == kappa of 1.0), but this pair
+        # computes ti_tv / 2 and kappa * 2 -- the conversion looks inverted.
+        # Also no ``kappa = property(...)`` is declared, so this pair is
+        # currently unreachable through the public API. Confirm against the
+        # Seq-Gen manual before exposing.
+        return float(self.ti_tv) / 2
+    def _set_kappa(self, kappa):
+        self.ti_tv = kappa * 2
+
+    def _compose_arguments(self):
+        """
+        Composes and returns a list of strings that make up the arguments to a Seq-Gen
+        call, based on the attribute values of the object.
+        """
+        args = []
+        args.append(self.seqgen_path)
+        if self.char_model:
+            args.append("-m%s" % str(self.char_model))
+        if self.seq_len:
+            args.append("-l%s" % self.seq_len)
+        if self.num_partitions:
+            args.append("-p%s" % self.num_partitions)
+        if self.scale_branch_lens:
+            args.append("-s%s" % self.scale_branch_lens)
+        if self.scale_tree_len:
+            args.append("-d%s" % self.scale_tree_len)
+        if self.codon_pos_rates:
+            # assumes codon_pos_rates entries are already strings (no str()
+            # coercion here, unlike state_freqs/general_rates below) -- TODO confirm
+            args.append("-c%s" % (",".join(self.codon_pos_rates)))
+        if self.gamma_shape:
+            args.append("-a%s" % self.gamma_shape)
+        if self.gamma_cats:
+            args.append("-g%s" % self.gamma_cats)
+        if self.prop_invar:
+            args.append("-i%s" % self.prop_invar)
+        if self.state_freqs:
+            if isinstance(self.state_freqs, str):
+                args.append("-f%s" % self.state_freqs)
+            else:
+                args.append("-f%s" % (",".join([str(s) for s in self.state_freqs])))
+        # "-t" (ti/tv) is only meaningful for the HKY and F84 models
+        if self.ti_tv and (self.char_model in ['HKY', 'F84']):
+            args.append("-t%s" % self.ti_tv)
+        if self.general_rates:
+            if isinstance(self.general_rates, str):
+                args.append("-r%s" % self.general_rates)
+            else:
+                args.append("-r%s" % (",".join([str(r) for r in self.general_rates])))
+        if self.ancestral_seq:
+            args.append("-k%s" % self.ancestral_seq)
+        if self.output_text_append:
+            args.append("-x'%s'" % self.output_text_append)
+        if self.write_ancestral_seqs:
+            args.append("-wa")
+        if self.write_site_rates:
+            args.append("-wr")
+
+        # following are controlled directly by the wrapper
+        # silent running
+        args.append("-q")
+        # we explicitly pass a random number seed on each call
+        args.append("-z%s" % self.rng.randint(0, sys.maxsize))
+        # force nexus
+        args.append("-on")
+        # force one dataset at a time
+        args.append("-n1")
+        return args
+
+    def generate(self, trees, dataset=None, taxon_namespace=None, **kwargs):
+        """
+        Runs seq-gen on ``trees`` and returns a DataSet containing the
+        simulated characters (parsed from seq-gen's NEXUS output). If
+        ``dataset`` is given, results are read into it; otherwise a new
+        DataSet is created (with ``**kwargs``) and bound to
+        ``taxon_namespace`` (defaulting to that of ``trees``).
+        Raises RuntimeError if the seq-gen process fails.
+        """
+        args=self._compose_arguments()
+        tree_inputf = self.get_tempfile()
+        trees.write_to_path(tree_inputf.name,
+                "newick",
+                suppress_rooting=True,
+                suppress_internal_node_labels=True)
+        tree_inputf.flush()
+        args.append(tree_inputf.name)
+        #_LOG.debug("seq-gen args: = %s" % " ".join(args))
+        run = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        stdout, stderr = processio.communicate(run)
+        # NOTE(review): any stderr output is treated as fatal even when the
+        # return code is 0; confirm the targeted seq-gen build never emits
+        # benign warnings on stderr.
+        if stderr or run.returncode != 0:
+            raise RuntimeError("Seq-gen error: %s" % stderr)
+        if taxon_namespace is None:
+            taxon_namespace = trees.taxon_namespace
+        if dataset is None:
+            dataset = dendropy.DataSet(**kwargs)
+            if taxon_namespace is not None:
+                dataset.attach_taxon_namespace(taxon_namespace)
+        dataset.read(data=stdout, schema="nexus")
+        return dataset
+
+
+
diff --git a/dendropy/legacy/__init__.py b/dendropy/legacy/__init__.py
new file mode 100644
index 0000000..5035498
--- /dev/null
+++ b/dendropy/legacy/__init__.py
@@ -0,0 +1,19 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+
diff --git a/dendropy/legacy/coalescent.py b/dendropy/legacy/coalescent.py
new file mode 100644
index 0000000..8b2456c
--- /dev/null
+++ b/dendropy/legacy/coalescent.py
@@ -0,0 +1,106 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DEPRECATED IN DENDROPY 4: USE `dendropy.model.coalescent` instead.
+"""
+
+from dendropy.model import coalescent
+from dendropy.utility import deprecate
+from dendropy.utility import constants
+
+def discrete_time_to_coalescence(n_genes,
+                                 pop_size=None,
+                                 n_to_coalesce=2,
+                                 rng=None):
+    """Deprecated: delegates to ``dendropy.model.coalescent.discrete_time_to_coalescence()``."""
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.coalescent' module has moved to 'dendropy.model.coalescent'.",
+            old_construct="from dendropy import coalescent\ncoalescent.discrete_time_to_coalescence(...)",
+            new_construct="from dendropy.model import coalescent\ncoalescent.discrete_time_to_coalescence(...)")
+    return coalescent.discrete_time_to_coalescence(
+            n_genes=n_genes,
+            pop_size=pop_size,
+            n_to_coalesce=n_to_coalesce,
+            rng=rng)
+
+def time_to_coalescence(n_genes,
+        pop_size=None,
+        n_to_coalesce=2,
+        rng=None):
+    """Deprecated: delegates to ``dendropy.model.coalescent.time_to_coalescence()``."""
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.coalescent' module has moved to 'dendropy.model.coalescent'.",
+            old_construct="from dendropy import coalescent\ncoalescent.time_to_coalescence(...)",
+            new_construct="from dendropy.model import coalescent\ncoalescent.time_to_coalescence(...)")
+    return coalescent.time_to_coalescence(
+        n_genes=n_genes,
+        pop_size=pop_size,
+        n_to_coalesce=n_to_coalesce,
+        rng=rng)
+
+def expected_tmrca(n_genes, pop_size=None, n_to_coalesce=2):
+    """Deprecated: delegates to ``dendropy.model.coalescent.expected_tmrca()``."""
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.coalescent' module has moved to 'dendropy.model.coalescent'.",
+            old_construct="from dendropy import coalescent\ncoalescent.expected_tmrca(...)",
+            new_construct="from dendropy.model import coalescent\ncoalescent.expected_tmrca(...)")
+    return coalescent.expected_tmrca(n_genes, pop_size=pop_size, n_to_coalesce=n_to_coalesce)
+
+def coalesce(nodes,
+             pop_size=None,
+             period=None,
+             rng=None,
+             use_expected_tmrca=False):
+    """Deprecated: delegates to ``dendropy.model.coalescent.coalesce_nodes()`` (note rename)."""
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.coalescent' module has moved to 'dendropy.model.coalescent', and this function has been renamed 'coalesce_nodes'.",
+            old_construct="from dendropy import coalescent\ncoalescent.coalesce(...)",
+            new_construct="from dendropy.model import coalescent\ncoalescent.coalesce_nodes(...)")
+    return coalescent.coalesce_nodes(
+            nodes=nodes,
+            pop_size=pop_size,
+            period=period,
+            rng=rng,
+            use_expected_tmrca=use_expected_tmrca)
+
+def node_waiting_time_pairs(tree, ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION):
+    """Deprecated: delegates to ``dendropy.model.coalescent.node_waiting_time_pairs()``."""
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.coalescent' module has moved to 'dendropy.model.coalescent'.",
+            old_construct="from dendropy import coalescent\ncoalescent.node_waiting_time_pairs(...)",
+            new_construct="from dendropy.model import coalescent\ncoalescent.node_waiting_time_pairs(...)")
+    return coalescent.node_waiting_time_pairs(tree=tree,
+            ultrametricity_precision=ultrametricity_precision)
+
+def extract_coalescent_frames(*args, **kwargs):
+    """Deprecated: delegates to ``dendropy.model.coalescent.extract_coalescent_frames()``."""
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.coalescent' module has moved to 'dendropy.model.coalescent'.",
+            old_construct="from dendropy import coalescent\ncoalescent.extract_coalescent_frames(...)",
+            new_construct="from dendropy.model import coalescent\ncoalescent.extract_coalescent_frames(...)")
+    return coalescent.extract_coalescent_frames(*args, **kwargs)
+
+def log_probability_of_coalescent_frames(*args, **kwargs):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.coalescent' module has moved to 'dendropy.model.coalescent'.",
+            old_construct="from dendropy import coalescent\ncoalescent.log_probability_of_coalescent_frames(...)",
+            new_construct="from dendropy.model import coalescent\ncoalescent.log_probability_of_coalescent_frames(...)")
+    return log_probability_of_coalescent_frames(*args, **kwargs)
+
+def log_probability_of_coalescent_tree(*args, **kwargs):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.coalescent' module has moved to 'dendropy.model.coalescent'.",
+            old_construct="from dendropy import coalescent\ncoalescent.log_probability_of_coalescent_tree(...)",
+            new_construct="from dendropy.model import coalescent\ncoalescent.log_probability_of_coalescent_tree(...)")
+    return log_probability_of_coalescent_tree(*args, **kwargs)
+
diff --git a/dendropy/legacy/continuous.py b/dendropy/legacy/continuous.py
new file mode 100644
index 0000000..ac3bb58
--- /dev/null
+++ b/dendropy/legacy/continuous.py
@@ -0,0 +1,49 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DEPRECATED IN DENDROPY 4: USE `dendropy.model.continuous` instead.
+"""
+
+from dendropy.model import continuous
+from dendropy.utility import deprecate
+
+def simulate_continuous(node, rng=None, **kwargs):
+    """Deprecated: delegates to ``dendropy.model.continuous.evolve_continuous_char()`` (note rename)."""
+    deprecate.dendropy_deprecation_warning(
+            preamble="The 'dendropy.continuous' module has moved to 'dendropy.model.continuous', and this function has been renamed 'evolve_continuous_char()'.",
+            old_construct="from dendropy import continuous\ncontinuous.simulate_continuous(...)",
+            new_construct="from dendropy.model import continuous\ncontinuous.evolve_continuous_char(...)",
+            epilog="Note that this function is also available through 'dendropy.simulate.charsim.evolve_continuous_char(...)'.")
+    return continuous.evolve_continuous_char(node, rng, **kwargs)
+
+class PhylogeneticIndependentConstrasts(continuous.PhylogeneticIndependentConstrasts):
+    # Deprecated alias of the class in 'dendropy.model.continuous'. The
+    # misspelling ("Constrasts") mirrors the parent class's name and is
+    # preserved for backward compatibility -- do not "fix" it here.
+
+    def __init__(self,
+            tree,
+            char_matrix,
+            polytomy_strategy=None):
+        # Emit the deprecation notice, then defer entirely to the parent.
+        deprecate.dendropy_deprecation_warning(
+                preamble="The 'dendropy.continuous' module has moved to 'dendropy.model.continuous'.",
+                old_construct="from dendropy import continuous\ncontinuous.PhylogeneticIndependentConstrasts(...)",
+                new_construct="from dendropy.model import continuous\ncontinuous.PhylogeneticIndependentConstrasts(...)",
+                )
+        continuous.PhylogeneticIndependentConstrasts.__init__(self,
+                tree=tree,
+                char_matrix=char_matrix,
+                polytomy_strategy=polytomy_strategy)
+
diff --git a/dendropy/legacy/ncbi.py b/dendropy/legacy/ncbi.py
new file mode 100644
index 0000000..988c7d5
--- /dev/null
+++ b/dendropy/legacy/ncbi.py
@@ -0,0 +1,328 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wrappers for interacting with NCBI databases.
+*** DEPRECATED: use dendropy.interop.genbank.GenBankDna,
+dendropy.interop.genbank.GenBankRna, or
+dendropy.interop.genbank.GenBankProtein instead ***
+"""
+
+import warnings
+from dendropy.utility import messaging
+from dendropy.utility import urlio
+_LOG = messaging.get_logger(__name__)
+
+import sys
+import dendropy
+import re
+from dendropy.utility.error import DataParseError
+
+GB_FASTA_DEFLINE_PATTERN = re.compile(r'^gi\|(\d+)\|(\w+)\|([\w\d]+).(\d+)\|(.*)$')
+
+def parse_accession_number_and_gi_from_gb(gb_str):
+    """
+    Extract the ACCESSION and GI identifiers from a GenBank flat-file record
+    string and return them as an ``(accession, gi)`` tuple. Raises ValueError
+    if either field cannot be found.
+    """
+    accession = re.search(r"ACCESSION\s+(\S+)$", gb_str, flags=re.MULTILINE)
+    if accession is None:
+        raise ValueError("Failed to parse accession number")
+    accession = accession.groups()[0].strip()
+    gi = re.search(r'^VERSION\s+\S+\s+GI:([0-9]+)$', gb_str, flags=re.MULTILINE)
+    if gi is None:
+        raise ValueError("Failed to parse GI number")
+    gi = gi.groups()[0].strip()
+    return accession, gi
+
+def parse_ncbi_curation_info_from_defline(gb_defline):
+    """
+    Return ``(gi, accession, accession.version)`` parsed from a GenBank
+    FASTA defline, or None if the defline does not match
+    ``GB_FASTA_DEFLINE_PATTERN``.
+    """
+    m = GB_FASTA_DEFLINE_PATTERN.match(gb_defline)
+    if m is not None:
+        # groups: (0) GI, (2) accession, (3) version suffix
+        return m.groups()[0], m.groups()[2], m.groups()[2] + '.' + m.groups()[3]
+    else:
+        return None
+
+def compose_taxon_label_from_gb_defline(gb_defline,
+        num_desc_components=3,
+        separator='_',
+        gbnum_in_front=True,
+        exclude_gbnum=False):
+    """
+    If ``gb_defline`` matches a GenBank FASTA-format defline structure, then this returns a
+    label:
+
+        <GB-ACCESSION-ID><SEPARATOR><DESC_COMPONENT(1)><SEPARATOR><DESC_COMPONENT(2)>...<DESC_COMPONENT(n)>
+
+    So, for example, given the following FASTA label:
+
+        gi|158931046|gb|EU105975.1| Homo sapiens Ache non-coding region T1584 genomic sequence
+
+    the corresponding taxon 3-component (default) label will be:
+
+        EU105975_Homo_sapiens_Ache
+
+    If ``gb_defline`` does *not* match a GenBank FASTA-format defline structure, then the string
+    is returned unchanged.
+
+    ``num_desc_components`` caps how many description words are used;
+    ``gbnum_in_front`` places the accession first (True) or last (False);
+    ``exclude_gbnum`` omits the accession entirely.
+    """
+    m = GB_FASTA_DEFLINE_PATTERN.match(gb_defline)
+    if m is not None:
+        groups = m.groups()
+        # last group is the free-text description; split into words
+        desc_parts = [s.strip() for s in groups[-1].split() if s]
+        if exclude_gbnum:
+            label_parts = desc_parts[:num_desc_components]
+        elif gbnum_in_front:
+            # groups[2] is the accession number (without version suffix)
+            label_parts = [groups[2]] + desc_parts[:num_desc_components]
+        else:
+            label_parts = desc_parts[:num_desc_components] + [groups[2]]
+        return separator.join(label_parts)
+    else:
+        return gb_defline
+
+def relabel_taxa_from_defline(taxon_set,
+        num_desc_components=3,
+        separator='_',
+        gbnum_in_front=True,
+        exclude_gbnum=False):
+    """
+    Examines the labels of each |Taxon| object in ``taxon_set``, and if
+    conforming to a GenBank pattern, translates the labels to a standard
+    format:
+
+        <GB-ACCESSION-ID><SEPARATOR><DESC_COMPONENT(1)><SEPARATOR><DESC_COMPONENT(2)>...<DESC_COMPONENT(n)>
+
+    So, for example, given the following FASTA label:
+
+        gi|158931046|gb|EU105975.1| Homo sapiens Ache non-coding region T1584 genomic sequence
+
+    the corresponding taxon 3-component (default) label will be:
+
+        EU105975_Homo_sapiens_Ache
+
+    Labels are modified in place; the same ``taxon_set`` object is returned
+    for convenience.
+    """
+    for taxon in taxon_set:
+        taxon.label = compose_taxon_label_from_gb_defline(
+                gb_defline=taxon.label,
+                num_desc_components=num_desc_components,
+                separator=separator,
+                gbnum_in_front=gbnum_in_front,
+                exclude_gbnum=exclude_gbnum)
+    return taxon_set
+
+class Entrez(object):
+    """
+    Wraps up all interactions with Entrez.
+    Example usage::
+
+        >>> from dendropy.interop import ncbi
+        >>> e = ncbi.Entrez(generate_labels=True,
+        ... label_id_in_front=False,
+        ... sort_taxa_by_label=True)
+        >>> d1 = e.fetch_nucleotide_accessions(['EU105474', 'EU105476'])
+        >>> d2 = e.fetch_nucleotide_accession_range(105474, 106045, prefix="EU")
+
+    """
+
+    BASE_URL = "http://eutils.ncbi.nlm.nih.gov/entrez/eutils"
+    DATABASES = [
+        'pubmed',
+        'protein',
+        'nucleotide',
+        'nuccore',
+        'nucgss',
+        'nucest',
+        'structure',
+        'genome',
+        'biosystems',
+        'books',
+        'cancerchromosomes',
+        'cdd',
+        'gap',
+        'dbvar',
+        'domains',
+        'epigenomics',
+        'gene',
+        'genomeprj',
+        'gensat',
+        'geo',
+        'gds',
+        'homologene',
+        'journals',
+        'mesh',
+        'ncbisearch',
+        'nlmcatalog',
+        'omia',
+        'omim',
+        'pepdome',
+        'pmc',
+        'popset',
+        'probe',
+        'proteinclusters',
+        'pcassay',
+        'pccompound',
+        'pcsubstance',
+        'seqannot',
+        'snp',
+        'sra',
+        'taxonomy',
+        'toolkit',
+        'toolkitall',
+        'unigene',
+        'unists',
+        'linkoutpubmed',
+        'linkoutseq',
+        'linkoutother',
+        ]
+
+    class AccessionFetchError(Exception):
+
+        def __init__(self, accession_ids):
+            Exception.__init__(self, "Failed to retrieve accessions: %s" % (", ".join([str(s) for s in accession_ids])))
+
+    def __init__(self,
+            generate_labels=False,
+            label_num_desc_components=3,
+            label_separator='_',
+            label_id_in_front=True,
+            exclude_gbnum_from_label=False,
+            sort_taxa_by_label=False):
+        """
+        *** DEPRECATED: use dendropy.interop.genbank.GenBankDna,
+        dendropy.interop.genbank.GenBankRna, or
+        dendropy.interop.genbank.GenBankProtein instead ***
+        Instantiates a broker that queries NCBI and returns data.  If
+        ``generate_labels`` is `True`, then appropriate labels for sequences
+        will be automatically composed for each sequence based on the GenBank
+        FASTA defline. ``label_num_desc_components`` specifies the number of
+        components from the defline to use. ``label_separator`` specifies the
+        string used to separate the different label components.
+        ``label_id_in_front`` specifies whether the GenBank accession number
+        should form the beginning (`True`) or tail (`False`) end of the
+        label. ``sort_taxa_by_label`` specifies whether the sequences should be
+        sorted by their final label values.
+        """
+        warnings.warn("This class (and parent module) has been deprecated. Use the classes provided in 'dendropy.interop.genbak' instead", DeprecationWarning)
+        self.generate_labels = generate_labels
+        self.label_num_desc_components = label_num_desc_components
+        self.label_separator = label_separator
+        self.label_id_in_front = label_id_in_front
+        self.exclude_gbnum_from_label = exclude_gbnum_from_label
+        self.sort_taxa_by_label = sort_taxa_by_label
+
+    def fetch(self, db, ids, rettype):
+        """
+        Raw fetch. Returns file-like object opened for reading on string
+        returned by query.
+        """
+        if isinstance(ids, str):
+            id_list = ids
+        else:
+            id_list = ",".join([str(i) for i in set(ids)])
+        params = {'db': db,
+                'id': id_list,
+                'rettype': rettype,
+                'retmode': 'text'}
+        query_url = Entrez.BASE_URL + "/efetch.fcgi?" + urlio.urlencode(params)
+        return urlio.read_url(query_url)
+
+    def fetch_gbrecs_as_plaintext_dict(self,
+            db,
+            ids,
+            key_field="accession"):
+        db_name = "nucleotide"
+        gb_recs_str = self.fetch(db=db, ids=ids, rettype="gb")
+        gb_recs_str_list = re.split(r"^//$", gb_recs_str, flags=re.MULTILINE)
+        gb_recs_str_list = [gb_rec for gb_rec in gb_recs_str_list
+                    if gb_rec.replace("\n", "")]
+        gb_recs_dict = {}
+        for gb_str in gb_recs_str_list:
+            if not gb_str:
+                continue
+            try:
+                accession, gi = parse_accession_number_and_gi_from_gb(gb_str)
+            except TypeError:
+                print("---")
+                print(gb_str)
+                print("---")
+                raise
+            if key_field == "accession":
+                gb_recs_dict[accession] = gb_str
+            elif key_field == "gi":
+                gb_recs_dict[gi] = gb_str
+            else:
+                raise NotImplementedError("Key field '%s' is not supported" % key_field)
+        return gb_recs_dict
+
+    def fetch_nucleotide_accessions(self,
+            ids,
+            prefix=None,
+            verify=True,
+            matrix_type=dendropy.DnaCharacterMatrix,
+            **kwargs):
+        """
+        Returns a DnaCharacterMatrix object (or some other type, if specified
+        by ``matrix_type`` argument) populated with sequences from the Entrez
+        nucleotide database with accession numbers given by ``ids`` (a list of
+        accession numbers). If ``prefix`` is given, it is pre-pended to all values
+        given in the id list. Any other keyword arguments given are passed to
+        the constructor of |DnaCharacterMatrix|.
+        **Note that the order of records is *not* the same as the order of ids!!!**
+        """
+        if prefix is not None:
+            id_list = ["%s%s" % (prefix,i) for i in ids]
+        else:
+            id_list = [str(i) for i in ids]
+        results_str = self.fetch(db='nucleotide', ids=id_list, rettype='fasta')
+        try:
+            d = matrix_type.get_from_string(results_str, 'fasta', **kwargs)
+        except DataParseError:
+            sys.stderr.write("---\nNCBI Entrez Query returned:\n%s\n---\n" % results_str)
+            raise
+        for taxon in d.taxon_set:
+            taxon.ncbi_defline = taxon.label
+            taxon.ncbi_gi, taxon.ncbi_accession, taxon.ncbi_version = parse_ncbi_curation_info_from_defline(taxon.ncbi_defline)
+        if verify:
+            found_ids = set([t.ncbi_accession for t in d.taxon_set])
+            missing_ids = set(id_list).difference(found_ids)
+            found_ids = set([t.ncbi_gi for t in d.taxon_set])
+            missing_ids = set(missing_ids).difference(found_ids)
+            if len(missing_ids) > 0:
+                raise Entrez.AccessionFetchError(missing_ids)
+        if self.generate_labels:
+            relabel_taxa_from_defline(d.taxon_set,
+                    num_desc_components=self.label_num_desc_components,
+                    separator=self.label_separator,
+                    gbnum_in_front=self.label_id_in_front,
+                    exclude_gbnum=self.exclude_gbnum_from_label)
+        if self.sort_taxa_by_label:
+            d.taxon_set.sort(key=lambda x: x.label)
+        return d
+
+    def fetch_nucleotide_accession_range(self,
+            first,
+            last,
+            prefix=None,
+            verify=True,
+            matrix_type=dendropy.DnaCharacterMatrix,
+            **kwargs):
+        """
+        Returns a DnaCharacterMatrix object (or some other type, if specified
+        by the ``matrix_type`` argument) populated with sequences from the
+        Entrez nucleotide database with accession numbers between ``start``
+        and, up to and *including* ``end``. If ``prefix`` is given, then it is
+        pre-pended to the ids. Any other keyword arguments given are passed to
+        thee constructor of |DnaCharacterMatrix|.
+        """
+        ids = range(first, last+1)
+        return self.fetch_nucleotide_accessions(ids=ids, prefix=prefix, verify=verify, matrix_type=matrix_type, **kwargs)
+
diff --git a/dendropy/legacy/popgensim.py b/dendropy/legacy/popgensim.py
new file mode 100644
index 0000000..5d3afc9
--- /dev/null
+++ b/dendropy/legacy/popgensim.py
@@ -0,0 +1,34 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DEPRECATED IN DENDROPY 4: USE `dendropy.simulate.popgensim` instead.
+"""
+
+from dendropy.utility import deprecate
+from dendropy.simulate import popgensim
+
+class FragmentedPopulations(popgensim.FragmentedPopulations):
+    # Deprecated alias of dendropy.simulate.popgensim.FragmentedPopulations;
+    # warns on construction, then defers entirely to the parent.
+
+    def __init__(self, *args, **kwargs):
+        deprecate.dendropy_deprecation_warning(
+                preamble="The 'dendropy.popgensim' module has moved to 'dendropy.simulate.popgensim'.",
+                old_construct="from dendropy import popgensim\npopgensim.FragmentedPopulations(...)",
+                new_construct="from dendropy.simulate import popgensim\npopgensim.FragmentedPopulations(...)",
+                )
+        popgensim.FragmentedPopulations.__init__(self, *args, **kwargs)
diff --git a/dendropy/legacy/popgenstat.py b/dendropy/legacy/popgenstat.py
new file mode 100644
index 0000000..6ca7bd1
--- /dev/null
+++ b/dendropy/legacy/popgenstat.py
@@ -0,0 +1,94 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DEPRECATED IN DENDROPY 4: USE `dendropy.calculate.popgenstat` instead.
+"""
+
+from dendropy.utility import deprecate
+from dendropy.calculate import popgenstat
+
+def num_segregating_sites(char_matrix, ignore_uncertain=True):
+    deprecate.dendropy_deprecation_warning(
+            preamble="The 'dendropy.popgenstat' module has moved to 'dendropy.calculate.popgenstat'.",
+            old_construct="from dendropy import popgenstat\npopgenstat.num_segregating_sites(...)",
+            new_construct="from dendropy.calculate import popgenstat\npopgenstat.num_segregating_sites(...)",
+            )
+    return popgenstat.num_segregating_sites(char_matrix, ignore_uncertain)
+
+def average_number_of_pairwise_differences(char_matrix, ignore_uncertain=True):
+    deprecate.dendropy_deprecation_warning(
+            preamble="The 'dendropy.popgenstat' module has moved to 'dendropy.calculate.popgenstat'.",
+            old_construct="from dendropy import popgenstat\npopgenstat.average_number_of_pairwise_differences(...)",
+            new_construct="from dendropy.calculate import popgenstat\npopgenstat.average_number_of_pairwise_differences(...)",
+            )
+    return popgenstat.average_number_of_pairwise_differences(char_matrix=char_matrix, ignore_uncertain=ignore_uncertain)
+
+def nucleotide_diversity(char_matrix, ignore_uncertain=True):
+    deprecate.dendropy_deprecation_warning(
+            preamble="The 'dendropy.popgenstat' module has moved to 'dendropy.calculate.popgenstat'.",
+            old_construct="from dendropy import popgenstat\npopgenstat.nucleotide_diversity(...)",
+            new_construct="from dendropy.calculate import popgenstat\npopgenstat.nucleotide_diversity(...)",
+            )
+    return popgenstat.nucleotide_diversity(char_matrix=char_matrix, ignore_uncertain=ignore_uncertain)
+
+def tajimas_d(char_matrix, ignore_uncertain=True):
+    deprecate.dendropy_deprecation_warning(
+            preamble="The 'dendropy.popgenstat' module has moved to 'dendropy.calculate.popgenstat'.",
+            old_construct="from dendropy import popgenstat\npopgenstat.tajimas_d(...)",
+            new_construct="from dendropy.calculate import popgenstat\npopgenstat.tajimas_d(...)",
+            )
+    return popgenstat.tajimas_d(char_matrix=char_matrix, ignore_uncertain=ignore_uncertain)
+
+def wattersons_theta(char_matrix, ignore_uncertain=True):
+    deprecate.dendropy_deprecation_warning(
+            preamble="The 'dendropy.popgenstat' module has moved to 'dendropy.calculate.popgenstat'.",
+            old_construct="from dendropy import popgenstat\npopgenstat.watterson_theta(...)",
+            new_construct="from dendropy.calculate import popgenstat\npopgenstat.watterson_theta(...)",
+            )
+    return popgenstat.wattersons_theta(char_matrix=char_matrix, ignore_uncertain=ignore_uncertain)
+
+def derived_state_matrix(char_matrix, ancestral_seq=None):
+    deprecate.dendropy_deprecation_warning(
+            preamble="The 'dendropy.popgenstat' module has moved to 'dendropy.calculate.popgenstat'.",
+            old_construct="from dendropy import popgenstat\npopgenstat.derived_state_matrix(...)",
+            new_construct="from dendropy.calculate import popgenstat\npopgenstat.derived_state_matrix(...)",
+            )
+    return popgenstat.derived_state_matrix(
+            char_matrix=char_matrix,
+            ancestral_sequence=ancestral_seq)
+
+def unfolded_site_frequency_spectrum(char_matrix, ancestral_seq=None, pad=True):
+    deprecate.dendropy_deprecation_warning(
+            preamble="The 'dendropy.popgenstat' module has moved to 'dendropy.calculate.popgenstat'.",
+            old_construct="from dendropy import popgenstat\npopgenstat.unfolded_site_frequency_spectrum(...)",
+            new_construct="from dendropy.calculate import popgenstat\npopgenstat.unfolded_site_frequency_spectrum(...)",
+            )
+    return popgenstat.unfolded_site_frequency_spectrum(
+            char_matrix=char_matrix,
+            ancestral_sequence=ancestral_seq,
+            pad=pad)
+
+class PopulationPairSummaryStatistics(popgenstat.PopulationPairSummaryStatistics):
+    def __init__(self, *args, **kwargs):
+        deprecate.dendropy_deprecation_warning(
+                preamble="The 'dendropy.popgenstat' module has moved to 'dendropy.calculate.popgenstat'.",
+                old_construct="from dendropy import popgenstat\npopgenstat.PopulationPairSummaryStatistics(...)",
+                new_construct="from dendropy.calculate import popgenstat\npopgenstat.PopulationPairSummaryStatistics(...)",
+                )
+        popgenstat.PopulationPairSummaryStatistics.__init__(self, *args, **kwargs)
diff --git a/dendropy/legacy/reconcile.py b/dendropy/legacy/reconcile.py
new file mode 100644
index 0000000..773fc77
--- /dev/null
+++ b/dendropy/legacy/reconcile.py
@@ -0,0 +1,49 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DEPRECATED IN DENDROPY 4: USE `dendropy.model.reconcile` instead.
+"""
+
+from dendropy.utility import deprecate
+from dendropy.model import reconcile
+
+def reconciliation_discordance(gene_tree, species_tree):
+    deprecate.dendropy_deprecation_warning(
+            preamble="The 'dendropy.reconcile' module has moved to 'dendropy.model.reconcile'.",
+            old_construct="from dendropy import reconcile\nreconcile.reconciliation_discordance(...)",
+            new_construct="from dendropy.model import reconcile\nreconcile.reconciliation_discordance(...)",
+            )
+    return reconcile.reconciliation_discordance(gene_tree, species_tree)
+
+def monophyletic_partition_discordance(tree, taxon_namespace_partition):
+    deprecate.dendropy_deprecation_warning(
+            preamble="The 'dendropy.reconcile' module has moved to 'dendropy.model.reconcile'.",
+            old_construct="from dendropy import reconcile\nreconcile.monophyletic_partition_discordance(...)",
+            new_construct="from dendropy.model import reconcile\nreconcile.monophyletic_partition_discordance(...)",
+            )
+    return reconcile.monophyletic_partition_discordance(tree, taxon_namespace_partition)
+
+class ContainingTree(reconcile.ContainingTree):
+    def __init__(self, *args, **kwargs):
+        deprecate.dendropy_deprecation_warning(
+                preamble="The 'dendropy.reconcile' module has moved to 'dendropy.model.reconcile'.",
+                old_construct="from dendropy import reconcile\nreconcile.ContainingTree(...)",
+                new_construct="from dendropy.model import reconcile\nreconcile.ContainingTree(...)",
+                )
+        reconcile.ContainingTree.__init__(self, *args, **kwargs)
diff --git a/dendropy/legacy/seqmodel.py b/dendropy/legacy/seqmodel.py
new file mode 100644
index 0000000..8f26d5a
--- /dev/null
+++ b/dendropy/legacy/seqmodel.py
@@ -0,0 +1,60 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DEPRECATED IN DENDROPY 4: USE `dendropy.model.discrete`.
+"""
+
+from dendropy.model import discrete
+from dendropy.utility import deprecate
+
+class SeqModel(discrete.DiscreteCharacterEvolutionModel):
+    def __init__(self, state_alphabet, rng=None):
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: The 'dendropy.seqmodel.SeqModel' class has moved to 'dendropy.model.discrete.DiscreteCharacterEvolutionModel'.",
+                old_construct="from dendropy import seqmodel\nm = seqmodel.SeqModel(...)",
+                new_construct="from dendropy.model import discrete\nm = discrete.DiscreteCharacterEvolutionModel(...)")
+        discrete.DiscreteCharacterEvolutionModel.__init__(
+                self,
+                state_alphabet=state_alphabet,
+                rng=rng)
+
+class Hky85SeqModel(discrete.Hky85):
+    def __init__(self, kappa=1.0, base_freqs=None, state_alphabet=None, rng=None):
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: The 'dendropy.seqmodel.Hky85SeqModel' class has moved to 'dendropy.model.discrete.Hky85'.",
+                old_construct="from dendropy import seqmodel\nm = seqmodel.NucleotideSeqModel(...)",
+                new_construct="from dendropy.model import discrete\ndiscrete.Hky85(...)")
+        discrete.Hky85.__init__(
+                self,
+                kappa=kappa,
+                base_freqs=base_freqs,
+                state_alphabet=state_alphabet,
+                rng=rng)
+
+class Jc69SeqModel(discrete.Jc69):
+    def __init__(self, state_alphabet=None, rng=None):
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: The 'dendropy.seqmodel.Jc69SeqModel' class has moved to 'dendropy.model.discrete.Jc69'.",
+                old_construct="from dendropy import seqmodel\nm = seqmodel.NucleotideSeqModel(...)",
+                new_construct="from dendropy.model import discrete\ndiscrete.Jc69(...)")
+        discrete.Jc69.__init__(
+                self,
+                state_alphabet=state_alphabet,
+                rng=rng)
+
diff --git a/dendropy/legacy/seqsim.py b/dendropy/legacy/seqsim.py
new file mode 100644
index 0000000..c19ff7c
--- /dev/null
+++ b/dendropy/legacy/seqsim.py
@@ -0,0 +1,107 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DEPRECATED IN DENDROPY 4: USE `dendropy.simulate.charsim`.
+"""
+
+import dendropy
+from dendropy.simulate import charsim
+from dendropy.utility import deprecate
+
+def generate_hky_dataset(seq_len,
+                         tree_model,
+                         mutation_rate=1.0,
+                         kappa=1.0,
+                         base_freqs=[0.25, 0.25, 0.25, 0.25],
+                         root_states=None,
+                         dataset=None,
+                         rng=None):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.seqsim.generate_hky_dataset()' function has been replaced with 'dendropy.simulate.charsim.hky85_chars()'.",
+            old_construct="from dendropy import seqsim\ndataset = seqsim.generate_hky_dataset(...)",
+            new_construct="import dendropy\nfrom dendropy.simulate import charsim\ndataset = dendropy.DataSet()\nchar_matrix = charsim.hky85_chars(...)\ndataset.add_char_matrix(char_matrix)")
+    if dataset is None:
+        dataset = dendropy.DataSet()
+    char_matrix = dataset.new_char_matrix(char_matrix_type="dna", taxon_namespace=tree_model.taxon_namespace)
+    charsim.hky85_chars(
+            seq_len=seq_len,
+            tree_model=tree_model,
+            mutation_rate=mutation_rate,
+            kappa=kappa,
+            base_freqs=base_freqs,
+            root_states=root_states,
+            char_matrix=char_matrix,
+            rng=rng)
+    return dataset
+
+def generate_hky_characters(seq_len,
+                            tree_model,
+                            mutation_rate=1.0,
+                            kappa=1.0,
+                            base_freqs=[0.25, 0.25, 0.25, 0.25],
+                            root_states=None,
+                            char_matrix=None,
+                            rng=None):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.seqsim.generate_hky_characters()' function has been replaced with 'dendropy.simulate.charsim.hky85_chars()'.",
+            old_construct="from dendropy import seqsim\nchar_matrix = seqsim.generate_hky_characters(...)",
+            new_construct="from dendropy.simulate import charsim\nchar_matrix = charsim.hky85_chars(...)")
+    return charsim.hky85_chars(
+            seq_len=seq_len,
+            tree_model=tree_model,
+            mutation_rate=mutation_rate,
+            kappa=kappa,
+            base_freqs=base_freqs,
+            root_states=root_states,
+            char_matrix=char_matrix,
+            rng=rng)
+
+# def generate_dataset(seq_len,
+#                      tree_model,
+#                      seq_model,
+#                      mutation_rate=1.0,
+#                      root_states=None,
+#                      dataset=None,
+#                      rng=None):
+#     deprecate.dendropy_deprecation_warning(
+#             preamble="Deprecated since DendroPy 4: The 'dendropy.seqsim.generate_hky_characters()' function has been replaced with 'dendropy.simulate.charsim.hky85_chars()'.",
+#             old_construct="from dendropy import seqsim\nchar_matrix = seqsim.generate_hky_characters(...)",
+#             new_construct="from dendropy.simulate import charsim\nchar_matrix = discrete.hky85_chars(...)")
+
+# def generate_char_matrix(seq_len,
+#                         tree_model,
+#                         seq_model,
+#                         mutation_rate=1.0,
+#                         root_states=None,
+#                         char_matrix=None,
+#                         rng=None):
+
+#     pass
+
+# class SeqEvolver(object):
+#     def __init__(self,
+#      seq_model=None,
+#      mutation_rate=None,
+#      seq_attr='sequences',
+#      seq_model_attr="seq_model",
+#      edge_length_attr="length",
+#      edge_rate_attr="mutation_rate",
+#      seq_label_attr='taxon'):
+#         pass
+
diff --git a/dendropy/legacy/treecalc.py b/dendropy/legacy/treecalc.py
new file mode 100644
index 0000000..6d55f09
--- /dev/null
+++ b/dendropy/legacy/treecalc.py
@@ -0,0 +1,134 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DEPRECATED IN DENDROPY 4: USE
+`dendropy.calculate.treecompare`, `dendropy.calculate.treemeasure`,
+or `dendropy.calculate.treescore`.
+"""
+
+import dendropy
+from dendropy.calculate import treecompare
+from dendropy.calculate import treemeasure
+from dendropy.calculate import treescore
+from dendropy.utility import deprecate
+
+##############################################################################
+## dendropy.calculate.treemeasure
+
+class PatristicDistanceMatrix(treemeasure.PatristicDistanceMatrix):
+    def __init__(self, tree=None):
+        deprecate.dendropy_deprecation_warning(
+                preamble="Deprecated since DendroPy 4: The 'dendropy.treecalc.PatristicDistanceMatrix' class has moved to 'dendropy.calculate.treemeasure.PatristicDistanceMatrix'.",
+                old_construct="from dendropy import treecalc\npdm = treecalc.PatristicDistanceMatrix(...)",
+                new_construct="from dendropy.calculate import treemeasure\npdm = treemeasure.PatristicDistanceMatrix(...)")
+        treemeasure.PatristicDistanceMatrix.__init__(self, tree=tree)
+
+def patristic_distance(tree, taxon1, taxon2):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treecalc.patristic_distance()' function has moved to 'dendropy.calculate.treemeasure.patristic_distance()'.",
+            old_construct="from dendropy import treecalc\npdm = treecalc.patristic_distance(...)",
+            new_construct="from dendropy.calculate import treemeasure\npdm = treemeasure.patristic_distance(...)")
+    return treemeasure.patristic_distance(
+            tree=tree,
+            taxon1=taxon1,
+            taxon2=taxon2)
+
+##############################################################################
+## dendropy.calculate.treecompare
+
+def robinson_foulds_distance(tree1,
+        tree2,
+        edge_length_attr="length"):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treecalc.robinson_foulds_distance()' function has moved to 'dendropy.calculate.treecompare.weighted_robinson_foulds_distance()'.",
+            old_construct="from dendropy import treecalc\nd = treecalc.robinson_foulds_distance(...)",
+            new_construct="from dendropy.calculate import treecompare\nd = treecompare.weighted_robinson_foulds_distance(...)")
+    return treecompare.weighted_robinson_foulds_distance(
+            tree1=tree1,
+            tree2=tree2,
+            edge_weight_attr=edge_length_attr)
+
+def euclidean_distance(tree1,
+        tree2,
+        edge_length_attr="length",
+        value_type=float):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treecalc.euclidean_distance()' function has moved to 'dendropy.calculate.treecompare.euclidean_distance()'.",
+            old_construct="from dendropy import treecalc\nd = treecalc.euclidean_distance(...)",
+            new_construct="from dendropy.calculate import treecompare\nd = treecompare.euclidean_distance(...)")
+    return treecompare.euclidean_distance(
+            tree1=tree1,
+            tree2=tree2,
+            edge_weight_attr=edge_length_attr,
+            value_type=value_type)
+
+def false_positives_and_negatives(reference_tree, test_tree):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treecalc.false_positives_and_negatives()' function has moved to 'dendropy.calculate.treecompare.false_positives_and_negatives()'.",
+            old_construct="from dendropy import treecalc\nd = treecalc.false_positives_and_negatives(...)",
+            new_construct="from dendropy.calculate import treecompare\nd = treecompare.false_positives_and_negatives(...)")
+    return treecompare.false_positives_and_negatives(
+            reference_tree=reference_tree,
+            comparison_tree=test_tree)
+
+def symmetric_difference(tree1, tree2):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treecalc.symmetric_difference()' function has moved to 'dendropy.calculate.treecompare.symmetric_difference()'.",
+            old_construct="from dendropy import treecalc\nd = treecalc.symmetric_difference(...)",
+            new_construct="from dendropy.calculate import treecompare\nd = treecompare.symmetric_difference(...)")
+    return treecompare.symmetric_difference(
+            tree1=tree1,
+            tree2=tree2)
+
+def find_missing_splits(reference_tree, test_tree):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treecalc.find_missing_splits()' function has moved to 'dendropy.calculate.treecompare.find_missing_splits()'.",
+            old_construct="from dendropy import treecalc\nd = treecalc.find_missing_splits(...)",
+            new_construct="from dendropy.calculate import treecompare\nd = treecompare.find_missing_splits(...)")
+    return treecompare.find_missing_splits(
+            reference_tree=reference_tree,
+            comparison_tree=test_tree)
+
+##############################################################################
+## dendropy.calculate.treescore
+
+# def fitch_up_pass(preorder_node_list, attr_name="state_sets", taxa_to_state_set_map=None):
+#     deprecate.dendropy_deprecation_warning(
+#             preamble="Deprecated since DendroPy 4: The 'dendropy.treecalc.euclidean_distance()' function has moved to 'dendropy.calculate.treecompare.euclidean_distance()'.",
+#             old_construct="from dendropy import treecalc\nd = treecalc.euclidean_distance(...)",
+#             new_construct="from dendropy.calculate import treecompare\nd = treecompare.euclidean_distance(...)")
+#     return treecompare.euclidean_distance(
+#             tree1=tree1,
+#             tree2=tree2,
+#             edge_weight_attr=edge_length_attr,
+#             value_type=value_type)
+
+# def fitch_down_pass(postorder_node_list, attr_name="state_sets", weight_list=None, taxa_to_state_set_map=None):
+#     deprecate.dendropy_deprecation_warning(
+#             preamble="Deprecated since DendroPy 4: The 'dendropy.treecalc.euclidean_distance()' function has moved to 'dendropy.calculate.treecompare.euclidean_distance()'.",
+#             old_construct="from dendropy import treecalc\nd = treecalc.euclidean_distance(...)",
+#             new_construct="from dendropy.calculate import treecompare\nd = treecompare.euclidean_distance(...)")
+#     return treecompare.euclidean_distance(
+#             tree1=tree1,
+#             tree2=tree2,
+#             edge_weight_attr=edge_length_attr,
+#             value_type=value_type)
+
+# def mason_gamer_kellogg_score(tree1, tree2):
+
diff --git a/dendropy/legacy/treemanip.py b/dendropy/legacy/treemanip.py
new file mode 100644
index 0000000..7bc4a9b
--- /dev/null
+++ b/dendropy/legacy/treemanip.py
@@ -0,0 +1,78 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+All of these functions are now native methods of the Tree class. They are
+maintained here only for backward compatibility.
+"""
+
+from dendropy.utility import deprecate
+
+def _deprecate_tree_manip(old, new):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: Tree structure manipulation and editing functionality are now native methods of the 'dendropy.Tree' class.",
+            old_construct=old,
+            new_construct=new,
+            stacklevel=4)
+
+def collapse_edge(edge):
+    _deprecate_tree_manip("treemanip.collapse_edge(edge)", "edge.collapse()")
+    edge.collapse()
+
+def collapse_clade(node):
+    _deprecate_tree_manip("treemanip.collapse_clade(node)", "node.collapse_clade()")
+    node.collapse_clade()
+
+def prune_subtree(tree, node, suppress_unifurcations=True):
+    _deprecate_tree_manip("treemanip.prune_subtree(tree, node)", "tree.prune_subtree(node)")
+    tree.prune_subtree(node=node,
+            suppress_unifurcations=suppress_unifurcations)
+    return tree
+
+def prune_leaves_without_taxa(tree, suppress_unifurcations=True):
+    _deprecate_tree_manip("treemanip.prune_leaves_without_taxa(tree)", "tree.prune_leaves_without_taxa()")
+    tree.prune_leaves_without_taxa(suppress_unifurcations=suppress_unifurcations)
+    return tree
+
+def prune_taxa(tree, taxa, suppress_unifurcations=True):
+    _deprecate_tree_manip("treemanip.prune_taxa(tree, taxa)", "tree.prune_taxa(taxa)")
+    tree.prune_taxa(taxa=taxa,
+            suppress_unifurcations=suppress_unifurcations)
+    return tree
+
+def retain_taxa(tree, taxa, suppress_unifurcations=True):
+    _deprecate_tree_manip("treemanip.retain_taxa(tree, taxa)", "tree.retain_taxa(taxa)")
+    tree.retain_taxa(taxa=taxa,
+            suppress_unifurcations=suppress_unifurcations)
+
+def randomly_reorient_tree(tree, rng=None, splits=False):
+    _deprecate_tree_manip("randomly_reorient_tree(tree)", "tree.randomly_reorient()")
+    tree.randomly_reorient(rng=rng, update_splits=splits)
+
+def randomly_rotate(tree, rng=None):
+    _deprecate_tree_manip("treemanip.randomly_rotate(tree)", "tree.randomly_rotate()")
+    tree.randomly_rotate(rng=rng)
+
+def collapse_conflicting(subtree_root, split, split_bitmask):
+    _deprecate_tree_manip("treemanip.collapse_conflicting(node)", "node.collapse_conflicting()")
+    subtree_root.collapse_conflicting(split, split_bitmask)
+
+def scale_edges(tree, edge_len_multiplier):
+    _deprecate_tree_manip("treemanip.scale_edges(tree)", "tree.scale_edges()")
+    tree.scale_edges(edge_len_multiplier=edge_len_multiplier)
+
diff --git a/dendropy/legacy/treesim.py b/dendropy/legacy/treesim.py
new file mode 100644
index 0000000..244ee0b
--- /dev/null
+++ b/dendropy/legacy/treesim.py
@@ -0,0 +1,95 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DEPRECATED IN DENDROPY 4: USE `dendropy.simulate.treesim` instead.
+"""
+
+from dendropy.simulate import treesim
+from dendropy.simulate import popgensim
+from dendropy.utility import deprecate
+
+def pop_gen_tree(*args, **kwargs):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treesim.pop_gen_tree()' function has moved to 'dendropy.simulate.popgensim.pop_gen_tree()'.",
+            old_construct="from dendropy import treesim\ntree = treesim.pop_gen_tree(...)",
+            new_construct="from dendropy.simulate import popgensim\ntree = popgensim.pop_gen_tree(...)")
+    return popgensim.pop_gen_tree(*args, **kwargs)
+
+def birth_death(*args, **kwargs):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treesim.birth_death()' function has moved to 'dendropy.simulate.treesim.birth_death_tree()'.",
+            old_construct="from dendropy import treesim\ntree = treesim.birth_death(...)",
+            new_construct="from dendropy.simulate import treesim\ntree = treesim.birth_death_tree(...)")
+    return treesim.birth_death_tree(*args, **kwargs)
+
+# from dendropy.model.birthdeath import discrete_birth_death_tree as discrete_birth_death
+def discrete_birth_death(*args, **kwargs):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treesim.discrete_birth_death()' function has moved to 'dendropy.simulate.treesim.discrete_birth_death_tree()'.",
+            old_construct="from dendropy import treesim\ntree = treesim.discrete_birth_death(...)",
+            new_construct="from dendropy.simulate import treesim\ntree = treesim.discrete_birth_death_tree(...)")
+    return treesim.discrete_birth_death_tree(*args, **kwargs)
+
+# from dendropy.model.birthdeath import uniform_pure_birth_tree as uniform_pure_birth
+def uniform_pure_birth(*args, **kwargs):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treesim.uniform_pure_birth()' function has moved to 'dendropy.simulate.treesim.uniform_pure_birth_tree()'.",
+            old_construct="from dendropy import treesim\ntree = treesim.uniform_pure_birth(...)",
+            new_construct="from dendropy.simulate import treesim\ntree = treesim.uniform_pure_birth_tree(...)")
+    return treesim.uniform_pure_birth_tree(*args, **kwargs)
+
+# from dendropy.model.coalescent import contained_coalescent_tree as contained_coalescent
+def contained_coalescent(*args, **kwargs):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treesim.contained_coalescent()' function has moved to 'dendropy.simulate.treesim.contained_coalescent_tree()'.",
+            old_construct="from dendropy import treesim\ntree = treesim.contained_coalescent(...)",
+            new_construct="from dendropy.simulate import treesim\ntree = treesim.contained_coalescent_tree(...)")
+    return treesim.contained_coalescent_tree(*args, **kwargs)
+
+# from dendropy.model.coalescent import pure_kingman_tree as pure_kingman
+def pure_kingman(*args, **kwargs):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treesim.pure_kingman()' function has moved to 'dendropy.simulate.treesim.pure_kingman_tree()'.",
+            old_construct="from dendropy import treesim\ntree = treesim.pure_kingman(...)",
+            new_construct="from dendropy.simulate import treesim\ntree = treesim.pure_kingman_tree(...)")
+    return treesim.pure_kingman_tree(*args, **kwargs)
+
+# from dendropy.model.coalescent import mean_kingman_tree as mean_kingman
+def mean_kingman(*args, **kwargs):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treesim.mean_kingman()' function has moved to 'dendropy.simulate.treesim.mean_kingman_tree()'.",
+            old_construct="from dendropy import treesim\ntree = treesim.mean_kingman(...)",
+            new_construct="from dendropy.simulate import treesim\ntree = treesim.mean_kingman_tree(...)")
+    return treesim.mean_kingman_tree(*args, **kwargs)
+
+# from dendropy.model.coalescent import constrained_kingman_tree as constrained_kingman
+def constrained_kingman(*args, **kwargs):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treesim.constrained_kingman()' function has moved to 'dendropy.simulate.treesim.constrained_kingman_tree()'.",
+            old_construct="from dendropy import treesim\ntree = treesim.constrained_kingman(...)",
+            new_construct="from dendropy.simulate import treesim\ntree = treesim.constrained_kingman_tree(...)")
+    return treesim.constrained_kingman_tree(*args, **kwargs)
+
+# from dendropy.model.treeshape import star_tree
+def star_tree(*args, **kwargs):
+    deprecate.dendropy_deprecation_warning(
+            preamble="Deprecated since DendroPy 4: The 'dendropy.treesim.star()' function has moved to 'dendropy.simulate.treesim.star_tree()'.",
+            old_construct="from dendropy import treesim\ntree = treesim.star(...)",
+            new_construct="from dendropy.simulate import treesim\ntree = treesim.star_tree(...)")
+    return treesim.star_tree(*args, **kwargs)
diff --git a/dendropy/legacy/treesplit.py b/dendropy/legacy/treesplit.py
new file mode 100644
index 0000000..c17da71
--- /dev/null
+++ b/dendropy/legacy/treesplit.py
@@ -0,0 +1,179 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Split calculation and management.
+DEPRECATED IN DENDROPY 4.
+"""
+
+import dendropy
+from dendropy.utility import bitprocessing
+from dendropy.utility import deprecate
+
def tree_from_splits(splits, taxon_set, is_rooted=False):
    """Deprecated: warns, then delegates to
    :meth:`dendropy.Tree.from_split_bitmasks`.
    """
    deprecate.dendropy_deprecation_warning(
            preamble="Deprecated since DendroPy 4: 'dendropy.treesplit.tree_from_splits()'.",
            old_construct="from dendropy import treesplit\ntree = treesplit.tree_from_splits(...)",
            new_construct="import dendropy\ntree = dendropy.Tree.from_split_bitmasks(...)")
    result = dendropy.Tree.from_split_bitmasks(
            split_bitmasks=splits,
            taxon_namespace=taxon_set,
            is_rooted=is_rooted)
    return result
+
def split_to_list(s, mask=-1, one_based=False, ordination_in_mask=False):
    """Deprecated: warns, then delegates to
    :func:`dendropy.utility.bitprocessing.indexes_of_set_bits`.
    """
    deprecate.dendropy_deprecation_warning(
            preamble="Deprecated since DendroPy 4: 'dendropy.treesplit.split_to_list()'.",
            old_construct="from dendropy import treesplit\nd = treesplit.split_to_list(...)",
            new_construct="from dendropy.utility import bitprocessing\nd = bitprocessing.indexes_of_set_bits(...)")
    # Map the legacy parameter names onto the replacement's signature.
    forwarded = dict(
            s=s,
            fill_bitmask=mask,
            one_based=one_based,
            ordination_in_mask=ordination_in_mask)
    return bitprocessing.indexes_of_set_bits(**forwarded)
+
def is_trivial_split(split, mask):
    """Deprecated: warns, then delegates to
    :meth:`dendropy.Bipartition.is_trivial_bitmask`.
    """
    deprecate.dendropy_deprecation_warning(
            preamble="Deprecated since DendroPy 4: 'dendropy.treesplit.is_trivial_split()'.",
            old_construct="from dendropy import treesplit\nd = treesplit.is_trivial_split(...)",
            new_construct="import dendropy\nd = dendropy.Bipartition.is_trivial_bitmask(...)")
    result = dendropy.Bipartition.is_trivial_bitmask(
            bitmask=split,
            fill_bitmask=mask)
    return result
+
def is_non_singleton_split(split, mask):
    """Return True if ``split`` is *not* a trivial split under ``mask``.

    Deprecated: use ``not dendropy.Bipartition.is_trivial_bitmask(...)``
    instead.
    """
    deprecate.dendropy_deprecation_warning(
            preamble="Deprecated since DendroPy 4: 'dendropy.treesplit.is_non_singleton_split()'.",
            old_construct="from dendropy import treesplit\nd = treesplit.is_non_singleton_split(...)",
            new_construct="import dendropy\nd = not dendropy.Bipartition.is_trivial_bitmask(...)")
    # Bug fix: the result must be the NEGATION of is_trivial_bitmask() --
    # exactly as the replacement construct advertised above shows.
    # Previously the un-negated value was returned, inverting the predicate.
    return not dendropy.Bipartition.is_trivial_bitmask(bitmask=split, fill_bitmask=mask)
+
def split_as_string(split_mask, width, symbol1=None, symbol2=None):
    """Deprecated: warns, then delegates to
    :func:`dendropy.utility.bitprocessing.int_as_bitstring`.
    ``symbol1`` is used for 0-bits and ``symbol2`` for 1-bits.
    """
    deprecate.dendropy_deprecation_warning(
            preamble="Deprecated since DendroPy 4: 'dendropy.treesplit.split_as_string()'.",
            old_construct="from dendropy import treesplit\nd = treesplit.split_as_string(...)",
            new_construct="""\
# if using a bipartition
d = bipartition.split_as_bitstring(...)
d = bipartition.leafset_as_bitstring(...)
# if a "raw" bitmask
from dendropy.utility import bitprocessing
d = bitprocessing.int_as_bitstring(...)""")
    return bitprocessing.int_as_bitstring(
            n=split_mask, length=width,
            symbol0=symbol1, symbol1=symbol2,
            reverse=False)
+
def split_as_string_rev(split_mask, width, symbol1='.', symbol2='*'):
    """Return ``split_mask`` rendered as a bit string of length ``width`` in
    reversed bit order, with ``symbol1`` for 0-bits and ``symbol2`` for 1-bits.

    Deprecated: use ``bitprocessing.int_as_bitstring(..., reverse=True)``
    instead.
    """
    deprecate.dendropy_deprecation_warning(
            preamble="Deprecated since DendroPy 4: 'dendropy.treesplit.split_as_string_rev()'.",
            old_construct="from dendropy import treesplit\nd = treesplit.split_as_string(...)",
            new_construct="""\
# if using a bipartition
d = bipartition.split_as_bitstring(...)[::-1]
d = bipartition.leafset_as_bitstring(...)[::-1]
# if a "raw" bitmask
from dendropy.utility import bitprocessing
d = bitprocessing.int_as_bitstring(..., reverse=True)""")
    # Bug fix: the call previously passed ``mask=`` (not a parameter of
    # int_as_bitstring -- cf. split_as_string() above, which uses ``n=``)
    # and referenced the undefined name ``symbol0``, so every call raised
    # NameError. Mirror split_as_string()'s argument mapping, reversed.
    return bitprocessing.int_as_bitstring(
            n=split_mask,
            length=width,
            symbol0=symbol1,
            symbol1=symbol2,
            reverse=True)
+
def find_edge_from_split(root, split_to_find, mask=-1):
    """Deprecated: warns, then delegates to
    ``root.find_edge_for_split_bitmask()``.
    """
    deprecate.dendropy_deprecation_warning(
            preamble="Deprecated since DendroPy 4: 'dendropy.treesplit.find_edge_from_split()'",
            old_construct="from dendropy import treesplit\nd = treesplit.find_edge_from_split(...)",
            new_construct="""\
# if using a bipartition
d = bipartition.edge
# if a "raw" bitmask
d = tree.find_edge_for_split_bitmask(...)""")
    return root.find_edge_for_split_bitmask(
            split_to_find,
            fill_bitmask=mask)
+
def encode_splits(tree, create_dict=True, suppress_unifurcations=True):
    """Deprecated: warns, then delegates to ``tree.encode_bipartitions()``.

    ``create_dict`` is accepted for legacy signature compatibility but is
    not forwarded to the replacement call.
    """
    deprecate.dendropy_deprecation_warning(
            preamble="Deprecated since DendroPy 4: 'dendropy.treesplit.encode_splits()'.",
            old_construct="from dendropy import treesplit\nd = treesplit.encode_splits(tree, ...)",
            new_construct="bipartitions = tree.encode_bipartitions(...)\nsplit_bitmasks = tree.split_bitmask_edge_map.keys()")
    encoded = tree.encode_bipartitions(suppress_unifurcations=suppress_unifurcations)
    return encoded
+
def is_compatible(split1, split2, mask):
    """
    Return whether splits ``split1`` and ``split2`` are compatible.
    Mask should have 1 for every leaf in the leaf_set.

    Deprecated: use ``dendropy.Bipartition.is_compatible_bitmasks()``
    (or ``bipartition.is_compatible_with()``) instead.
    """
    # Fix: the docstring above was previously placed *after* the
    # deprecation call, where it was a dead string expression rather than
    # the function's __doc__; it has been moved to the top. The old
    # commented-out bitmask implementation has been removed -- the live
    # delegation below is the single source of truth.
    deprecate.dendropy_deprecation_warning(
            preamble="Deprecated since DendroPy 4: 'dendropy.treesplit.is_compatible()'.",
            old_construct="from dendropy import treesplit\nd = treesplit.is_compatible(...)",
            new_construct="""\
# if using a bipartition
d = bipartition.is_compatible_with(other_bipartition)
# if a "raw" bitmask
d = dendropy.Bipartition.is_compatible_bitmasks(m1, m2, fill_bitmask=mask)""")
    return dendropy.Bipartition.is_compatible_bitmasks(split1, split2, fill_bitmask=mask)
+
def delete_outdegree_one(tree):
    """Deprecated: warns, then delegates to ``tree.suppress_unifurcations()``."""
    deprecate.dendropy_deprecation_warning(
            preamble="Deprecated since DendroPy 4: 'dendropy.treesplit.delete_outdegree_one()'.",
            old_construct="from dendropy import treesplit\nd = treesplit.delete_outdegree_one(tree)",
            new_construct="tree.suppress_unifurcations()")
    result = tree.suppress_unifurcations()
    return result
+
def lowest_bit_only(s):
    """Deprecated: warns, then delegates to
    :func:`dendropy.utility.bitprocessing.least_significant_set_bit`.
    """
    deprecate.dendropy_deprecation_warning(
            preamble="Deprecated since DendroPy 4: 'dendropy.treesplit.lowest_bit_only()'.",
            old_construct="from dendropy import treesplit\nd = treesplit.lowest_bit_only(...)",
            new_construct="from dendropy.utility import bitprocessing\nd = bitprocessing.least_significant_set_bit(...)")
    lsb = bitprocessing.least_significant_set_bit(s)
    return lsb
+
def count_bits(a):
    """Deprecated: warns, then delegates to
    :func:`dendropy.utility.bitprocessing.num_set_bits`.
    """
    deprecate.dendropy_deprecation_warning(
            preamble="Deprecated since DendroPy 4: 'dendropy.treesplit.count_bits()'.",
            old_construct="from dendropy import treesplit\nd = treesplit.count_bits(...)",
            new_construct="from dendropy.utility import bitprocessing\nd = bitprocessing.num_set_bits(...)")
    nbits = bitprocessing.num_set_bits(a)
    return nbits
+
class SplitDistribution(dendropy.SplitDistribution):
    """Deprecated shim: warns, then behaves as
    :class:`dendropy.SplitDistribution`, seeding counts from ``split_set``.
    """

    def __init__(self, taxon_set=None, split_set=None):
        moved_msg = "Deprecated since DendroPy 4: The 'dendropy.treesplit.SplitDistribution' class has moved to 'dendropy.calculate.treesplit.SplitDistribution'."
        deprecate.dendropy_deprecation_warning(
                preamble=moved_msg,
                old_construct="from dendropy import treesplit\nm = treesplit.SplitDistribution(...)",
                new_construct="import dendropy\nm = dendropy.SplitDistribution(...)")
        dendropy.SplitDistribution.__init__(self, taxon_namespace=taxon_set)
        # Legacy behavior: each split in split_set contributes a count of 1.
        if split_set:
            for s in split_set:
                self.add_split_count(s, count=1)
+
+
+
+
diff --git a/dendropy/legacy/treesum.py b/dendropy/legacy/treesum.py
new file mode 100644
index 0000000..03b9e1b
--- /dev/null
+++ b/dendropy/legacy/treesum.py
@@ -0,0 +1,43 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tree summarization.
+DEPRECATED IN DENDROPY 4: USE `dendropy.calculate.treesum` instead.
+"""
+
+from dendropy.calculate import treesum
+from dendropy.utility import deprecate
+
class TreeSummarizer(treesum.TreeSummarizer):
    """Deprecated shim: warns, then behaves exactly as
    :class:`dendropy.calculate.treesum.TreeSummarizer`.
    """

    def __init__(self, **kwargs):
        moved_msg = "Deprecated since DendroPy 4: The 'dendropy.treesum.TreeSummarizer' class has moved to 'dendropy.calculate.treesum.TreeSummarizer'."
        deprecate.dendropy_deprecation_warning(
                preamble=moved_msg,
                old_construct="from dendropy import treesum\nm = treesum.TreeSummarizer(...)",
                new_construct="from dendropy.calculate import treesum\nm = treesum.TreeSummarizer(...)")
        treesum.TreeSummarizer.__init__(self, **kwargs)
+
class TopologyCounter(treesum.TopologyCounter):
    """Deprecated shim: warns, then behaves exactly as
    :class:`dendropy.calculate.treesum.TopologyCounter`.
    """

    def __init__(self, **kwargs):
        moved_msg = "Deprecated since DendroPy 4: The 'dendropy.treesum.TopologyCounter' class has moved to 'dendropy.calculate.treesum.TopologyCounter'."
        deprecate.dendropy_deprecation_warning(
                preamble=moved_msg,
                old_construct="from dendropy import treesum\nm = treesum.TopologyCounter(...)",
                new_construct="from dendropy.calculate import treesum\nm = treesum.TopologyCounter(...)")
        treesum.TopologyCounter.__init__(self, **kwargs)
+
+
diff --git a/dendropy/mathlib/__init__.py b/dendropy/mathlib/__init__.py
new file mode 100644
index 0000000..5035498
--- /dev/null
+++ b/dendropy/mathlib/__init__.py
@@ -0,0 +1,19 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+
diff --git a/dendropy/mathlib/mathfn.py b/dendropy/mathlib/mathfn.py
new file mode 100644
index 0000000..e06ab98
--- /dev/null
+++ b/dendropy/mathlib/mathfn.py
@@ -0,0 +1,23 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DEPRECATED IN DENDROPY 4: USE `dendropy.calculate.mathfn` instead.
+"""
+
+from dendropy.calculate.mathfn import *
diff --git a/dendropy/mathlib/probability.py b/dendropy/mathlib/probability.py
new file mode 100644
index 0000000..c600407
--- /dev/null
+++ b/dendropy/mathlib/probability.py
@@ -0,0 +1,23 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DEPRECATED IN DENDROPY 4: USE `dendropy.calculate.probability` instead.
+"""
+
+from dendropy.calculate.probability import *
diff --git a/dendropy/mathlib/statistics.py b/dendropy/mathlib/statistics.py
new file mode 100644
index 0000000..ffeaaf3
--- /dev/null
+++ b/dendropy/mathlib/statistics.py
@@ -0,0 +1,23 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DEPRECATED IN DENDROPY 4: USE `dendropy.calculate.statistics` instead.
+"""
+
+from dendropy.calculate.statistics import *
diff --git a/dendropy/model/__init__.py b/dendropy/model/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/dendropy/model/birthdeath.py b/dendropy/model/birthdeath.py
new file mode 100644
index 0000000..53c8d14
--- /dev/null
+++ b/dendropy/model/birthdeath.py
@@ -0,0 +1,932 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Models, modeling and model-fitting of birth-death processes.
+"""
+
+import math
+import collections
+import itertools
+from dendropy.calculate import probability
+from dendropy.utility import GLOBAL_RNG
+from dendropy.utility.error import TreeSimTotalExtinctionException
+from dendropy.utility import constants
+
+import dendropy
+
def birth_death_tree(birth_rate, death_rate, birth_rate_sd=0.0, death_rate_sd=0.0, **kwargs):
    """
    Returns a birth-death tree with birth rate specified by ``birth_rate``, and
    death rate specified by ``death_rate``, with edge lengths in continuous (real)
    units.

    ``birth_rate_sd`` is the standard deviation of the normally-distributed mutation
    added to the birth rate as it is inherited by daughter nodes; if 0, birth
    rate does not evolve on the tree.

    ``death_rate_sd`` is the standard deviation of the normally-distributed mutation
    added to the death rate as it is inherited by daughter nodes; if 0, death
    rate does not evolve on the tree.

    Tree growth is controlled by one or more of the following arguments, of which
    at least one must be specified:

        - If ``ntax`` is given as a keyword argument, tree is grown until the number of
          tips == ntax.
        - If ``taxon_namespace`` is given as a keyword argument, tree is grown until the
          number of tips == len(taxon_namespace), and the taxa are assigned randomly to the
          tips.
        - If 'max_time' is given as a keyword argument, tree is grown for
          a maximum of ``max_time``.
        - If ``gsa_ntax`` is given then the tree will be simulated up to this number of
          tips (or 0 tips), then a tree will be randomly selected from the
          intervals which correspond to times at which the tree had exactly ``ntax``
          leaves (or len(taxon_namespace) tips). This allows for simulations according to
          the "General Sampling Approach" of [citeHartmannWS2010]_

    If more than one of the above is given, then tree growth will terminate when
    *any* of the termination conditions (i.e., number of tips == ``ntax``, or number
    of tips == len(taxon_namespace) or maximum time = ``max_time``) are met.

    Also accepts a Tree object (with valid branch lengths) as an argument passed
    using the keyword ``tree``: if given, then this tree will be used; otherwise
    a new one will be created.

    If ``assign_taxa`` is False, then taxa will *not* be assigned to the tips;
    otherwise (default), taxa will be assigned. If ``taxon_namespace`` is given
    (``tree.taxon_namespace``, if ``tree`` is given), and the final number of tips on the
    tree after the termination condition is reached is less than the number of
    taxa in ``taxon_namespace`` (as will be the case, for example, when
    ``ntax`` < len(``taxon_namespace``)), then a random subset of taxa in ``taxon_namespace`` will
    be assigned to the tips of tree. If the number of tips is more than the number
    of taxa in the ``taxon_namespace``, new Taxon objects will be created and added
    to the ``taxon_namespace`` if the keyword argument ``create_required_taxa`` is not given as
    False.

    Under some conditions, it is possible for all lineages on a tree to go extinct.
    In this case, if the keyword argument ``repeat_until_success`` is `True` (the
    default), then a new branching process is initiated.
    If `False`, then a TreeSimTotalExtinctionException is raised.

    A Random() object or equivalent can be passed using the ``rng`` keyword;
    otherwise GLOBAL_RNG is used.

    .. [citeHartmannWS2010] Hartmann, Wong, and Stadler "Sampling Trees from Evolutionary Models" Systematic Biology. 2010. 59(4). 465-476

    """
    target_num_taxa = kwargs.get('ntax')
    max_time = kwargs.get('max_time')
    taxon_namespace = kwargs.get('taxon_namespace')
    if (target_num_taxa is None) and (taxon_namespace is not None):
        target_num_taxa = len(taxon_namespace)
    elif taxon_namespace is None:
        taxon_namespace = dendropy.TaxonNamespace()
    gsa_ntax = kwargs.get('gsa_ntax')
    terminate_at_full_tree = False
    if target_num_taxa is None:
        if gsa_ntax is not None:
            raise ValueError("When 'gsa_ntax' is used, either 'ntax' or 'taxon_namespace' must be used")
        if max_time is None:
            raise ValueError("At least one of the following must be specified: 'ntax', 'taxon_namespace', or 'max_time'")
    else:
        if gsa_ntax is None:
            terminate_at_full_tree = True
            gsa_ntax = 1 + target_num_taxa
        elif gsa_ntax < target_num_taxa:
            raise ValueError("gsa_ntax must be greater than target_num_taxa")
    repeat_until_success = kwargs.get('repeat_until_success', True)
    rng = kwargs.get('rng', GLOBAL_RNG)

    # initialize tree
    if "tree" in kwargs:
        tree = kwargs['tree']
        if "taxon_namespace" in kwargs and kwargs['taxon_namespace'] is not tree.taxon_namespace:
            raise ValueError("Cannot specify both ``tree`` and ``taxon_namespace``")
    else:
        tree = dendropy.Tree(taxon_namespace=taxon_namespace)
        tree.is_rooted = True
        tree.seed_node.edge.length = 0.0
        tree.seed_node.birth_rate = birth_rate
        tree.seed_node.death_rate = death_rate

    # grow tree
    leaf_nodes = tree.leaf_nodes()
    curr_num_leaves = len(leaf_nodes)
    total_time = 0
    # For the GSA simulations targetted_time_slices is a list of tuples:
    #   the first element in the tuple is the duration of the amount
    #   that the simulation spent at the (targetted) number of taxa
    #   and a list of edge information. The list of edge information includes
    #   a list of terminal edges in the tree and the length for that edge
    #   that marks the beginning of the time slice that corresponds to the
    #   targetted number of taxa.
    targetted_time_slices = []
    extinct_tips = []
    while True:
        if gsa_ntax is None:
            assert (max_time is not None)
            if total_time >= max_time:
                break
        elif curr_num_leaves >= gsa_ntax:
            break

        # get vector of birth/death probabilities, and
        # associate with nodes/events
        event_rates = []
        event_nodes = []
        for nd in leaf_nodes:
            if not hasattr(nd, 'birth_rate'):
                nd.birth_rate = birth_rate
            if not hasattr(nd, 'death_rate'):
                nd.death_rate = death_rate
            event_rates.append(nd.birth_rate)
            event_nodes.append((nd, True)) # birth event = True
            event_rates.append(nd.death_rate)
            event_nodes.append((nd, False)) # birth event = False; i.e. death

        # get total probability of any birth/death
        rate_of_any_event = sum(event_rates)

        # waiting time based on above probability
        waiting_time = rng.expovariate(rate_of_any_event)

        if (gsa_ntax is not None) and (curr_num_leaves == target_num_taxa):
            edge_and_start_length = []
            for nd in leaf_nodes:
                e = nd.edge
                edge_and_start_length.append((e, e.length))
            targetted_time_slices.append((waiting_time, edge_and_start_length))
            if terminate_at_full_tree:
                break

        # add waiting time to nodes
        for nd in leaf_nodes:
            try:
                nd.edge.length += waiting_time
            except TypeError:
                nd.edge.length = waiting_time
        total_time += waiting_time

        # if event occurs within time constraints
        if max_time is None or total_time <= max_time:

            # normalize probability
            for i in range(len(event_rates)):
                event_rates[i] = event_rates[i]/rate_of_any_event

            # select node/event and process
            nd, birth_event = probability.weighted_choice(event_nodes, event_rates, rng=rng)
            leaf_nodes.remove(nd)
            curr_num_leaves -= 1
            if birth_event:
                # speciation: replace the lineage with two daughters,
                # each inheriting a (possibly mutated) birth/death rate
                c1 = nd.new_child()
                c2 = nd.new_child()
                c1.edge.length = 0
                c2.edge.length = 0
                c1.birth_rate = nd.birth_rate + rng.gauss(0, birth_rate_sd)
                c1.death_rate = nd.death_rate + rng.gauss(0, death_rate_sd)
                c2.birth_rate = nd.birth_rate + rng.gauss(0, birth_rate_sd)
                c2.death_rate = nd.death_rate + rng.gauss(0, death_rate_sd)
                leaf_nodes.append(c1)
                leaf_nodes.append(c2)
                curr_num_leaves += 2
            else:
                # extinction
                if curr_num_leaves > 0:
                    extinct_tips.append(nd)
                else:
                    # total extinction: either use an already-recorded GSA
                    # slice, raise, or restart the branching process
                    if (gsa_ntax is not None):
                        if (len(targetted_time_slices) > 0):
                            break
                    if not repeat_until_success:
                        raise TreeSimTotalExtinctionException()
                    # We are going to basically restart the simulation because the tree has gone extinct (without reaching the specified ntax)
                    leaf_nodes = [tree.seed_node]
                    curr_num_leaves = 1
                    for nd in tree.seed_node.child_nodes():
                        tree.prune_subtree(nd, suppress_unifurcations=False)
                    extinct_tips = []
                    total_time = 0
            assert(curr_num_leaves == len(leaf_nodes))
    if gsa_ntax is not None:
        # General Sampling Approach: pick one recorded time slice with
        # probability proportional to its duration, then roll the tree
        # back to the state it had during that slice.
        total_duration_at_target_n_tax = 0.0
        for i in targetted_time_slices:
            total_duration_at_target_n_tax += i[0]
        r = rng.random()*total_duration_at_target_n_tax
        selected_slice = None
        for n, i in enumerate(targetted_time_slices):
            r -= i[0]
            if r < 0.0:
                selected_slice = i
                # Bug fix: without this break, ``r`` stays negative and
                # every remaining iteration overwrote selected_slice, so
                # the *last* slice was always chosen instead of a
                # duration-weighted random one.
                break
        assert(selected_slice is not None)
        edges_at_slice = selected_slice[1]
        last_waiting_time = selected_slice[0]
        for e, prev_length in edges_at_slice:
            daughter_nd = e.head_node
            for nd in daughter_nd.child_nodes():
                tree.prune_subtree(nd, suppress_unifurcations=False)
                try:
                    extinct_tips.remove(nd)
                except ValueError:
                    # node was not recorded as an extinct tip; nothing to do
                    pass
            try:
                extinct_tips.remove(daughter_nd)
            except ValueError:
                pass
            e.length = prev_length + last_waiting_time

    # prune lineages that went extinct (walking up through any chain of
    # outdegree-1 ancestors created by earlier prunes)
    for nd in extinct_tips:
        while (nd.parent_node is not None) and (len(nd.parent_node.child_nodes()) == 1):
            nd = nd.parent_node
        if nd.parent_node:
            tree.prune_subtree(nd, suppress_unifurcations=False)
    tree.suppress_unifurcations()

    if kwargs.get("assign_taxa", True):
        tree.randomly_assign_taxa(create_required_taxa=True, rng=rng)

    # return
    return tree
+
+
+def discrete_birth_death_tree(birth_rate, death_rate, birth_rate_sd=0.0, death_rate_sd=0.0, **kwargs):
+    """
+    Returns a birth-death tree with birth rate specified by ``birth_rate``, and
+    death rate specified by ``death_rate``, with edge lengths in discrete (integer)
+    units.
+
+    ``birth_rate_sd`` is the standard deviation of the normally-distributed mutation
+    added to the birth rate as it is inherited by daughter nodes; if 0, birth
+    rate does not evolve on the tree.
+
+    ``death_rate_sd`` is the standard deviation of the normally-distributed mutation
+    added to the death rate as it is inherited by daughter nodes; if 0, death
+    rate does not evolve on the tree.
+
+    Tree growth is controlled by one or more of the following arguments, of which
+    at least one must be specified:
+
+        - If ``ntax`` is given as a keyword argument, tree is grown until the number of
+          tips == ntax.
+        - If ``taxon_namespace`` is given as a keyword argument, tree is grown until the
+          number of tips == len(taxon_namespace), and the taxa are assigned randomly to the
+          tips.
+        - If 'max_time' is given as a keyword argument, tree is grown for ``max_time``
+          number of generations.
+
+    If more than one of the above is given, then tree growth will terminate when
+    *any* of the termination conditions (i.e., number of tips == ``ntax``, or number
+    of tips == len(taxon_namespace) or number of generations = ``max_time``) are met.
+
+    Also accepts a Tree object (with valid branch lengths) as an argument passed
+    using the keyword ``tree``: if given, then this tree will be used; otherwise
+    a new one will be created.
+
+    If ``assign_taxa`` is False, then taxa will *not* be assigned to the tips;
+    otherwise (default), taxa will be assigned. If ``taxon_namespace`` is given
+    (``tree.taxon_namespace``, if ``tree`` is given), and the final number of tips on the
+    tree after the termination condition is reached is less then the number of
+    taxa in ``taxon_namespace`` (as will be the case, for example, when
+    ``ntax`` < len(``taxon_namespace``)), then a random subset of taxa in ``taxon_namespace`` will
+    be assigned to the tips of tree. If the number of tips is more than the number
+    of taxa in the ``taxon_namespace``, new Taxon objects will be created and added
+    to the ``taxon_namespace`` if the keyword argument ``create_required_taxa`` is not given as
+    False.
+
+    Under some conditions, it is possible for all lineages on a tree to go extinct.
+    In this case, if the keyword argument ``repeat_until_success`` is `True`, then a new
+    branching process is initiated.
+    If `False` (default), then a TreeSimTotalExtinctionException is raised.
+
+    A Random() object or equivalent can be passed using the ``rng`` keyword;
+    otherwise GLOBAL_RNG is used.
+    """
+    if 'ntax' not in kwargs \
+        and 'taxon_namespace' not in kwargs \
+        and 'max_time' not in kwargs:
+            raise ValueError("At least one of the following must be specified: 'ntax', 'taxon_namespace', or 'max_time'")
+    target_num_taxa = None
+    taxon_namespace = None
+    target_num_gens = kwargs.get('max_time', None)
+    if 'taxon_namespace' in kwargs:
+        taxon_namespace = kwargs.get('taxon_namespace')
+        target_num_taxa = kwargs.get('ntax', len(taxon_namespace))
+    elif 'ntax' in kwargs:
+        target_num_taxa = kwargs['ntax']
+    if taxon_namespace is None:
+        taxon_namespace = dendropy.TaxonNamespace()
+    repeat_until_success = kwargs.get('repeat_until_success', False)
+    rng = kwargs.get('rng', GLOBAL_RNG)
+
+    # grow tree
+    if "tree" in kwargs:
+        tree = kwargs['tree']
+        if "taxon_namespace" in kwargs and kwargs['taxon_namespace'] is not tree.taxon_namespace:
+            raise ValueError("Cannot specify both ``tree`` and ``taxon_namespace``")
+    else:
+        tree = dendropy.Tree(taxon_namespace=taxon_namespace)
+        tree.is_rooted = True
+        tree.seed_node.edge.length = 0
+        tree.seed_node.birth_rate = birth_rate
+        tree.seed_node.death_rate = death_rate
+    leaf_nodes = tree.leaf_nodes()
+    num_gens = 0
+    while (target_num_taxa is None or len(leaf_nodes) < target_num_taxa) \
+            and (target_num_gens is None or num_gens < target_num_gens):
+        for nd in leaf_nodes:
+            if not hasattr(nd, 'birth_rate'):
+                nd.birth_rate = birth_rate
+            if not hasattr(nd, 'death_rate'):
+                nd.death_rate = death_rate
+            try:
+                nd.edge.length += 1
+            except TypeError:
+                nd.edge.length = 1
+            u = rng.uniform(0, 1)
+            if u < nd.birth_rate:
+                c1 = nd.new_child()
+                c2 = nd.new_child()
+                c1.edge.length = 0
+                c2.edge.length = 0
+                c1.birth_rate = nd.birth_rate + rng.gauss(0, birth_rate_sd)
+                c1.death_rate = nd.death_rate + rng.gauss(0, death_rate_sd)
+                c2.birth_rate = nd.birth_rate + rng.gauss(0, birth_rate_sd)
+                c2.death_rate = nd.death_rate + rng.gauss(0, death_rate_sd)
+            elif u > nd.birth_rate and u < (nd.birth_rate + nd.death_rate):
+                if nd is not tree.seed_node:
+                    tree.prune_subtree(nd)
+                elif not repeat_until_success:
+                    # all lineages are extinct: raise exception
+                    raise TreeSimTotalExtinctionException()
+                else:
+                    # all lineages are extinct: repeat
+                    num_gens = 0
+
+        num_gens += 1
+        leaf_nodes = tree.leaf_nodes()
+
+    # If termination condition specified by ntax or taxon_namespace, then the last
+    # split will have daughter edges of length == 0;
+    # so we continue growing the edges until the next birth/death event *or*
+    # the max number of generations condition is given and met
+    gens_to_add = 0
+    while (target_num_gens is None or num_gens < target_num_gens):
+        u = rng.uniform(0, 1)
+        if u < (birth_rate + death_rate):
+            break
+        gens_to_add += 1
+    for nd in tree.leaf_nodes():
+        nd.edge.length += gens_to_add
+
+    if kwargs.get("assign_taxa", True):
+        tree.randomly_assign_taxa(create_required_taxa=True, rng=rng)
+
+    # return
+    return tree
+
def uniform_pure_birth_tree(taxon_namespace, birth_rate=1.0, rng=None):
    """
    Generates a uniform-rate pure-birth (Yule) process tree, with one leaf
    per taxon in ``taxon_namespace``, and taxa assigned to leaves in
    namespace order.

    Parameters
    ----------
    taxon_namespace : |TaxonNamespace|
        The taxa to be assigned to the tips of the generated tree.
    birth_rate : float
        Scaling factor for the per-lineage splitting rate (see NOTE below
        regarding its exact interpretation).
    rng : ``Random`` object or ``None``
        The random number generator to use; defaults to ``GLOBAL_RNG``.

    Returns
    -------
    |Tree|
        A rooted tree with ``len(taxon_namespace)`` leaves.
    """
    if rng is None:
        rng = GLOBAL_RNG # use the global rng by default
    tree = dendropy.Tree(taxon_namespace=taxon_namespace)
    tree.seed_node.edge.length = 0.0
    leaf_nodes = tree.leaf_nodes()
    # Repeatedly split a uniformly-chosen leaf until there is one leaf per taxon.
    while len(leaf_nodes) < len(taxon_namespace):
        # NOTE(review): the exponential rate used is len(leaf_nodes)/birth_rate;
        # the conventional Yule total rate would be len(leaf_nodes)*birth_rate.
        # The two coincide at the default birth_rate=1.0 -- confirm the
        # intended semantics of ``birth_rate`` before relying on other values.
        waiting_time = rng.expovariate(len(leaf_nodes)/birth_rate)
        # Extend all extant lineages through the waiting time to the next split.
        for nd in leaf_nodes:
            nd.edge.length += waiting_time
        # Uniform rates: every extant lineage is equally likely to split.
        parent_node = rng.choice(leaf_nodes)
        c1 = parent_node.new_child()
        c2 = parent_node.new_child()
        c1.edge.length = 0.0
        c2.edge.length = 0.0
        leaf_nodes = tree.leaf_nodes()
    leaf_nodes = tree.leaf_nodes()
    # Draw one final waiting time so the last split does not leave
    # zero-length terminal edges.
    waiting_time = rng.expovariate(len(leaf_nodes)/birth_rate)
    for nd in leaf_nodes:
        nd.edge.length += waiting_time
    # Assign taxa to tips in namespace order.
    for idx, leaf in enumerate(leaf_nodes):
        leaf.taxon = taxon_namespace[idx]
    tree.is_rooted = True
    return tree
+
def fit_pure_birth_model(**kwargs):
    """
    Calculates the maximum-likelihood estimate of the birth rate of a set of
    *internal* node ages under a Yule (pure-birth) model.

    Requires either a |Tree| object or an iterable of *internal* node
    ages to be passed in via keyword arguments ``tree`` or ``internal_node_ages``,
    respectively. The former is more convenient when doing one-off
    calculations, while the latter is more efficient if the list of internal
    node ages needs to be used in other places and you already have it
    calculated and want to avoid re-calculating it here.

    Parameters
    ----------
    \*\*kwargs : keyword arguments, mandatory

        Exactly *one* of the following *must* be specified:

            tree : a |Tree| object.
                A |Tree| object. The tree needs to be ultrametric for the
                internal node ages (time from each internal node to the tips)
                to make sense. The precision by which the ultrametricity is
                checked can be specified using the ``ultrametricity_precision`` keyword
                argument (see below). If ``tree`` is given, then
                ``internal_node_ages`` cannot be given, and vice versa. If ``tree``
                is not given, then ``internal_node_ages`` must be given.
            internal_node_ages : iterable (of numerical values)
                Iterable of node ages of the internal nodes of a tree, i.e., the
                list of sums of the edge lengths between each internal node and
                the tips of the tree. If ``internal_node_ages`` is given, then
                ``tree`` cannot be given, and vice versa. If ``internal_node_ages``
                is not given, then ``tree`` must be given.

        The following are optional:

            ultrametricity_precision : float
                When calculating the node ages, an error will be raised if the
                tree is not ultrametric. This error may be due to floating-point
                or numerical imprecision. You can set the precision of the
                ultrametricity validation by setting the
                ``ultrametricity_precision`` parameter. E.g., use
                ``ultrametricity_precision=0.01`` for a more relaxed precision,
                down to 2 decimal places. Use ``ultrametricity_precision=False``
                to disable checking of ultrametricity precision. Only used if
                ``tree`` is passed in.

            ignore_likelihood_calculation_failure: bool (default: False)
                In some cases (typically, abnormal trees, e.g., 1-tip), the
                likelihood estimation will fail. In this case a ValueError will
                be raised. If ``ignore_likelihood_calculation_failure`` is
                `True`, then the function call will still succeed, with the
                likelihood set to -``inf``.

    Returns
    -------
    m : dictionary

    A dictionary with keys being parameter names and values being
    estimates:

        "birth_rate"
            The birth rate.
        "log_likelihood"
            The log-likelihood of the model and given birth rate.

    Raises
    ------
    TypeError
        If neither, or both, of ``tree`` and ``internal_node_ages`` are given.
    ValueError
        If no internal node ages are available, or if the likelihood
        calculation fails and ``ignore_likelihood_calculation_failure`` is
        not `True`.

    Examples
    --------

    Given trees such as::

        import dendropy
        from dendropy.model import birthdeath
        trees = dendropy.TreeList.get_from_path(
                "pythonidae.nex", "nexus")

    Birth rates can be estimated by passing in trees directly::

        for idx, tree in enumerate(trees):
            m = birthdeath.fit_pure_birth_model(tree=tree)
            print("Tree {}: birth rate = {} (logL = {})".format(
                idx+1, m["birth_rate"], m["log_likelihood"]))

    Or by pre-calculating and passing in a list of node ages::

        for idx, tree in enumerate(trees):
            m = birthdeath.fit_pure_birth_model(
                    internal_node_ages=tree.internal_node_ages())
            print("Tree {}: birth rate = {} (logL = {})".format(
                idx+1, m["birth_rate"], m["log_likelihood"]))


    Notes
    -----
    Adapted from the laser package for R:

        -   Dan Rabosky and Klaus Schliep (2013). laser: Likelihood Analysis of
            Speciation/Extinction Rates from Phylogenies. R package version
            2.4-1. http://CRAN.R-project.org/package=laser

    See also:

        -   Nee, S.  2001.  Inferring speciation rates from phylogenies.
            Evolution 55:661-668.
        -   Yule, G. U. 1924. A mathematical theory of evolution based on the
            conclusions of Dr.  J. C. Willis. Phil. Trans. R. Soc. Lond. B
            213:21-87.

    """
    tree = kwargs.get("tree", None)
    if tree is not None:
        # Enforce the mutual exclusivity documented above instead of
        # silently ignoring one of the two sources of node ages.
        if "internal_node_ages" in kwargs:
            raise TypeError("Cannot specify both 'tree' and 'internal_node_ages'")
        internal_node_ages = tree.internal_node_ages(ultrametricity_precision=kwargs.get("ultrametricity_precision", 0.0000001))
    else:
        try:
            internal_node_ages = kwargs["internal_node_ages"]
        except KeyError:
            raise TypeError("Need to specify 'tree' or 'internal_node_ages'")
    x = sorted(internal_node_ages, reverse=True)
    if not x:
        raise ValueError("No internal node ages available")
    # The following mirrors the 'yuleint2' routine of the R 'laser' package,
    # evaluated over the full interval [0, root age].
    st1 = x[0]
    st2 = 0
    nvec = range(2, len(x)+2)
    nv = [i for i in x if (i < st1) and (i >= st2)]
    lo = max(nvec[idx] for idx, i in enumerate(x) if i >= st1)
    up = max(nvec[idx] for idx, i in enumerate(x) if i >= st2)
    if st1 <= x[0]:
        nv.insert(0, st1)
        nv = [i - st2 for i in nv]
    else:
        nv = [i - st2 for i in nv]
    t1 = (up-lo)
    t2 = (lo*nv[0])
    t3 = sum( nv[1:(up-lo+1)] )
    # Maximum-likelihood birth rate: (number of birth events) / (total
    # lineage-duration, i.e., summed branch lengths).
    smax = t1/(t2 + t3)

    try:
        s1 = sum(map(math.log, range(lo,up)))
        s2 = (up-lo) * math.log(smax)
        s3 = lo - up
        lh = s1 + s2 + s3
    except ValueError:
        # BUG FIX: the original inverted this test, raising when
        # ``ignore_likelihood_calculation_failure`` was True and silently
        # returning -inf when it was False -- the opposite of the
        # documented contract.
        if not kwargs.get("ignore_likelihood_calculation_failure", False):
            raise ValueError("Likelihood estimation failure")
        lh = float("-inf")

    result = {
        "birth_rate" : smax,
        "log_likelihood" : lh,
    }
    return result
+
def fit_pure_birth_model_to_tree(tree, ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION):
    """
    Calculates the maximum-likelihood estimate of the birth rate of a tree
    under a Yule (pure-birth) model.

    Parameters
    ----------
    tree : |Tree| object
        A tree to be fitted. The tree needs to be ultrametric (within
        ``ultrametricity_precision``) for internal node ages to make sense.
    ultrametricity_precision : float
        Tolerance used when validating that ``tree`` is ultrametric.

    Returns
    -------
    m : dictionary
        A dictionary with keys being parameter names and values being
        estimates:

            -   "birth_rate"
                The birth rate.
            -   "log_likelihood"
                The log-likelihood of the model and given birth rate.

    Examples
    --------

    ::

        import dendropy
        from dendropy.model import birthdeath
        trees = dendropy.TreeList.get_from_path(
                "pythonidae.nex", "nexus")
        for idx, tree in enumerate(trees):
            m = birthdeath.fit_pure_birth_model_to_tree(tree)
            print("Tree {}: birth rate = {} (logL = {})".format(
                idx+1, m["birth_rate"], m["log_likelihood"]))

    """
    # Convenience wrapper: delegate to the keyword-argument form.
    return fit_pure_birth_model(
            tree=tree,
            ultrametricity_precision=ultrametricity_precision,
            )
+
+
class ProtractedSpeciationModel(object):
    """
    Simulator for the protracted speciation process.

    Under this model, speciation is not instantaneous: each new lineage
    arises as an *incipient* species and only becomes a *full* (i.e.,
    "good" or "true") species following a separate stochastic
    speciation-completion ("conversion") event. The process is governed by
    five rates: birth and extinction rates for full species, birth and
    extinction rates for incipient species, and the rate at which
    incipient species convert to full species.
    """

    class ProtractedSpeciationModelLineage(object):
        """
        Bookkeeping record for a single lineage tracked by the simulator.
        """

        def __init__(self,
                index,
                parent_lineage,
                speciation_initiation_time,
                is_incipient):
            self.index = index # unique serial number of this lineage
            self.is_incipient = is_incipient # True while the lineage is still an incipient species
            self.parent_lineage = parent_lineage # lineage from which this one budded; None for the initial lineage
            self.speciation_initiation_time = speciation_initiation_time # time at which this lineage arose
            self.speciation_completion_time = None # set if/when the lineage converts to a full species
            self.extinction_time = None # set if/when the lineage goes extinct
            self.node = None # tree node currently representing this lineage

    def __init__(self,
            full_species_birth_rate,
            full_species_extinction_rate,
            incipient_species_birth_rate,
            incipient_species_conversion_rate,
            incipient_species_extinction_rate,
            rng=None,
            ):
        """
        Parameters
        ----------
        full_species_birth_rate : float
            Rate at which full species bud off new (incipient) species.
        full_species_extinction_rate : float
            Rate at which full species go extinct.
        incipient_species_birth_rate : float
            Rate at which incipient species bud off new (incipient) species.
        incipient_species_conversion_rate : float
            Rate at which incipient species complete speciation and become
            full species.
        incipient_species_extinction_rate : float
            Rate at which incipient species go extinct.
        rng : ``Random`` instance or ``None``
            Random number generator to use; defaults to ``GLOBAL_RNG``.
        """
        self.full_species_birth_rate = full_species_birth_rate
        self.full_species_extinction_rate = full_species_extinction_rate
        self.incipient_species_birth_rate = incipient_species_birth_rate
        self.incipient_species_conversion_rate = incipient_species_conversion_rate
        self.incipient_species_extinction_rate = incipient_species_extinction_rate
        if rng is None:
            self.rng = GLOBAL_RNG
        else:
            self.rng = rng
        # Factories are instance attributes so client code can substitute
        # subclasses of Tree/Node.
        self.tree_factory = dendropy.Tree
        self.node_factory = dendropy.Node
        self.reset()

    def reset(self):
        """Clears all simulation state so a fresh run can be started."""
        self.current_time = 0.0
        self.current_lineage_index = 0
        self.current_full_species_lineages = []
        self.current_incipient_species_lineages = []
        # NOTE(review): never populated by the current implementation;
        # apparently reserved for future lineage lookups.
        self.lineage_table = {}

    def generate_tree(self, **kwargs):
        """
        Samples from the Protracted Speciation Model process. Nodes on the tree which
        represent full/good/true speciation events will have the attribute
        ``is_parent_of_full_species`` set to `True`, while this attribute will be set
        to `False` otherwise.

        Parameters
        ----------

        max_time : float
            Length of time for which to run process.
        taxon_namespace : |TaxonNamespace|
            If given, passed through to the tree constructor.
        is_initial_species_incipient : bool
            Whether the first lineage that initiates the process is an
            incipient or full species. Defaults to `False`: first species on
            the tree is a full species.
        is_retry_on_total_extinction : bool
            If ``False``, then a TreeSimTotalExtinctionException will be raised
            if all lineages go extinct before the termination conditions are
            met. Defaults to ``True``: if all lineages go extinct before the
            termination conditions are met, then the simulation is rerun, up to
            a maximum of ``max_retries``.
        max_retries : int
            Maximum number of runs to execute in the event of
            prematurely-terminated simulations due to all lineages going
            extinct. Once this number of re-runs is exceeded, then
            TreeSimTotalExtinctionException is raised. Defaults to 1000. Set to
            ``None`` to never quit trying.
        collapse_incipient_speciation_nodes : bool
            NOTE(review): accepted and consumed, but currently has *no*
            effect -- the corresponding post-processing step is not
            implemented.
        prune_incipient_species : bool
            NOTE(review): accepted and consumed, but currently has *no*
            effect -- the corresponding post-processing step is not
            implemented.

        Returns
        -------
        t1 : |Tree| instance
            A tree resulting from sampling the protracted speciation process.

        """
        is_retry_on_total_extinction = kwargs.pop("is_retry_on_total_extinction", True)
        max_retries = kwargs.pop("max_retries", 1000)
        # Popped so that they do not reach _run_protracted_speciation_process;
        # see the docstring note above: these are currently unused.
        collapse_incipient_speciation_nodes = kwargs.pop("collapse_incipient_speciation_nodes", False)
        prune_incipient_species = kwargs.pop("prune_incipient_species", False)
        num_retries = 0
        tree = None
        while True:
            try:
                tree = self._run_protracted_speciation_process(**kwargs)
                break
            except TreeSimTotalExtinctionException:
                if not is_retry_on_total_extinction:
                    raise
                num_retries += 1
                if max_retries is not None and num_retries > max_retries:
                    raise
        assert tree is not None
        return tree

    def _run_protracted_speciation_process(self, **kwargs):
        """
        Runs a single realization of the process until ``max_time`` is
        exceeded, returning the resulting tree. Raises
        TreeSimTotalExtinctionException if all lineages die out first.
        """
        self.reset()
        max_time = kwargs.get("max_time", None)
        taxon_namespace = kwargs.get("taxon_namespace", None)

        is_incipient = kwargs.get("is_initial_species_incipient", False)
        if is_incipient:
            initial_lineage = self._new_lineage(parent_lineage=None, is_incipient=True)
        else:
            initial_lineage = self._new_lineage(parent_lineage=None, is_incipient=False)
        seed_node = self._new_node(lineage=initial_lineage)
        tree = self.tree_factory( taxon_namespace=taxon_namespace, seed_node=seed_node)
        tree.is_rooted = True

        while True:

            ## Draw time to next event
            event_rates = []
            num_full_species = len(self.current_full_species_lineages)
            num_incipient_species = len(self.current_incipient_species_lineages)

            # Event type 0: a full species buds off a new incipient species
            event_rates.append(self.full_species_birth_rate * num_full_species)

            # Event type 1: a full species goes extinct
            event_rates.append(self.full_species_extinction_rate * num_full_species)

            # Event type 2: an incipient species buds off a new incipient species
            event_rates.append(self.incipient_species_birth_rate * num_incipient_species)

            # Event type 3: an incipient species converts to a full species
            event_rates.append(self.incipient_species_conversion_rate * num_incipient_species)

            # Event type 4: an incipient species goes extinct
            event_rates.append(self.incipient_species_extinction_rate * num_incipient_species)

            # All events: total rate of the next event of any type
            rate_of_any_event = sum(event_rates)

            # Waiting time to the next event is exponential with the total
            # rate. NOTE(review): if all five rates are zero this raises
            # ZeroDivisionError -- degenerate parameterizations are not
            # guarded against.
            waiting_time = self.rng.expovariate(rate_of_any_event)
            self.current_time += waiting_time
            if max_time and self.current_time > max_time:
                break
            # Grow all extant lineages through the waiting time.
            for lineage in itertools.chain(self.current_full_species_lineages, self.current_incipient_species_lineages):
                lineage.node.edge.length += waiting_time

            # Select event in proportion to its rate.
            event_type_idx = probability.weighted_index_choice(weights=event_rates, rng=self.rng)
            assert (event_type_idx >= 0 and event_type_idx <= 4)

            if event_type_idx == 0:
                self._process_full_species_birth(tree)
            elif event_type_idx == 1:
                self._process_full_species_extinction(tree)
            elif event_type_idx == 2:
                self._process_incipient_species_birth(tree)
            elif event_type_idx == 3:
                self._process_incipient_species_conversion(tree)
            elif event_type_idx == 4:
                self._process_incipient_species_extinction(tree)
            else:
                raise Exception("Unexpected event type index: {}".format(event_type_idx))

            if len(self.current_full_species_lineages) + len(self.current_incipient_species_lineages) == 0:
                raise TreeSimTotalExtinctionException()

        return tree

    def _process_full_species_birth(self, tree):
        """A randomly-chosen full species buds off a new incipient species."""
        parent_lineage = self.rng.choice(self.current_full_species_lineages)
        parent_node = parent_lineage.node
        new_lineage = self._new_lineage(parent_lineage=parent_lineage, is_incipient=True)
        # The parent lineage continues through one child node; the new
        # incipient lineage occupies the other.
        c1 = self._new_node(lineage=parent_lineage)
        c2 = self._new_node(lineage=new_lineage)
        parent_node.add_child(c1)
        parent_node.add_child(c2)

    def _process_full_species_extinction(self, tree):
        """A randomly-chosen full species goes extinct and is pruned."""
        sp = self.rng.choice(self.current_full_species_lineages)
        sp.extinction_time = self.current_time
        self.current_full_species_lineages.remove(sp)
        self._make_lineage_extinct_on_phylogeny(tree, sp.node)

    def _process_incipient_species_birth(self, tree):
        """A randomly-chosen incipient species buds off a new incipient species."""
        parent_lineage = self.rng.choice(self.current_incipient_species_lineages)
        parent_node = parent_lineage.node
        new_lineage = self._new_lineage(parent_lineage=parent_lineage, is_incipient=True)
        c1 = self._new_node(lineage=parent_lineage)
        c2 = self._new_node(lineage=new_lineage)
        parent_node.add_child(c1)
        parent_node.add_child(c2)

    def _process_incipient_species_conversion(self, tree):
        """A randomly-chosen incipient species completes speciation."""
        lineage = self.rng.choice(self.current_incipient_species_lineages)
        self.current_incipient_species_lineages.remove(lineage)
        self.current_full_species_lineages.append(lineage)
        lineage.speciation_completion_time = self.current_time
        # Mark the speciation-initiation node as a true speciation event.
        if lineage.node.parent_node is not None:
            lineage.node.parent_node.is_parent_of_full_species = True

    def _process_incipient_species_extinction(self, tree):
        """A randomly-chosen incipient species goes extinct and is pruned."""
        sp = self.rng.choice(self.current_incipient_species_lineages)
        sp.extinction_time = self.current_time
        self.current_incipient_species_lineages.remove(sp)
        self._make_lineage_extinct_on_phylogeny(tree, sp.node)

    def _make_lineage_extinct_on_phylogeny(self, tree, sp):
        """
        Prunes node ``sp`` from ``tree``; raises
        TreeSimTotalExtinctionException if no lineages remain at all.
        """
        if len(self.current_full_species_lineages) == 0 and len(self.current_incipient_species_lineages) == 0:
            raise TreeSimTotalExtinctionException()
        tree.prune_subtree(sp)

    def _new_lineage(self, parent_lineage, is_incipient):
        """
        Creates, registers, and returns a new lineage record budding off
        ``parent_lineage`` at the current time.
        """
        self.current_lineage_index += 1
        lineage_index = self.current_lineage_index
        new_lineage = ProtractedSpeciationModel.ProtractedSpeciationModelLineage(
                index=lineage_index,
                parent_lineage=parent_lineage,
                speciation_initiation_time=self.current_time,
                is_incipient=is_incipient)
        if is_incipient:
            self.current_incipient_species_lineages.append(new_lineage)
        else:
            self.current_full_species_lineages.append(new_lineage)
        return new_lineage

    def _new_node(self,
            lineage,
            ):
        """
        Creates and returns a new zero-length-edge node representing
        ``lineage`` on the tree, cross-linking the node and the lineage.
        """
        node = self.node_factory()
        node.edge.length = 0.0
        node.is_parent_of_full_species = None
        node._protracted_speciation_model_lineage = lineage
        lineage.node = node
        return node
diff --git a/dendropy/model/coalescent.py b/dendropy/model/coalescent.py
new file mode 100644
index 0000000..91f7cc9
--- /dev/null
+++ b/dendropy/model/coalescent.py
@@ -0,0 +1,686 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Functions, classes, and methods for working with Kingman's n-coalescent
+framework.
+"""
+
+import math
+import dendropy
+from dendropy.utility import GLOBAL_RNG
+from dendropy.utility import constants
+from dendropy.calculate import probability
+
+###############################################################################
+## Calculations and statistics
+
def discrete_time_to_coalescence(n_genes,
                                 pop_size=None,
                                 n_to_coalesce=2,
                                 rng=None):
    """
    A random draw from the "Kingman distribution" (discrete time version): Time
    to go from ``n_genes`` genes to ``n_genes``-1 genes in a discrete-time
    Wright-Fisher population of ``pop_size`` genes; i.e. waiting time until
    ``n-genes`` lineages coalesce in a population of ``pop_size`` genes.

    Parameters
    ----------

    n_genes : integer
        The number of genes in the sample.
    pop_size : integer
        The effective *haploid* population size; i.e., number of genes in the
        population: 2 * N in a diploid population of N individuals, or N in a
        haploid population of N individuals. If ``None`` (or 0), the waiting
        time is reported in haploid population units.
    n_to_coalesce : integer
        The waiting time that will be returned will be the waiting time for
        this number of genes in the sample to coalesce.
    rng : `Random`
        The random number generator instance.

    Returns
    -------
    k : integer
        A randomly-generated waiting time (in discrete generations) for
        ``n_to_coalesce`` genes to coalesce out of a sample of ``n_genes`` in a
        population of ``pop_size`` genes.

    """
    if not pop_size:
        time_units = 1.0
        # BUG FIX: ``pop_size`` defaults to None, but was used directly in
        # the arithmetic below, raising a TypeError. Substitute an effective
        # population size of 1 so the result is reported in haploid
        # population units, mirroring ``time_to_coalescence``.
        pop_size = 1.0
    else:
        time_units = pop_size
    if rng is None:
        rng = GLOBAL_RNG
    # NOTE(review): the per-generation coalescence probability used here is
    # pop_size / choose(n_genes, n_to_coalesce); the conventional Kingman
    # approximation is choose(n_genes, n_to_coalesce) / pop_size, and the
    # expression below can exceed 1 for large pop_size -- confirm the
    # intended formula before relying on absolute values.
    p = pop_size / probability.binomial_coefficient(n_genes, n_to_coalesce)
    tmrca = probability.geometric_rv(p)
    return tmrca * time_units
+
def time_to_coalescence(n_genes,
        pop_size=None,
        n_to_coalesce=2,
        rng=None):
    """
    A random draw from the "Kingman distribution" (continuous-time version):
    the waiting time until ``n_to_coalesce`` gene lineages coalesce out of a
    sample of ``n_genes`` lineages evolving in a Wright-Fisher population of
    ``pop_size`` genes.

    The waiting time for coalescence of any ``n_to_coalesce`` lineages out of
    ``n_genes`` is exponentially distributed with rate
    ``choose(n_genes, n_to_coalesce)`` when time is measured in haploid
    population units. If ``pop_size`` is 1, 0, or ``None``, the returned time
    is in those units (1 unit = 2N generations for a diploid population of N
    individuals, or N generations for a haploid population of N individuals);
    otherwise the draw is scaled by ``pop_size`` so that the returned time is
    in generations.

    Parameters
    ----------
    n_genes : integer
        The number of genes in the sample.
    pop_size : integer
        The effective *haploid* population size; i.e., number of genes in the
        population: 2 * N in a diploid population of N individuals, or N in a
        haploid population of N individuals.
    n_to_coalesce : integer
        The waiting time that will be returned will be the waiting time for
        this number of genes in the sample to coalesce.
    rng : `Random`
        The random number generator instance to use; defaults to
        ``GLOBAL_RNG``.

    Returns
    -------
    k : float
        A randomly-generated waiting time (in continuous time) for
        ``n_to_coalesce`` genes to coalesce out of a sample of ``n_genes`` in a
        population of ``pop_size`` genes.
    """
    if rng is None:
        rng = GLOBAL_RNG
    # Scale factor converting haploid population units into generations.
    time_units = pop_size if pop_size else 1.0
    # Any n_to_coalesce lineages out of n_genes may coalesce, so the total
    # rate is the number of such subsets.
    coalescence_rate = probability.binomial_coefficient(n_genes, n_to_coalesce)
    waiting_time = rng.expovariate(coalescence_rate)
    return waiting_time * time_units
+
def expected_tmrca(n_genes, pop_size=None, n_to_coalesce=2):
    """
    Expected (mean) value of the time to the most recent common ancestor of
    ``n_to_coalesce`` genes in a sample of ``n_genes`` drawn from a population
    of ``pop_size`` genes.

    Parameters
    ----------
    n_genes : integer
        The number of genes in the sample.
    pop_size : integer
        The effective *haploid* population size; i.e., number of genes in the
        population: 2 * N in a diploid population of N individuals, or N in a
        haploid population of N individuals. If ``None``, the expectation is
        returned in haploid population units.
    n_to_coalesce : integer
        The waiting time that will be returned will be the waiting time for
        this number of genes in the sample to coalesce.

    Returns
    -------
    k : float
        The expected waiting time (in continuous time) for ``n_to_coalesce``
        genes to coalesce out of a sample of ``n_genes`` in a population of
        ``pop_size`` genes.
    """
    # Mean of an exponential waiting time with rate
    # choose(n_genes, n_to_coalesce), in haploid population units.
    expectation = 1.0 / probability.binomial_coefficient(n_genes, n_to_coalesce)
    if pop_size is None:
        return expectation
    return expectation * pop_size
+
def coalesce_nodes(nodes,
             pop_size=None,
             period=None,
             rng=None,
             use_expected_tmrca=False):
    """
    Returns a list of nodes that have not yet coalesced once ``period`` is
    exhausted.

    This function will draw a coalescence time, ``t``, from an exponential
    distribution with a rate of ``choose(k, 2)``, where ``k`` is the number of
    nodes. If ``period`` is given and if this time is less than ``period``, or if
    ``period`` is not given, then two nodes are selected at random from ``nodes``,
    and coalesced: a new node is created, and the two nodes are added as
    child_nodes to this node with an edge length such that the total length from
    tip to the ancestral node is equal to the depth of the deepest child + ``t``.
    The two nodes are removed from the list of nodes, and the new node is added
    to it. ``t`` is then deducted from ``period``, and the process repeats.

    The function ends and returns the list of nodes once ``period`` is
    exhausted or if any draw of ``t`` exceeds ``period``, if ``period`` is
    given or when there is only one node left.

    As each coalescent event occurs, *all* nodes have their edges
    extended to the point of the coalescent event. In the case of
    constrained coalescence, all uncoalesced nodes have their edges
    extended to the end of the period (coalesced nodes have the edges
    fixed by the coalescent event in their ancestor).  Thus multiple
    calls to this method with the same set of nodes will gradually
    'grow' the edges, until all the nodes coalesce. The edge
    lengths of the nodes passed to this method thus should not be
    modified or reset until the process is complete.

    Parameters
    ----------
    nodes : iterable[|Node|]
        An iterable of |Node| objects representing a sample of neutral
        genes (some, all, or none of these nodes may have descendent nodes).
    pop_size : integer
        The effective *haploid* population size; i.e., number of genes in the
        population: 2 * N in a diploid population of N individuals, or N in a
        haploid population of N individuals.
    period : numeric
        The time that the genes have to coalesce. If ``pop_size`` is 1 or 0 or
        None, then time is in haploid population units; i.e. where 1 unit of
        time equals 2N generations for a diploid population of size N, or N
        generations for a haploid population of size N. Otherwise time is in
        generations.
    rng : `Random`
        The random number generator instance to use. If not specified, the
        default RNG will be used.
    use_expected_tmrca : bool
        If `True`, then instead of random times, the *expected* times will be
        used.

    Returns
    -------
    nodes : iterable[|Node|]
        A list of nodes once ``period`` is exhausted or if any draw of ``t``
        exceeds ``period``, if ``period`` is given or when there is only one node
        left.
    """

    # idiot-check, because I can be an idiot
    if not nodes:
        return []

    # set the random number generator
    if rng is None:
        rng = GLOBAL_RNG

    # define the function needed to create new coalescence nodes:
    # new ancestors are instances of the same class as the input nodes
    new_node = nodes[0].__class__

    # make a shallow copy of the node list, so the caller's list is not
    # mutated as lineages coalesce
    nodes = list(nodes)

    # start tracking the time remaining (None means unconstrained)
    time_remaining = period

    # If there is no time constraint, we want to continue coalescing
    # until there is only one gene left in the pool. If there is a
    # time constraint, we continue as long as there is time remaining,
    # but we do not control for that here: it is automatically taken
    # care of when the time drawn for the next coalescent event
    # exceeds the time remaining, and triggers a break from the loop
    while len(nodes) > 1:

        if use_expected_tmrca:
            tmrca = expected_tmrca(len(nodes), pop_size=pop_size)
        else:
            # draw a time to coalesce: this will be an exponential random
            # variable with parameter (rate) of BINOMIAL[n_genes 2]
            # multiplied pop_size
            tmrca = time_to_coalescence(len(nodes), pop_size=pop_size, rng=rng)

        # if no time_remaining is given (i.e, we want to coalesce till
        # there is only one gene left) or, if we are working under the
        # constrained coalescent, if the time to the next coalescence
        # event is not longer than the time_remaining
        if time_remaining is None or tmrca <= time_remaining:

            # stretch out the edges of all the nodes to this time
            for node in nodes:
                if node.edge.length is None:
                    node.edge.length = 0.0
                node.edge.length = node.edge.length + tmrca

            # pick two nodes to coalesce at random
            to_coalesce = rng.sample(nodes, 2)

            # create the new ancestor of these nodes
            new_ancestor = new_node()

            # add the nodes as child nodes of the new node, their
            # common ancestor, and set the ancestor's edge length
            new_ancestor.add_child(to_coalesce[0])
            new_ancestor.add_child(to_coalesce[1])
            new_ancestor.edge.length = 0.0

            # remove the nodes that have coalesced from the pool of
            # nodes
            nodes.remove(to_coalesce[0])
            nodes.remove(to_coalesce[1])

            # add the ancestor to the pool of nodes
            nodes.append(new_ancestor)

            # adjust the time_remaining left to coalesce
            if time_remaining is not None:
                time_remaining = time_remaining - tmrca
        else:
            # the next coalescent event takes place after the period constraint
            break

    # adjust the edge lengths of all the nodes, so they are at the
    # correct height, with the edges 'lining up' at the end of
    # coalescent period
    if time_remaining is not None and time_remaining > 0:
        for node in nodes:
            if node.edge.length is None:
                node.edge.length = 0.0
            node.edge.length = node.edge.length + time_remaining

    # return the list of nodes that have not coalesced
    return nodes
+
def node_waiting_time_pairs(tree, ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION):
    """
    Returns a list of (node, coalescent interval time) tuples for the tree.

    Each tuple pairs an internal node (a coalescence event on the tree) with
    the waiting time between that event and the previous (more recent) one.
    The very first tuple's second element is the age of the shallowest
    internal node itself.

    Parameters
    ----------
    tree : |Tree|
        A tree instance.
    ultrametricity_precision : float
        When calculating the node ages, an error will be raised if the tree is
        not ultrametric. This error may be due to floating-point or numerical
        imprecision. You can set the precision of the ultrametricity validation
        by setting the ``ultrametricity_precision`` parameter. E.g., use
        ``ultrametricity_precision=0.01`` for a more relaxed precision, down to
        2 decimal places. Use ``ultrametricity_precision=False`` to disable
        checking of ultrametricity.

    Returns
    -------
    x : list of tuples (node, coalescent interval)
        List of (node, waiting time between the previous coalescent event and
        this node's age) tuples.
    """
    tree.calc_node_ages(ultrametricity_precision=ultrametricity_precision)
    # internal nodes sorted from most recent (smallest age) to oldest
    aged_nodes = sorted(
            ((nd, nd.age) for nd in tree.internal_nodes()),
            key=lambda pair: pair[1])
    # first entry keeps its raw age; subsequent entries carry the
    # difference from the previous event
    intervals = [aged_nodes[0]]
    for (_, prev_age), (nd, age) in zip(aged_nodes, aged_nodes[1:]):
        intervals.append((nd, age - prev_age))
    return intervals
+
def extract_coalescent_frames(tree, ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION):
    """
    Returns a mapping describing the coalescent frames on the tree. Each key
    is the number of separate lineages remaining on the tree immediately
    before a coalescence event, and the corresponding value is the time
    between this coalescence event and the earlier (more recent) one.

    Parameters
    ----------
    tree : |Tree|
        A tree instance.
    ultrametricity_precision : float
        When calculating the node ages, an error will be raised if the tree is
        not ultrametric. This error may be due to floating-point or numerical
        imprecision. You can set the precision of the ultrametricity validation
        by setting the ``ultrametricity_precision`` parameter. E.g., use
        ``ultrametricity_precision=0.01`` for a more relaxed precision, down to
        2 decimal places. Use ``ultrametricity_precision=False`` to disable
        checking of ultrametricity.

    Returns
    -------
    x : dict
        Dictionary with key = number of alleles (lineages), and value =
        waiting time for the coalescent event at that lineage count.
    """
    nwti = node_waiting_time_pairs(tree, ultrametricity_precision=ultrametricity_precision)
    # Start with one lineage per leaf; each coalescence at a node with c
    # children reduces the lineage count by (c - 1).
    num_genes = len(tree.leaf_nodes())
    num_genes_wt = {}
    for n in nwti:
        num_genes_wt[num_genes] = n[1]
        num_genes = num_genes - len(n[0].child_nodes()) + 1
    # NOTE: removed dead code -- a stray ``import sys`` and an unused
    # ``num_alleles_list`` computation that had no effect on the result.
    return num_genes_wt
+
def log_probability_of_coalescent_frames(coalescent_frames, haploid_pop_size):
    r"""
    Returns the log-probability of the waiting times in ``coalescent_frames``
    under the classical neutral coalescent.

    Under the classical neutral coalescent \citep{Kingman1982,
    Kingman1982b}, the waiting times between coalescent events in a
    sample of $k$ alleles segregating in a  population of (haploid) size
    $N_e$ is distributed exponentially with a rate parameter of
    $\frac{{k \choose 2}}{N_e}$::

         \Pr(T) =  \frac{{k \choose 2}}{N_e} \e{-  \frac{{k \choose 2}}{N_e} T},

    where $T$ is the length of  (chronological) time in which there are
    $k$ alleles in the sample (i.e., for $k$ alleles to coalesce into
    $k-1$ alleles).

    (The docstring is a raw string: the original non-raw version silently
    turned the ``\f`` in ``\frac`` into a form-feed character and relied on
    invalid escape sequences such as ``\c`` and ``\e``.)

    Parameters
    ----------
    coalescent_frames : dict
        Mapping of number of alleles ``k`` to waiting time ``t``, as produced
        by ``extract_coalescent_frames()``.
    haploid_pop_size : numeric
        The (haploid) effective population size, $N_e$.

    Returns
    -------
    lp : float
        Sum of the log-densities of each waiting time under an exponential
        distribution with rate ``choose(k, 2) / haploid_pop_size``.
    """
    lp = 0.0
    for k, t in coalescent_frames.items():
        # rate = C(k, 2) / N_e, with the binomial coefficient written out
        k2N = (float(k * (k-1)) / 2) / haploid_pop_size
        lp = lp + math.log(k2N) - (k2N * t)
    return lp
+
def log_probability_of_coalescent_tree(tree, haploid_pop_size, ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION):
    """
    Wraps up extraction of coalescent frames and reporting of probability.

    Parameters
    ----------
    tree : |Tree|
        A tree instance.
    haploid_pop_size : numeric
        The (haploid) effective population size.
    ultrametricity_precision : float
        Precision for the ultrametricity check performed while calculating
        node ages; ``False`` disables the check.

    Returns
    -------
    lp : float
        The log-probability of the tree's coalescent waiting times.
    """
    # Bug fix: ``ultrametricity_precision`` was previously accepted but
    # never forwarded to extract_coalescent_frames().
    return log_probability_of_coalescent_frames(
            extract_coalescent_frames(tree,
                ultrametricity_precision=ultrametricity_precision),
            haploid_pop_size)
+
+###############################################################################
+## Tree Simulations
+
def contained_coalescent_tree(containing_tree,
        gene_to_containing_taxon_map,
        edge_pop_size_attr="pop_size",
        default_pop_size=1,
        rng=None):
    """
    Returns a gene tree simulated under the coalescent contained within a
    population or species tree.

        ``containing_tree``
            The population or species tree. If ``edge_pop_size_map`` is not None,
            and population sizes given are non-trivial (i.e., >1), then edge
            lengths on this tree are in units of generations. Otherwise edge
            lengths are in population units; i.e. 2N generations for diploid
            populations of size N, or N generations for diploid populations of
            size N.

        ``gene_to_containing_taxon_map``
            A TaxonNamespaceMapping object mapping Taxon objects in the
            ``containing_tree`` TaxonNamespace to corresponding Taxon objects in the
            resulting gene tree.

        ``edge_pop_size_attr``
            Name of attribute of edges that specify population size. By default
            this is "pop_size". If this attribute does not exist,
            ``default_pop_size`` will be used.  The value for this attribute
            should be the haploid population size or the number of genes;
            i.e.  2N for a diploid population of N individuals, or N for a
            haploid population of N individuals. This value determines how
            branch length units are interpreted in the input tree,
            ``containing_tree``.  If a biologically-meaningful value, then branch
            lengths on the ``containing_tree`` are properly read as generations.
            If not (e.g. 1 or 0), then they are in population units, i.e. where
            1 unit of time equals 2N generations for a diploid population of
            size N, or N generations for a haploid population of size N.
            Otherwise time is in generations. If this argument is None, then
            population sizes default to ``default_pop_size``.

        ``default_pop_size``
            Population size to use if ``edge_pop_size_attr`` is None or
            if an edge does not have the attribute. Defaults to 1.

    The returned gene tree will have the following extra attributes:

        ``pop_node_genes``
            A dictionary with nodes of ``containing_tree`` as keys and a list of gene
            tree nodes that are uncoalesced as values.

    Note that this function does very much the same thing as
    ``constrained_kingman()``, but provides a very different API.
    """

    if rng is None:
        rng = GLOBAL_RNG

    # map of containing (population/species) taxon -> list of gene taxa
    pop_gene_taxa = gene_to_containing_taxon_map.reverse

    gene_tree_taxon_namespace = gene_to_containing_taxon_map.domain_taxon_namespace
    if gene_tree_taxon_namespace is None:
        gene_tree_taxon_namespace = dendropy.TaxonNamespace()
        # Bug fix: this loop previously iterated over an undefined name,
        # ``pop_gene_taxa_map``, raising a NameError whenever the mapping
        # had no domain taxon namespace.
        for gene_taxa in pop_gene_taxa.values():
            for taxon in gene_taxa:
                gene_tree_taxon_namespace.add(taxon)
    gene_tree = dendropy.Tree(taxon_namespace=gene_tree_taxon_namespace)
    gene_tree.is_rooted = True

    # create one (leaf) gene node per gene taxon, grouped under the
    # containing tree node bearing the corresponding containing taxon
    pop_node_genes = {}
    for nd in containing_tree.postorder_node_iter():
        if nd.taxon and nd.taxon in pop_gene_taxa:
            pop_node_genes[nd] = []
            gene_taxa = pop_gene_taxa[nd.taxon]
            for gene_taxon in gene_taxa:
                gene_node = dendropy.Node()
                gene_node.taxon = gene_taxon
                pop_node_genes[nd].append(gene_node)

    for edge in containing_tree.postorder_edge_iter():

        # population size governing coalescence along this edge
        if edge_pop_size_attr and hasattr(edge, edge_pop_size_attr):
            pop_size = getattr(edge, edge_pop_size_attr)
        else:
            pop_size = default_pop_size
        if edge.head_node.parent_node is None:
            # root/mrca edge: coalesce all remaining lineages with no time
            # limit, using this edge's own population size (previously this
            # branch always used ``default_pop_size``, silently ignoring the
            # root edge's population-size attribute)
            if len(pop_node_genes[edge.head_node]) > 1:
                final = coalesce_nodes(nodes=pop_node_genes[edge.head_node],
                                            pop_size=pop_size,
                                            period=None,
                                            rng=rng)
            else:
                final = pop_node_genes[edge.head_node]
            gene_tree.seed_node = final[0]
        else:
            # internal/leaf edge: coalesce for at most the edge's duration;
            # lineages left over are passed up to the parent node's pool
            uncoal = coalesce_nodes(nodes=pop_node_genes[edge.head_node],
                                         pop_size=pop_size,
                                         period=edge.length,
                                         rng=rng)
            if edge.tail_node not in pop_node_genes:
                pop_node_genes[edge.tail_node] = []
            pop_node_genes[edge.tail_node].extend(uncoal)

    gene_tree.pop_node_genes = pop_node_genes
    return gene_tree
+
def pure_kingman_tree(taxon_namespace, pop_size=1, rng=None):
    """
    Generates a tree under the unconstrained Kingman's coalescent process.
    """
    if rng is None:
        # fall back to the shared module-level RNG
        rng = GLOBAL_RNG
    # one leaf lineage per taxon; coalesce them all (no time limit)
    lineages = [dendropy.Node(taxon=taxon) for taxon in taxon_namespace]
    root = coalesce_nodes(
            nodes=lineages,
            pop_size=pop_size,
            period=None,
            rng=rng,
            use_expected_tmrca=False)[0]
    return dendropy.Tree(taxon_namespace=taxon_namespace, seed_node=root)
+
def mean_kingman_tree(taxon_namespace, pop_size=1, rng=None):
    """
    Returns a tree with coalescent intervals given by the expected times under
    Kingman's neutral coalescent.
    """
    if rng is None:
        # fall back to the shared module-level RNG
        rng = GLOBAL_RNG
    # one leaf lineage per taxon; coalesce them all using *expected*
    # (deterministic) waiting times rather than random draws
    lineages = [dendropy.Node(taxon=taxon) for taxon in taxon_namespace]
    root = coalesce_nodes(
            nodes=lineages,
            pop_size=pop_size,
            period=None,
            rng=rng,
            use_expected_tmrca=True)[0]
    return dendropy.Tree(taxon_namespace=taxon_namespace, seed_node=root)
+
def constrained_kingman_tree(pop_tree,
                        gene_tree_list=None,
                        rng=None,
                        gene_node_label_fn=None,
                        num_genes_attr='num_genes',
                        pop_size_attr='pop_size',
                        decorate_original_tree=False):
    """
    Given a population tree, ``pop_tree`` this will return a *pair of
    trees*: a gene tree simulated on this population tree based on
    Kingman's n-coalescent, and population tree with the additional
    attribute 'gene_nodes' on each node, which is a list of
    uncoalesced nodes from the gene tree associated with the given
    node from the population tree.

    ``pop_tree`` should be a DendroPy Tree object or an object
    of a class derived from this with the following attribute
    ``num_genes`` -- the number of gene samples from each population in the
    present.  Each edge on the tree should also have the attribute

    ``pop_size_attr`` is the attribute name of the edges of ``pop_tree`` that
    specify the population size. By default it is ``pop_size``. The should
    specify the effective *haploid* population size; i.e., number of gene
    in the population: 2 * N in a diploid population of N individuals,
    or N in a haploid population of N individuals.

    If ``pop_size`` is 1 or 0 or None, then the edge lengths of ``pop_tree`` is
    taken to be in haploid population units; i.e. where 1 unit equals 2N
    generations for a diploid population of size N, or N generations for a
    haploid population of size N. Otherwise the edge lengths of ``pop_tree`` is
    taken to be in generations.

    If ``gene_tree_list`` is given, then the gene tree is added to the
    tree block, and the tree block's taxa block will be used to manage
    the gene tree's ``taxa``.

    ``gene_node_label_fn`` is a function that takes two arguments (a string
    and an integer, respectively, where the string is the containing species
    taxon label and the integer is the gene index) and returns a label for
    the corresponding the gene node.

    if ``decorate_original_tree`` is True, then the list of uncoalesced nodes at
    each node of the population tree is added to the original (input) population
    tree instead of a copy.

    Note that this function does very much the same thing as ``contained_coalescent()``,
    but provides a very different API.
    """

    # get our random number generator
    if rng is None:
        rng = GLOBAL_RNG # use the global rng by default

    if gene_tree_list is not None:
        gtaxa = gene_tree_list.taxon_namespace
    else:
        gtaxa = dendropy.TaxonNamespace()

    if gene_node_label_fn is None:
        gene_node_label_fn = lambda x, y: "%s_%02d" % (x, y)

    # we create a set of gene nodes for each leaf node on the population
    # tree, and associate those gene nodes to the leaf by assignment
    # of 'taxon'.
    for leaf_count, leaf in enumerate(pop_tree.leaf_node_iter()):
        gene_nodes = []
        for gene_count in range(getattr(leaf, num_genes_attr)):
            gene_node = dendropy.Node()
            gene_node.taxon = gtaxa.require_taxon(label=gene_node_label_fn(leaf.taxon.label, gene_count+1))
            gene_nodes.append(gene_node)
        leaf.gene_nodes = gene_nodes

    # We iterate through the edges of the population tree in post-order,
    # i.e., visiting child edges before we visit parent edges. For
    # each edge visited, we take the genes found in the child nodes,
    # and run the coalescent simulation on them attacheded by the length
    # of the edge. Any genes that have not yet coalesced at the end of
    # this period are added to the genes of the tail (parent) node of
    # the edge.

    if decorate_original_tree:
        working_poptree = pop_tree
    else:
        # start with a new (deep) copy of the population tree so as to not
        # to change the original tree
        working_poptree = dendropy.Tree(pop_tree)

    # start with a new tree
    gene_tree = dendropy.Tree()
    gene_tree.taxon_namespace = gtaxa
    for edge in working_poptree.postorder_edge_iter():

        # Determine the population size for *this* edge. Previously this was
        # only computed in the non-root branch, so the root branch read the
        # value left over from the previously visited edge (and raised a
        # NameError on a single-node tree).
        if hasattr(edge, pop_size_attr):
            pop_size = getattr(edge, pop_size_attr)
        else:
            # this means all our time will be in population units
            pop_size = 1

        # if mrca root, run unconstrained coalescent
        if edge.head_node.parent_node is None:
            if len(edge.head_node.gene_nodes) > 1:
                final = coalesce_nodes(nodes=edge.head_node.gene_nodes,
                                            pop_size=pop_size,
                                            period=None,
                                            rng=rng)
            else:
                final = edge.head_node.gene_nodes
            gene_tree.seed_node = final[0]
        else:
            uncoal = coalesce_nodes(nodes=edge.head_node.gene_nodes,
                                         pop_size=pop_size,
                                         period=edge.length,
                                         rng=rng)
            if not hasattr(edge.tail_node, 'gene_nodes'):
                edge.tail_node.gene_nodes = []
            edge.tail_node.gene_nodes.extend(uncoal)

    gene_tree.is_rooted = True
    # both branches previously returned the identical tuple; only the
    # side effect of appending to ``gene_tree_list`` is conditional
    if gene_tree_list is not None:
        gene_tree_list.append(gene_tree)
    return gene_tree, working_poptree
diff --git a/dendropy/model/continuous.py b/dendropy/model/continuous.py
new file mode 100644
index 0000000..27d5168
--- /dev/null
+++ b/dendropy/model/continuous.py
@@ -0,0 +1,500 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Models and modeling of continuous character evolution.
+"""
+
+import math
+from functools import reduce
+import operator
+import dendropy
+from dendropy.utility import GLOBAL_RNG
+
class PhylogeneticIndependentConstrasts(object):
    """
    Phylogenetic Independent Contrasts.

    (NOTE(review): the class name's spelling, "Constrasts", is preserved as
    it is the public API name.)

    References:

        -   Felsenstein, J. 1985. Phylogenies and the comparative method. American
            Naturalist 125:1-15.
        -   Garland, T., Jr., Jr., A. F. Bennett, and E. L. Rezende. 2005.
            Phylogenetic approaches in comparative physiology. Journal of
            Experimental Biology 208:3015-3035.

    """

    def __init__(self,
            tree,
            char_matrix,
            polytomy_strategy=None):
        """

        Parameters
        ----------
        tree : |Tree| object
            Tree to use.
        char_matrix : |ContinuousCharacterMatrix|
            ContinuousCharacterMatrix that is the source of the data
        polytomy_strategy
            One of: 'error', 'ignore', 'resolve'.

                'error'
                    Throws an error if tree has polytomies.
                'ignore'
                    No error, but raw contrasts will not be calculated for
                    polytomies.
                'resolve'
                    Randomly resolve polytomies.

            Defaults to 'error' if not specified or set to None.
        """
        # internal state; ``self.tree`` / ``self.char_matrix`` assignments
        # below go through the property setters, which mark the analysis
        # dirty and clear any cached contrasts
        self._tree = None                   # deep copy of the input tree
        self._char_matrix = None
        self._is_dirty = None
        self._is_fully_analyzed = False     # True once every character column is analyzed
        self._polytomy_strategy = None
        self._character_contrasts = {}      # cache: character index -> {node id -> results dict}
        self._set_polytomy_strategy(polytomy_strategy)
        self.tree = tree
        self.char_matrix = char_matrix

    def _get_polytomy_strategy(self):
        return self._polytomy_strategy
    def _set_polytomy_strategy(self, polytomy_strategy):
        # normalize to lower case and validate; None defaults to 'error'
        if polytomy_strategy is None:
            self._polytomy_strategy = 'error'
        else:
            polytomy_strategy = polytomy_strategy.lower()
            if polytomy_strategy not in ['error', 'ignore', 'resolve']:
                raise ValueError("Invalid polytomy strategy: '%s'" % polytomy_strategy)
            else:
                self._polytomy_strategy = polytomy_strategy
    # read-only: strategy is fixed at construction time
    polytomy_strategy = property(_get_polytomy_strategy, None)

    def _get_tree(self):
        # lazily run the contrast analysis for every character column not
        # yet cached, so the returned tree carries complete ``pic`` data
        if not self._is_fully_analyzed:
            analyzed_chars = self._character_contrasts.keys()
            for idx in range(len(self.char_matrix[0])):
                if idx in analyzed_chars:
                    continue
                self._get_contrasts(idx)
            self._is_fully_analyzed = True
        return self._tree
    def _set_tree(self, tree):
        # work on a copy so the caller's tree is never modified
        self._tree = dendropy.Tree(tree)
        if self._polytomy_strategy == 'resolve':
            self._tree.resolve_polytomies()
        self.is_dirty = True
    tree = property(_get_tree, _set_tree, None, """\
            This tree will have an attribute added to each node, ``pic``. This
            attribute will be a dictionary with character (column) index as
            keys. Each column index will map to another dictionary that has the
            following keys (and values):

                - ``pic_state_value``
                - ``pic_state_variance``
                - ``pic_contrast_raw``
                - ``pic_contrast_variance``
                - ``pic_contrast_standardized``
                - ``pic_edge_length_error``
                - ``pic_corrected_edge_length``
            """)

    def _get_char_matrix(self):
        return self._char_matrix
    def _set_char_matrix(self, char_matrix):
        self._char_matrix = char_matrix
        self.is_dirty = True
    char_matrix = property(_get_char_matrix, _set_char_matrix)

    def _get_is_dirty(self):
        return self._is_dirty
    def _set_is_dirty(self, is_dirty):
        # marking dirty invalidates all cached per-character contrasts
        self._is_dirty = is_dirty
        if self._is_dirty:
            self._character_contrasts = {}
            self._is_fully_analyzed = False
    is_dirty = property(_get_is_dirty, _set_is_dirty)

    def _get_contrasts(self, character_index):
        """
        Main work-horse method. If needed, adds an entry to
        self._character_constrants, with key being the character index, and a
        value being another dictionary that contains the constrast information.
        This second dictionary has the node's id as a key and as a value the a
        dictionary with the following:

                - ``pic_state_value``
                - ``pic_state_variance``
                - ``pic_contrast_raw``
                - ``pic_contrast_variance``
                - ``pic_contrast_standardized``
                - ``pic_edge_length_error``
                - ``pic_corrected_edge_length``

        """
        # return cached results if this character was already analyzed
        if character_index in self._character_contrasts:
            return self._character_contrasts[character_index]
        all_results = {}
        # postorder: children are always processed before their parent
        for nd in self._tree.postorder_node_iter():
            nd_results = {}
            child_nodes = nd.child_nodes()
            if len(child_nodes) == 0:
                # leaf: state comes straight from the character matrix;
                # no contrast is defined at a tip
                nd_results['pic_state_value'] = self._char_matrix[nd.taxon][character_index]
                nd_results['pic_state_variance'] = None
                nd_results['pic_contrast_raw'] = None
                nd_results['pic_contrast_variance'] = None
                nd_results['pic_contrast_standardized'] = None
                nd_results['pic_edge_length_error'] = 0.0
                nd_results['pic_corrected_edge_length'] = nd.edge.length
            elif len(child_nodes) == 1:
                # root node?
                nd_results['pic_state_value'] = None
                nd_results['pic_state_variance'] = None
                nd_results['pic_contrast_raw'] = None
                nd_results['pic_contrast_variance'] = None
                nd_results['pic_contrast_standardized'] = None
                nd_results['pic_edge_length_error'] = None
                nd_results['pic_corrected_edge_length'] = None
            else:
                # internal node: combine child states/edge lengths per
                # Felsenstein (1985)
                state_vals = []
                corrected_edge_lens = []
                actual_edge_lens = []
                for cnd in child_nodes:
                    state_vals.append(all_results[cnd._track_id]['pic_state_value'])
                    actual_edge_lens.append(cnd.edge.length)
                    if all_results[cnd._track_id]['pic_corrected_edge_length'] is not None:
                        corrected_edge_lens.append(all_results[cnd._track_id]['pic_corrected_edge_length'])
                    else:
                        corrected_edge_lens.append(cnd.edge.length)
                n = len(state_vals)
                # ancestral state estimate: average of child states weighted
                # by the reciprocal of their corrected edge lengths
                numerator_fn = lambda i : (1.0/corrected_edge_lens[i]) * state_vals[i]
                denominator_fn = lambda i  : 1.0/corrected_edge_lens[i]
                nd_results['pic_state_value'] = \
                        sum(numerator_fn(i) for i in range(n)) \
                        / sum(denominator_fn(i) for i in range(n))

                sum_of_child_edges = sum(corrected_edge_lens)
                prod_of_child_edges = reduce(operator.mul, corrected_edge_lens)
                nd_results['pic_edge_length_error'] = (  prod_of_child_edges / (sum_of_child_edges) )
                if nd.edge.length is not None:
                    # edge length is lengthened to absorb the estimation error
                    nd_results['pic_corrected_edge_length'] = nd.edge.length + nd_results['pic_edge_length_error']
                else:
                    nd_results['pic_corrected_edge_length'] = None
                nd_results['pic_state_variance'] = nd_results['pic_corrected_edge_length']

                if len(child_nodes) != 2:
                    # polytomy: contrasts defined only for bifurcations
                    if self._polytomy_strategy == "ignore":
                        nd_results['pic_contrast_raw'] = None
                        nd_results['pic_contrast_standardized'] = None
                        nd_results['pic_contrast_variance'] = sum_of_child_edges
                    else:
                        raise ValueError("Tree is not fully-bifurcating")
                else:
                    nd_results['pic_contrast_raw'] = state_vals[0] - state_vals[1]
                    nd_results['pic_contrast_standardized'] = nd_results['pic_contrast_raw'] / (sum_of_child_edges ** 0.5)
                    nd_results['pic_contrast_variance'] = sum_of_child_edges

            nd._track_id = id(nd) # will get cloned
            all_results[nd._track_id] = nd_results
            try:
                nd.pic[character_index] = dict(nd_results)
            except AttributeError:
                # first character analyzed for this node: create the dict
                nd.pic = {character_index: dict(nd_results)}
        self._character_contrasts[character_index] = dict(all_results)
        return self._character_contrasts[character_index]

    def contrasts_tree(self,
            character_index,
            annotate_pic_statistics=True,
            state_values_as_node_labels=False,
            corrected_edge_lengths=False):
        """
        Returns a Tree object annotated with the following attributes added
        to each node (as annotations to be serialized if
        ``annotate_pic_statistics`` is True):

            - ``pic_state_value``
            - ``pic_state_variance``
            - ``pic_contrast_raw``
            - ``pic_contrast_variance``
            - ``pic_contrast_standardized``
            - ``pic_edge_length_error``
            - ``pic_corrected_edge_length``

        """
        contrasts = self._get_contrasts(character_index)
        # annotate a *copy* of the analysis tree; ``_track_id`` survives the
        # clone and links each copied node back to its results
        tree = dendropy.Tree(self._tree)
        for nd in tree.postorder_node_iter():
            nd_results = contrasts[nd._track_id]
            for k, v in nd_results.items():
                setattr(nd, k, v)
                if annotate_pic_statistics:
                    nd.annotations.add_bound_attribute(k)
            if corrected_edge_lengths and nd_results['pic_corrected_edge_length'] is not None:
                nd.edge.length = nd_results['pic_corrected_edge_length']
            if state_values_as_node_labels:
                nd.label = str(nd_results['pic_state_value'])
        return tree
+
def evolve_continuous_char(node, rng=None, **kwargs):
    """
    "Evolves" a set of rates on the subtree descending from ``node``, using
    random number generator ``rng`` (defaults to ``GLOBAL_RNG``).

    Recognized keyword arguments:

    ``roeotroe``
        The rate of evolution of the rate of evolution; controls the degree
        of deviation away from the molecular clock (default 1.0).
    ``min_rate``
        The minimum rate (default 0.0).
    ``max_rate``
        The maximum rate (default None, i.e. unbounded above).
    ``model``
        Name of the model of rate evolution. Currently only "KTB"
        (Kishino, Thorne, Bruno) is supported.
    ``time_attr``
        Name of the node attribute that gives the branch length in terms of
        time (default "edge_length").
    ``val_attr``
        Name of the node attribute used to hold the value that is evolving
        along the nodes. The root of the subtree is assumed to have this
        attribute on calling of the function; on success all nodes in the
        subtree will have the attribute (default "mutation_rate").
    ``mean_val_attr``
        If specified, the name of the node attribute that will hold the mean
        value for the branch (default None). This is filled in after
        ``time_attr`` and ``val_attr`` are read, so it is permissible for it
        to match one of those strings (although the model will be odd if it
        is the same as ``val_attr``).
    ``constrain_rate_mode``
        Controls the behavior when the minimum or maximum rate is exceeded
        by a simulated value. "crop" sets the rate to the most extreme value
        allowed. "linear_bounce" considers the path of evolution of the rate
        to be a simple line from the ancestor's rate to the proposed rate;
        the point at which the path crosses the extreme value is determined
        and the rate is "reflected off" the limiting rate at that point.
        This causes the rate to avoid the extreme values more than a
        simulation of small time slices that simply rejects illegal rates.

    Currently the only model supported is the one of Kishino, Thorne, and
    Bruno, "Performance of a Divergence Time Estimation Method under a
    Probabilistic Model of Rate Evolution", Molecular Biology and Evolution
    (2001) vol. 18 (3) pp. 352-361, specified by the code "KTB". A node's
    rate is a log-normal variate with variance determined by the product of
    the duration of the branch and the ``roeotroe`` parameter. The mean of
    the distribution is chosen such that the mean of the log-normal is
    identical to the rate at the parent. The mean rate for the branch is the
    average of the rates at the endpoints.

    Raises
    ------
    ValueError
        If an unsupported model or constraint mode is requested, if the rate
        bounds are inconsistent, or if the root's rate lies outside the
        allowed bounds.
    """
    if rng is None:
        rng = GLOBAL_RNG
    nd_iter = node.preorder_iter()
    # Skip the first node -- it is assumed to already carry a rate.
    next(nd_iter)
    if kwargs.get("model", "KTB").upper() != "KTB":
        raise ValueError("Only the Kishino-Thorne-Bruno model is supported at this time")
    val_attr = kwargs.get("val_attr", "mutation_rate")
    if not val_attr:
        raise ValueError("val_attr cannot be an empty string")
    time_attr = kwargs.get("time_attr", "edge_length")
    mean_val_attr = kwargs.get("mean_val_attr")
    constrain_rate_mode = kwargs.get("constrain_rate_mode", "crop").lower()
    if constrain_rate_mode not in ["crop", "linear_bounce"]:
        raise ValueError('Only "crop" and "linear_bounce" are supported at this time')
    roeotroe = kwargs.get("roeotroe", 1.0)
    min_rate = kwargs.get("min_rate", 0.0)
    if min_rate < 0.0:
        raise ValueError("min_rate cannot be less than 0")
    max_rate = kwargs.get("max_rate")
    anc_rate = getattr(node, val_attr)
    if max_rate is not None:
        if min_rate is not None:
            if min_rate > max_rate:
                raise ValueError("max_rate must be greater than the min_rate")
            if min_rate == max_rate:
                # Degenerate case: only one rate is possible.
                for nd in nd_iter:
                    setattr(nd, val_attr, min_rate)
                    if mean_val_attr:
                        # here we assume that the rate changed from the
                        #   ancestral rate to the only allowed rate
                        #   instantaneously, so the mean rate is min_rate
                        setattr(nd, mean_val_attr, min_rate)
                return
        if max_rate <= 0.0:
            raise ValueError("max_rate must be positive")
        if anc_rate > max_rate:
            raise ValueError("rate for the incoming node is > max_rate")
    if (min_rate is not None) and anc_rate < min_rate:
        # Fixed: this branch previously raised the "> max_rate" message,
        # misreporting a minimum-bound violation as a maximum-bound one.
        raise ValueError("rate for the incoming node is < min_rate")

    if constrain_rate_mode == "crop":
        rate_fn = _calc_KTB_rates_crop
    else:
        rate_fn = _calc_KTB_rates_linear_bounce
    for nd in nd_iter:
        starting_rate = getattr(nd.parent_node, val_attr)
        duration = getattr(nd, time_attr)
        r, mr = rate_fn(starting_rate, duration, roeotroe, rng, min_rate, max_rate)
        setattr(nd, val_attr, r)
        if mean_val_attr:
            setattr(nd, mean_val_attr, mr)
+
def _bounce_constrain(start_x, x, min_x=None, max_x=None):
    """
    Reflect a proposed value off barriers and track the path's mean.

    A variable is assumed to move linearly from ``start_x`` toward ``x``,
    bouncing off the barriers ``min_x`` and/or ``max_x`` whenever it would
    cross them. Returns a tuple ``(final_x, mean_x)`` where ``final_x`` is
    the value after all reflections and ``mean_x`` is the time-weighted
    mean of the variable over the whole path.

    ``start_x`` must lie within the legal range. If ``x`` is already legal,
    the result is simply ``(x, (x + start_x)/2.0)``, reflecting the fact
    that the arithmetic mean of the endpoints is the mean value of a
    direct, constant-rate path.
    """
    if max_x is not None and min_x is not None:
        assert(max_x > min_x)
    above = max_x is not None and x > max_x
    below = min_x is not None and x < min_x
    anchor = start_x      # where the current straight segment starts
    frac_left = 1.0       # fraction of the total duration not yet accounted for
    mean_accum = 0.0      # running time-weighted mean
    while above or below:
        if above:
            # Fraction of the remaining path spent before hitting max_x,
            # and the mean value over that rising segment.
            seg_frac = (max_x - anchor)/(x - anchor)
            seg_mean = (anchor + max_x)/2.0
            mean_accum += seg_frac*frac_left*seg_mean
            frac_left *= 1.0 - seg_frac
            x = 2*max_x - x   # reflect the overshoot back below max_x
            below = min_x is not None and x < min_x
            anchor = max_x
            above = False
        if below:
            seg_frac = (anchor - min_x)/(anchor - x)
            seg_mean = (anchor + min_x)/2.0
            mean_accum += frac_left*seg_frac*seg_mean
            frac_left *= 1.0 - seg_frac
            x = 2*min_x - x   # reflect the undershoot back above min_x
            below = False
            above = max_x is not None and x > max_x
            anchor = min_x
    # Final legal segment from the last anchor to the settled value.
    mean_accum += ((anchor + x)/2.0)*frac_left
    return x, mean_accum
+
def _calc_TKP_rate(starting_rate, duration, roeotroe, rng):
    """
    Draw a rate for the head node of an edge under the Thorne, Kishino,
    and Painter (1998) model.

    The tail node has rate ``starting_rate``, the edge spans ``duration``
    time units, and ``roeotroe`` is the rate of evolution of the rate of
    evolution (the parameter nu in Kishino, Thorne, and Bruno 2001).
    ``rng`` is a random number generator.

    Descendant rates are lognormally distributed: the variance of the
    normal distribution for the logarithm of the ending rate is the product
    of ``duration`` and ``roeotroe``, and its mean is the logarithm of
    ``starting_rate``. If that variance is not positive, ``starting_rate``
    is returned unchanged.
    """
    log_var = duration*roeotroe
    if log_var <= 0.0:
        return starting_rate
    return rng.lognormvariate(math.log(starting_rate), math.sqrt(log_var))
+
def _calc_KTB_rate(starting_rate, duration, roeotroe, rng):
    """
    Draw a rate for the head node of an edge under the Kishino, Thorne,
    and Bruno (2001) model.

    The tail node has rate ``starting_rate``, the edge spans ``duration``
    time units, and ``roeotroe`` is the rate of evolution of the rate of
    evolution (the parameter nu in Kishino, Thorne, and Bruno 2001).
    ``rng`` is a random number generator.

    Descendant rates are lognormally distributed with the variance of the
    underlying normal equal to ``duration * roeotroe``; if that variance is
    not positive, ``starting_rate`` is returned unchanged.

    Raises ValueError if ``starting_rate`` is not positive.
    """
    if starting_rate <= 0.0:
        raise ValueError("starting_rate must be positive in the KTB model")
    log_var = duration*roeotroe
    if log_var <= 0.0:
        return starting_rate
    # Kishino, Thorne and Bruno corrected the tendency for the rate to
    # increase seen in the TKP (1998) model by shifting the mean of the
    # underlying normal down by half the variance, so that the expectation
    # of the returned rate equals the parental rate.
    mu = math.log(starting_rate) - (log_var/2.0)
    return rng.lognormvariate(mu, math.sqrt(log_var))
+
def _calc_KTB_rates_crop(starting_rate, duration, roeotroe, rng,  min_rate=None, max_rate=None):
    """
    Draw a ``(rate, mean_rate)`` pair under the Kishino, Thorne, Bruno
    model, cropping at the rate bounds.

    Assumes ``min_rate <= starting_rate <= max_rate`` for whichever bounds
    are provided. If the drawn rate falls outside a bound it is cropped to
    that bound, and the mean rate is computed as if the rate changed
    linearly from ``starting_rate`` until hitting the bound at the
    appropriate point in the branch's duration and then stayed pinned there.
    """
    if roeotroe*duration <= 0.0:
        # No stochastic change is possible; the parent's rate must already
        # be legal.
        if (min_rate and starting_rate < min_rate) or (max_rate and starting_rate > max_rate):
            raise ValueError("Parent rate is out of bounds, but no rate change is possible")
    proposed = _calc_KTB_rate(starting_rate, duration, roeotroe, rng)
    if max_rate and proposed > max_rate:
        assert(starting_rate <= max_rate)
        # Fraction of the branch spent rising to the cap, then pinned at it.
        frac_moving = (max_rate - starting_rate)/(proposed - starting_rate)
        moving_mean = (starting_rate + max_rate)/2.0
        return max_rate, frac_moving*moving_mean + (1.0 - frac_moving)*max_rate
    if min_rate and proposed < min_rate:
        assert(starting_rate >= min_rate)
        frac_moving = (starting_rate - min_rate)/(starting_rate - proposed)
        moving_mean = (starting_rate + min_rate)/2.0
        return min_rate, frac_moving*moving_mean + (1.0 - frac_moving)*min_rate
    return proposed, (starting_rate + proposed)/2.0
+
def _calc_KTB_rates_linear_bounce(starting_rate, duration, roeotroe, rng,  min_rate=0.0, max_rate=None):
    """
    Draw a ``(rate, mean_rate)`` pair under the Kishino, Thorne, Bruno
    model, "bouncing" off the rate bounds.

    Assumes ``min_rate <= starting_rate <= max_rate`` for whichever bounds
    are provided. The rate is kept within ``[min_rate, max_rate]`` by
    reflecting off the barriers, where each "collision" point is estimated
    by assuming a linear change in rate from the beginning of the branch to
    the random variate drawn for its end.
    """
    if roeotroe*duration <= 0.0:
        # No stochastic change is possible; the parent's rate must already
        # be legal.
        if (min_rate and starting_rate < min_rate) or (max_rate and starting_rate > max_rate):
            raise ValueError("Parent rate is out of bounds, but no rate change is possible")
    proposed = _calc_KTB_rate(starting_rate, duration, roeotroe, rng)
    lower = 0.0 if min_rate is None else min_rate
    return _bounce_constrain(starting_rate, proposed, lower, max_rate)
+
diff --git a/dendropy/model/discrete.py b/dendropy/model/discrete.py
new file mode 100644
index 0000000..3c126ff
--- /dev/null
+++ b/dendropy/model/discrete.py
@@ -0,0 +1,572 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Models and modeling of discrete character evolution.
+"""
+
+import copy
+import math
+import itertools
+from dendropy.utility import GLOBAL_RNG
+from dendropy.calculate import probability
+import dendropy
+
+############################################################################
+## Character Evolution Modeling
+
class DiscreteCharacterEvolutionModel(object):
    """Base class for discrete character substitution models."""

    def __init__(self, state_alphabet, stationary_freqs=None, rng=None):
        """
        Records the state alphabet defining the character type on which
        this model acts, and the random number generator to use
        (``GLOBAL_RNG`` if ``rng`` is not given).
        ``stationary_freqs`` is accepted but not stored here.
        """
        self.state_alphabet = state_alphabet
        self.rng = GLOBAL_RNG if rng is None else rng

    def pmatrix(self, tlen, rate=1.0):
        """
        Returns a matrix of nucleotide substitution probabilities; concrete
        models must override this.
        """
        raise NotImplementedError

    def simulate_descendant_states(self,
        ancestral_states,
        edge_length,
        mutation_rate=1.0,
        rng=None):
        """
        Returns a descendant sequence evolved from ``ancestral_states``
        along a branch of length ``edge_length`` scaled by
        ``mutation_rate``, drawing each descendant state from the
        corresponding row of the transition-probability matrix.
        """
        if rng is None:
            rng = self.rng
        transition_probs = self.pmatrix(edge_length, mutation_rate)
        draw = probability.sample_multinomial
        return [
            self.state_alphabet[draw(transition_probs[ancestral.index], rng)]
            for ancestral in ancestral_states
        ]
+
class DiscreteCharacterEvolver(object):
    "Evolves sequences on a tree."

    def __init__(self,
     seq_model=None,
     mutation_rate=None,
     seq_attr='sequences',
     seq_model_attr="seq_model",
     edge_length_attr="length",
     edge_rate_attr="mutation_rate",
     seq_label_attr='taxon'):
        """
        Configures the evolver. ``seq_model`` and ``mutation_rate`` are the
        fallback substitution model and rate modifier; the remaining
        arguments name the node/edge attributes consulted (and written)
        during simulation.
        """
        self.seq_model = seq_model
        self.mutation_rate = mutation_rate
        self.seq_attr = seq_attr
        self.seq_model_attr = seq_model_attr
        self.edge_length_attr = edge_length_attr
        self.edge_rate_attr = edge_rate_attr
        self.seq_label_attr = seq_label_attr

    def evolve_states(self,
            tree,
            seq_len,
            root_states=None,
            simulate_root_states=True,
            in_place=True,
            rng=None):
        """
        Appends a new sequence of length ``seq_len`` to a list at each node
        in ``tree``.  The attribute name of this list in each node is given
        by ``seq_attr``. If ``seq_model`` is None, ``tree.seq_model`` or
        ``seq_model`` at each node must be specified. If ``in_place`` is
        False, the tree is copied first, otherwise original tree is modified.
        If ``root_states`` is given, this will be used as the sequence for the root.
        If not, and if ``simulate_root_states`` is True, then the sequence for the
        root will be drawn from the stationary distribution of the character model.
        Returns the tree simulated on (the copy, if ``in_place`` is False).
        """
        if rng is None:
            rng = GLOBAL_RNG
        if not in_place:
            tree = tree.clone(1) # ==> taxon_namespace_scoped_copy()

        if self.seq_model is None:
            # NOTE(review): this tree-level model is bound to a local that is
            #   never read below -- the per-edge lookups fall back directly
            #   to self.seq_model. Confirm whether a model attached to the
            #   tree is actually intended to take effect here.
            seq_model = getattr(tree, self.seq_model_attr, None)

        # loop through edges in preorder (root->tips); the root's edge (no
        # tail node) is visited first, so n_prev_seq is bound before any
        # child edge reads it
        for edge in tree.preorder_edge_iter():
            node = edge.head_node
            if not hasattr(node, self.seq_attr):
                setattr(node, self.seq_attr, [])
            seq_list = getattr(node, self.seq_attr)
            if edge.tail_node:
                par = edge.tail_node
                if len(seq_list) != n_prev_seq:
                    # every node must have accumulated the same number of
                    # sequences from previous calls
                    raise ValueError("'%s' length varies among nodes" % self.seq_attr)
                # evolve from the parent's most recently added sequence,
                # honoring per-edge model/rate overrides where present
                par_seq = getattr(par, self.seq_attr)[-1]
                seq_model  = getattr(edge, self.seq_model_attr, None) or self.seq_model
                length = getattr(edge, self.edge_length_attr)
                mutation_rate = getattr(edge, self.edge_rate_attr, None) or self.mutation_rate
                seq_list.append(seq_model.simulate_descendant_states(par_seq, length, mutation_rate))
            else:
                # no tail node: root
                n_prev_seq = len(seq_list)
                if root_states is not None:
                    seq_list.append(root_states)
                elif simulate_root_states:
                    seq_model  = getattr(node.edge, self.seq_model_attr, None) or self.seq_model
                    seq_list.append(seq_model.stationary_sample(seq_len, rng=rng))
                else:
                    # reuse the root's existing last sequence: decrement so
                    # that child counts (which each gain one new sequence)
                    # still match at the check above
                    assert n_prev_seq > 0
                    n_prev_seq -= 1
        return tree

    def extend_char_matrix_with_characters_on_tree(self,
            char_matrix,
            tree,
            include=None,
            exclude=None):
        """
        Creates a character matrix with new sequences (or extends sequences of
        an existing character matrix if provided via ``char_matrix``),
        where the sequence for each taxon corresponds to the concatenation
        of all sequences in the list of sequences associated with the tip that
        references the given taxon.
        Specific sequences to be included/excluded can be fine-tuned using the
        ``include`` and ``exclude`` args (each a container of sequence-list
        indexes), where ``include=None`` means to include all
        by default, and ``exclude=None`` means to exclude none by default.
        """
        for leaf in tree.leaf_nodes():
            cvec = char_matrix[leaf.taxon]
            seq_list = getattr(leaf, self.seq_attr)
            for seq_idx, seq in enumerate(seq_list):
                # a sequence is concatenated when it passes both filters
                if ((include is None) or (seq_idx in include))  \
                    and ((exclude is None) or (seq_idx not in exclude)):
                    for state in seq:
                        cvec.append(state)
        return char_matrix

    def clean_tree(self, tree):
        # Remove the per-node sequence lists added during simulation.
        for nd in tree:
            # setattr(nd, self.seq_attr, [])
            delattr(nd, self.seq_attr)
+
+############################################################################
## Specialized Models: nucleotides
+
class NucleotideCharacterEvolutionModel(DiscreteCharacterEvolutionModel):
    """General nucleotide substitution model."""

    def __init__(self, base_freqs=None, state_alphabet=None, rng=None):
        """
        Initializes the base model with a (DNA, by default) state alphabet
        and stores the equilibrium base frequencies, defaulting to uniform
        frequencies when ``base_freqs`` is not given.
        """
        if state_alphabet is None:
            state_alphabet = dendropy.DNA_STATE_ALPHABET
        DiscreteCharacterEvolutionModel.__init__(
                self,
                state_alphabet=state_alphabet,
                rng=rng)
        self.base_freqs = [0.25, 0.25, 0.25, 0.25] if base_freqs is None else base_freqs

    def stationary_sample(self, seq_len, rng=None):
        """
        Returns a sequence of ``seq_len`` states drawn from this model's
        stationary distribution (i.e. its equilibrium base frequencies).
        """
        freqs = self.base_freqs
        return [
            self.state_alphabet[probability.sample_multinomial(freqs, rng)]
            for _ in range(seq_len)
        ]

    def is_purine(self, state_index):
        """
        True if ``state_index`` denotes a purine (A or G), i.e. row/column
        index 0 or 2.
        """
        return state_index % 2 == 0

    def is_pyrimidine(self, state_index):
        """
        True if ``state_index`` denotes a pyrimidine (C or T), i.e.
        row/column index 1 or 3.
        """
        return not self.is_purine(state_index)

    def is_transversion(self, state1_idx, state2_idx):
        """
        True if the change from state 1 to state 2, as represented by the
        row/column indices, crosses between the purine and pyrimidine
        classes (a transversion).
        """
        return self.is_purine(state1_idx) != self.is_purine(state2_idx)

    def is_purine_transition(self, state1_idx, state2_idx):
        """
        True if both states, as represented by the row/column indices, are
        purines (a purine transitional change).
        """
        return self.is_purine(state1_idx) and self.is_purine(state2_idx)

    def is_pyrimidine_transition(self, state1_idx, state2_idx):
        """
        True if both states, as represented by the row/column indices, are
        pyrimidines (a pyrimidine transitional change).
        """
        return self.is_pyrimidine(state1_idx) and self.is_pyrimidine(state2_idx)

    def is_transition(self, state1_idx, state2_idx):
        """
        True if the change from state 1 to state 2, as represented by the
        row/column indices, stays within the purine or within the
        pyrimidine class (a transitional change).
        """
        return self.is_purine(state1_idx) == self.is_purine(state2_idx)
+
class Hky85(NucleotideCharacterEvolutionModel):
    """
    Hasegawa et al. 1985 model. Implementation following Swofford et
    al., 1996.
    """

    def __init__(self, kappa=1.0, base_freqs=None, state_alphabet=None, rng=None):
        """
        Creates an HKY85 model with transition/transversion rate ratio
        ``kappa`` and equilibrium ``base_freqs``; if no arguments are
        given, this reduces to JC69.
        """
        if state_alphabet is None:
            state_alphabet = dendropy.DNA_STATE_ALPHABET
        NucleotideCharacterEvolutionModel.__init__(
                self,
                base_freqs=base_freqs,
                state_alphabet=state_alphabet,
                rng=rng)
        # When True, rates are rescaled (see corrected_substitution_rate)
        # so that branch lengths are proportional to the expected number of
        # substitutions per site.
        self.correct_rate = True
        # Transition/transversion rate ratio parameter.
        self.kappa = kappa

    def __repr__(self):
        # Compact parameter summary for debugging.
        rep = "kappa=%f bases=%s" % (self.kappa, str(self.base_freqs))
        return rep

    def corrected_substitution_rate(self, rate):
        """Returns the factor that we have to multiply to the branch length
        to make branch lengths proportional to # of substitutions per site."""
        if self.correct_rate:
            # Equilibrium frequencies in alphabet order: A, C, G, T.
            pia = self.base_freqs[0]
            pic = self.base_freqs[1]
            pig = self.base_freqs[2]
            pit = self.base_freqs[3]
            # f = equilibrium substitution flux: transitions (A<->G, C<->T),
            # weighted by kappa, plus transversions.
            f = self.kappa*(pia*pig + pic*pit)
            f += (pia + pig)*(pic + pit)
            return (rate * 0.5/f)  # (rate * 0.5/f)
        else:
            return rate

    def pij(self, state_i, state_j, tlen, rate=1.0):
        """
        Returns probability, p_ij, of going from state i to state j
        over time tlen at given rate. (tlen * rate = nu, expected
        number of substitutions)
        """
        nu = self.corrected_substitution_rate(rate) * tlen
        # Sum of equilibrium frequencies over the purine/pyrimidine class
        # of the target state (Swofford et al. 1996 closed-form solution).
        if self.is_purine(state_j):
            sumfreqs = self.base_freqs[0] + self.base_freqs[2]
        else:
            sumfreqs = self.base_freqs[1] + self.base_freqs[3]
        factorA = 1 + (sumfreqs * (self.kappa - 1.0))
        if state_i == state_j:
            # no net change
            pij = self.base_freqs[state_j] \
                  + self.base_freqs[state_j] \
                      * (1.0/sumfreqs - 1) * math.exp(-1.0 * nu) \
                  + ((sumfreqs - self.base_freqs[state_j])/sumfreqs) \
                      * math.exp(-1.0 * nu * factorA)

        elif self.is_transition(state_i, state_j):
            # change within the purine or pyrimidine class
            pij = self.base_freqs[state_j] \
                  + self.base_freqs[state_j] \
                      * (1.0/sumfreqs - 1) * math.exp(-1.0 * nu) \
                  - (self.base_freqs[state_j] / sumfreqs) \
                      * math.exp(-1.0 * nu * factorA)
        else:
            # transversion
            pij = self.base_freqs[state_j] * (1.0 - math.exp(-1.0 * nu))
        return pij

    def qmatrix(self, rate=1.0):
        "Returns the instantaneous rate of change matrix."
        rate = self.corrected_substitution_rate(rate)
        qmatrix = []
        for state_i in range(4):
            qmatrix.append([])
            for state_j in range(4):
                if state_i == state_j:
                    # we cheat here and insert a placeholder till the
                    # other cells are calculated
                    qij = 0.0
                else:
                    if self.is_transition(state_i, state_j):
                        qij = rate * self.kappa * self.base_freqs[state_j]
                    else:
                        qij = rate * self.base_freqs[state_j]
                qmatrix[state_i].append(qij)
        for state in range(4):
            # each diagonal entry is the negative sum of its row's
            # off-diagonal entries, so that every row sums to zero
            qmatrix[state][state] = -1.0 * sum(qmatrix[state])
        return qmatrix

    def pvector(self, state, tlen, rate=1.0):
        """
        Returns a vector of transition probabilities for a given state
        over time ``tlen`` at rate ``rate`` for ``state``. (tlen * rate =
        nu, expected number of substitutions)
        """
        pvec = []
        # in case later we want to allow characters passed in here
        state_i = state
        for state_j in range(4):
            pvec.append(self.pij(state_i, state_j, tlen=tlen, rate=rate))
        return pvec

    def pmatrix(self, tlen, rate=1.0):
        """
        Returns a matrix of nucleotide substitution
        probabilities. Based on analytical solution by Swofford et
        al., 1996. (tlen * rate = nu, expected number of
        substitutions)
        """
        pmatrix = []
        for state_i in range(4):
            pmatrix.append(self.pvector(state_i, tlen=tlen, rate=rate))
        return pmatrix
+
class Jc69(Hky85):
    """
    Jukes-Cantor 1969 model: an HKY85 model constrained to kappa = 1.0 and
    equal base frequencies ([0.25, 0.25, 0.25, 0.25]).
    """
    def __init__(self, state_alphabet=None, rng=None):
        """Delegates to Hky85.__init__ with the JC69 constraints."""
        Hky85.__init__(
                self,
                kappa=1.0,
                base_freqs=[0.25, 0.25, 0.25, 0.25],
                state_alphabet=state_alphabet,
                rng=rng)
+
+
+
+##############################################################################
+## Wrappers for Convenience
+
def simulate_discrete_char_dataset(seq_len,
        tree_model,
        seq_model,
        mutation_rate=1.0,
        root_states=None,
        dataset=None,
        rng=None):
    """
    Wrapper to conveniently generate a DataSet simulated under
    the given tree and character model.

    Parameters
    ----------
    seq_len       : int
        Length of sequence (number of characters).
    tree_model    : |Tree|
        Tree on which to simulate.
    seq_model     : dendropy.model.discrete.DiscreteCharacterEvolutionModel
        The character substitution model under which to evolve the
        characters.
    mutation_rate : float
        Mutation *modifier* rate (should be 1.0 if branch lengths on tree
        reflect true expected number of changes).
    root_states   : list
        Vector of root states (length must equal ``seq_len``).
    dataset       : |DataSet|
        If given, the new dendropy.CharacterMatrix object will be
        added to this (along with a new taxon_namespace if
        required). Otherwise, a new dendropy.DataSet
        object will be created.
    rng           : random number generator
        If not given, 'GLOBAL_RNG' will be used.

    Returns
    -------
    d : |DataSet|

    """
    if dataset is None:
        dataset = dendropy.DataSet()
    if tree_model.taxon_namespace not in dataset.taxon_namespaces:
        # Register the tree's taxon namespace with the data set (side
        # effect only; the namespace itself is reused as-is).
        dataset.add_taxon_namespace(tree_model.taxon_namespace)
    char_matrix = simulate_discrete_chars(
        seq_len=seq_len,
        tree_model=tree_model,
        seq_model=seq_model,
        mutation_rate=mutation_rate,
        root_states=root_states,
        char_matrix=None,
        # Fixed: previously hard-coded to None, silently ignoring the
        # caller's rng and defeating reproducible seeding.
        rng=rng)
    dataset.add_char_matrix(char_matrix=char_matrix)
    return dataset
+
def simulate_discrete_chars(
        seq_len,
        tree_model,
        seq_model,
        mutation_rate=1.0,
        root_states=None,
        char_matrix=None,
        retain_sequences_on_tree=False,
        rng=None):
    """
    Wrapper to conveniently generate characters simulated under
    the given tree and character model.

    Since characters will be appended to existing sequences, you can simulate
    sequences under a mixed model by calling this method multiple times with
    different character models and/or different mutation rates, passing
    in the same ``char_matrix`` object each time.

    Parameters
    ----------
    seq_len       : int
        Length of sequence (number of characters).
    tree_model    : |Tree|
        Tree on which to simulate.
    seq_model     : dendropy.model.discrete.DiscreteCharacterEvolutionModel
        The character substitution model under which to evolve the
        characters.
    mutation_rate : float
        Mutation *modifier* rate (should be 1.0 if branch lengths on tree
        reflect true expected number of changes).
    root_states   : list
        Vector of root states (length must equal ``seq_len``).
    char_matrix   : |DnaCharacterMatrix|
        If given, new sequences for taxa on ``tree_model`` leaf_nodes will be
        appended to existing sequences of corresponding taxa in char_matrix; if
        not, a new |DnaCharacterMatrix| object will be created.
    retain_sequences_on_tree : bool
        If `False`, sequence annotations will be cleared from tree after
        simulation. Set to `True` if you want to, e.g., evolve and accumulate
        different sequences on tree, or retain information for other purposes.
    rng           : random number generator
        If not given, 'GLOBAL_RNG' will be used.

    Returns
    -------
    d : a dendropy.datamodel.CharacterMatrix object.

    """
    seq_evolver = DiscreteCharacterEvolver(seq_model=seq_model,
                               mutation_rate=mutation_rate)
    tree = seq_evolver.evolve_states(
        tree=tree_model,
        seq_len=seq_len,
        # Fixed: previously hard-coded to None, silently discarding the
        # caller's root_states despite the documented contract.
        root_states=root_states,
        rng=rng)
    if char_matrix is None:
        char_matrix = dendropy.DnaCharacterMatrix(taxon_namespace=tree_model.taxon_namespace)
    else:
        assert char_matrix.taxon_namespace is tree_model.taxon_namespace, "conflicting taxon sets"
    seq_evolver.extend_char_matrix_with_characters_on_tree(
            char_matrix=char_matrix,
            tree=tree)
    if not retain_sequences_on_tree:
        # Remove the per-node sequence lists added during simulation.
        seq_evolver.clean_tree(tree)
    return char_matrix
+
def hky85_chars(
        seq_len,
        tree_model,
        mutation_rate=1.0,
        kappa=1.0,
        base_freqs=None,
        root_states=None,
        char_matrix=None,
        retain_sequences_on_tree=False,
        rng=None):
    """
    Convenience wrapper to generate characters (as a |DnaCharacterMatrix|)
    simulated under the HKY85 model.

    Since characters will be appended to existing sequences, you can simulate
    sequences under a mixed model by calling this method multiple times with
    different character model parameter values and/or different mutation
    rates, passing in the same ``char_matrix`` object each time.

    Parameters
    ----------
    seq_len       : int
        Length of sequence (number of characters).
    tree_model    : |Tree|
        Tree on which to simulate.
    mutation_rate : float
        Mutation *modifier* rate (should be 1.0 if branch lengths on tree
        reflect true expected number of changes).
    kappa         : float
        Transition/transversion rate ratio of the HKY85 model.
    base_freqs    : list
        Equilibrium base frequencies in A, C, G, T order; defaults to
        uniform frequencies, [0.25, 0.25, 0.25, 0.25].
    root_states   : list
        Vector of root states (length must equal ``seq_len``).
    char_matrix   : |DnaCharacterMatrix|
        If given, new sequences for taxa on ``tree_model`` leaf_nodes will be
        appended to existing sequences of corresponding taxa in char_matrix; if
        not, a new |DnaCharacterMatrix| object will be created.
    retain_sequences_on_tree : bool
        If `False`, sequence annotations will be cleared from tree after
        simulation. Set to `True` if you want to, e.g., evolve and accumulate
        different sequences on tree, or retain information for other purposes.
    rng           : random number generator
        If not given, 'GLOBAL_RNG' will be used.

    Returns
    -------
    d : |DnaCharacterMatrix|
        The simulated alignment.

    """
    if base_freqs is None:
        # Default kept out of the signature to avoid a shared mutable
        # default argument.
        base_freqs = [0.25, 0.25, 0.25, 0.25]
    if char_matrix is None:
        char_matrix = dendropy.DnaCharacterMatrix(taxon_namespace=tree_model.taxon_namespace)
    else:
        assert char_matrix.taxon_namespace is tree_model.taxon_namespace
    state_alphabet = char_matrix.default_state_alphabet
    seq_model = Hky85(
            kappa=kappa,
            base_freqs=base_freqs,
            state_alphabet=state_alphabet)
    return simulate_discrete_chars(seq_len=seq_len,
                               tree_model=tree_model,
                               seq_model=seq_model,
                               mutation_rate=mutation_rate,
                               root_states=root_states,
                               char_matrix=char_matrix,
                               # Fixed: previously not forwarded, so a
                               # caller's True was silently ignored.
                               retain_sequences_on_tree=retain_sequences_on_tree,
                               rng=rng)
+
diff --git a/dendropy/model/parsimony.py b/dendropy/model/parsimony.py
new file mode 100644
index 0000000..c63a376
--- /dev/null
+++ b/dendropy/model/parsimony.py
@@ -0,0 +1,291 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Models, modeling and model-fitting of parsimony.
+"""
+
+from functools import reduce
+import operator
+import dendropy
+
+class _NodeStateSetMap(dict):
+    """
+    Dictionary mapping |Node| objects to their state set lists.
+
+    On a lookup miss, the node's state sets are lazily resolved through
+    ``taxon_state_sets_map`` (keyed by ``node.taxon``) and cached in this
+    dict, so leaf nodes never need to be primed explicitly.
+    """
+    def __init__(self, taxon_state_sets_map=None):
+        # Fallback source of state sets, keyed by taxon; consulted (and the
+        # result cached) whenever a node is not yet present in this dict.
+        self.taxon_state_sets_map = taxon_state_sets_map
+    def __getitem__(self, key):
+        try:
+            return dict.__getitem__(self, key)
+        except KeyError:
+            # Cache miss: assumes ``key`` is a node with a non-None ``taxon``
+            # present in ``taxon_state_sets_map`` — TODO(review) confirm only
+            # leaf nodes can miss here.
+            v = self.taxon_state_sets_map[key.taxon]
+            self[key] = v
+            return v
+
+def _store_sets_as_attr(n, state_sets_attr_name, v):
+    # Record the state set list ``v`` on node ``n`` under the configured
+    # attribute name (the "store on tree" strategy of the Fitch passes).
+    setattr(n, state_sets_attr_name, v)
+
+def _retrieve_state_sets_from_attr(n, state_sets_attr_name, taxon_state_sets_map):
+    # Return the state set list stored on node ``n``; if the attribute has
+    # not been set yet (typically an unvisited leaf), resolve it via the
+    # node's taxon, cache it on the node, and return it.
+    try:
+        return getattr(n, state_sets_attr_name)
+    except AttributeError:
+        v = taxon_state_sets_map[n.taxon]
+        setattr(n, state_sets_attr_name, v)
+        return v
+
+def fitch_down_pass(
+        postorder_nodes,
+        state_sets_attr_name="state_sets",
+        taxon_state_sets_map=None,
+        weights=None,
+        ):
+    """
+    Returns the parsimony score given a list of nodes in postorder and
+    associated states, using Fitch's (1971) unordered parsimony algorithm.
+
+    Parameters
+    ----------
+    postorder_nodes : iterable of/over |Node| objects
+        An iterable of |Node| objects in in order of post-order
+        traversal of the tree.
+    state_sets_attr_name : str
+        Name of attribute on |Node| objects in which state set lists
+        will stored/accessed. If `None`, then state sets will not be stored on
+        the tree.
+    taxon_state_sets_map : dict[taxon] = state sets
+        A dictionary that takes a taxon object as a key and returns a state set
+        list as a value. This will be used to populate the state set of a node
+        that has not yet had its state sets scored and recorded (typically,
+        leaves of a tree that has not yet been processed).
+    weights : iterable
+        A list of weights for each pattern.
+
+    Returns
+    -------
+    s : int
+        Parsimony score of tree.
+
+    Notes
+    -----
+    Currently this requires a bifurcating tree (even at the root).
+
+    Examples
+    --------
+
+    Assume that we have a tree, ``tree``, and an associated data set, ``data``::
+
+        import dendropy
+        from dendropy.model.parsimony import fitch_down_pass
+
+        taxa = dendropy.TaxonNamespace()
+        data = dendropy.StandardCharacterMatrix.get_from_path(
+                "apternodus.chars.nexus",
+                "nexus",
+                taxon_namespace=taxa)
+        tree = dendropy.Tree.get_from_path(
+                "apternodus.tre",
+                "nexus",
+                taxon_namespace=taxa)
+        taxon_state_sets_map = data.taxon_state_sets_map(gaps_as_missing=True)
+
+    The following will return the parsimony score of the ``tree`` with
+    respect to the data in ``data``::
+
+        score = fitch_down_pass(
+                tree.postorder_node_iter(),
+                taxon_state_sets_map=taxon_state_sets_map)
+        print(score)
+
+    In the above, every |Node| object of ``tree`` will have an attribute
+    added, "state_sets", that stores the list of state sets from the analysis::
+
+        for nd in tree:
+            print(nd.state_sets)
+
+    If you want to store the list of state sets in a different attribute, e.g.,
+    "analysis1_states"::
+
+        score = fitch_down_pass(
+                tree.postorder_node_iter(),
+                state_sets_attr_name="analysis1_states",
+                taxon_state_sets_map=taxon_state_sets_map)
+        print(score)
+        for nd in tree:
+            print(nd.analysis1_states)
+
+    Or not to store these at all::
+
+        score = fitch_down_pass(
+                tree.postorder_node_iter(),
+                state_sets_attr_name=None,
+                taxon_state_sets_map=taxon_state_sets_map)
+        print(score)
+
+    Scoring custom data can be done by something like the following::
+
+        taxa = dendropy.TaxonNamespace()
+        taxon_state_sets_map = {}
+        t1 = taxa.require_taxon("A")
+        t2 = taxa.require_taxon("B")
+        t3 = taxa.require_taxon("C")
+        t4 = taxa.require_taxon("D")
+        t5 = taxa.require_taxon("E")
+        taxon_state_sets_map[t1] = [ set([0,1]),  set([0,1]),  set([0]),     set([0]) ]
+        taxon_state_sets_map[t2] = [ set([1]),    set([1]),    set([1]),     set([0]) ]
+        taxon_state_sets_map[t3] = [ set([0]),    set([1]),    set([1]),     set([0]) ]
+        taxon_state_sets_map[t4] = [ set([0]),    set([1]),    set([0,1]),   set([1]) ]
+        taxon_state_sets_map[t5] = [ set([1]),    set([0]),    set([1]),     set([1]) ]
+        tree = dendropy.Tree.get_from_string(
+                "(A,(B,(C,(D,E))));", "newick",
+                taxon_namespace=taxa)
+        score = fitch_down_pass(tree.postorder_node_iter(),
+                taxon_state_sets_map=taxon_state_sets_map)
+        print(score)
+
+    """
+    score = 0
+    # Select the storage strategy: either keep state sets in a side dict
+    # (no tree mutation) or store them directly on the nodes as attributes.
+    if state_sets_attr_name is None:
+        node_state_set_map = _NodeStateSetMap(taxon_state_sets_map)
+        get_node_state_sets = lambda node : node_state_set_map[node]
+        set_node_state_sets = lambda node, v : node_state_set_map.__setitem__(node, v)
+    else:
+        get_node_state_sets = lambda node : _retrieve_state_sets_from_attr(node, state_sets_attr_name, taxon_state_sets_map)
+        set_node_state_sets = lambda node, v : _store_sets_as_attr(node, state_sets_attr_name, v)
+    for nd in postorder_nodes:
+        c = nd.child_nodes()
+        if not c:
+            # Leaf: the call is made for its side effect of populating/caching
+            # the leaf's state sets; the returned value itself is not used.
+            ss = get_node_state_sets(nd)
+            continue
+        # Multifurcations are handled by folding children pairwise: start
+        # with the first two children, then merge each remaining child in turn.
+        left_c, right_c = c[:2]
+        remaining = c[2:]
+        left_ssl = get_node_state_sets(left_c)
+        while True:
+            right_ssl = get_node_state_sets(right_c)
+            result = []
+            # Standard Fitch preliminary-phase rule per character pattern:
+            # keep the intersection if non-empty, otherwise take the union
+            # and charge one (weighted) step.
+            for n, ssp in enumerate(zip(left_ssl, right_ssl)):
+                left_ss, right_ss = ssp
+                inter = left_ss.intersection(right_ss)
+                if inter:
+                    result.append(inter)
+                else:
+                    if weights is None:
+                        wt = 1
+                    else:
+                        wt = weights[n]
+                    score += wt
+                    # NOTE(review): the first ``left_ss`` argument is redundant
+                    # (``left_ss.union(right_ss)`` is equivalent); harmless.
+                    result.append(left_ss.union(left_ss, right_ss))
+            if remaining:
+                right_c = remaining.pop(0)
+                left_ssl = result
+            else:
+                break
+        # setattr(nd, state_sets_attr_name, result)
+        set_node_state_sets(nd, result)
+    return score
+
+def fitch_up_pass(
+        preorder_node_list,
+        state_sets_attr_name="state_sets",
+        taxon_state_sets_map=None):
+    """
+    Finalizes the state set lists associated with each node using the "final
+    phase" of Fitch's (1971) unordered parsimony algorithm.
+
+    Parameters
+    ----------
+    preorder_node_list : iterable of/over |Node| objects
+        An iterable of |Node| objects in order of pre-order traversal of the
+        tree (i.e., parents before children, as produced by a prior
+        ``fitch_down_pass``-annotated tree's ``preorder_node_iter()``).
+    state_sets_attr_name : str
+        Name of attribute on |Node| objects in which state set lists
+        will stored/accessed. If `None`, then state sets will not be stored on
+        the tree.
+    taxon_state_sets_map : dict[taxon] = state sets
+        A dictionary that takes a taxon object as a key and returns a state set
+        list as a value. This will be used to populate the state set of a node
+        that has not yet had its state sets scored and recorded (typically,
+        leaves of a tree that has not yet been processed).
+
+    Notes
+    -----
+    Currently this requires a bifurcating tree (even at the root).
+
+    Examples
+    --------
+
+    ::
+
+        taxa = dendropy.TaxonNamespace()
+        data = dendropy.StandardCharacterMatrix.get_from_path(
+                "apternodus.chars.nexus",
+                "nexus",
+                taxon_namespace=taxa)
+        tree = dendropy.Tree.get_from_path(
+                "apternodus.tre",
+                "nexus",
+                taxon_namespace=taxa)
+        taxon_state_sets_map = data.taxon_state_sets_map(gaps_as_missing=True)
+        score = fitch_down_pass(tree.postorder_node_iter(),
+                taxon_state_sets_map=taxon_state_sets_map)
+        print(score)
+        fitch_up_pass(tree.preorder_node_iter())
+        for nd in tree:
+            print(nd.state_sets)
+
+    """
+    # NOTE(review): this dict appears to be unused in the body below —
+    # candidate for removal upstream.
+    node_state_sets_map = {}
+    for nd in preorder_node_list:
+        c = nd.child_nodes()
+        p = nd.parent_node
+        # Root and leaves keep their down-pass state sets unchanged; only
+        # internal, non-root nodes are finalized.
+        if (not c) or (not p):
+            continue
+        assert(len(c) == 2)
+        left_c, right_c = c
+        try:
+            left_ssl = getattr(left_c, state_sets_attr_name)
+        except AttributeError:
+            if not taxon_state_sets_map:
+                raise
+            left_ssl = taxon_state_sets_map[left_c.taxon]
+        try:
+            right_ssl = getattr(right_c, state_sets_attr_name)
+        except AttributeError:
+            if not taxon_state_sets_map:
+                raise
+            right_ssl = taxon_state_sets_map[right_c.taxon]
+        par_ssl = getattr(p, state_sets_attr_name)
+        curr_ssl = getattr(nd, state_sets_attr_name)
+        result = []
+        # Fitch final-phase rule, applied per character pattern.
+        for n, ssp in enumerate(zip(par_ssl, curr_ssl, left_ssl, right_ssl)):
+            par_ss, curr_ss, left_ss, right_ss = ssp
+
+            down_parup_inter = par_ss.intersection(curr_ss)
+            if down_parup_inter == par_ss:
+                # Parent's final set is wholly contained in this node's
+                # preliminary set: adopt it directly.
+                final_ss = down_parup_inter
+            else:
+                rl_inter = left_ss.intersection(right_ss)
+                if not rl_inter:
+                    # Preliminary set was formed by a union: add parent states.
+                    final_ss = par_ss.union(curr_ss)
+                else:
+                    # Preliminary set was formed by an intersection: add any
+                    # parent states present in either child.
+                    in_par_and_left = par_ss.intersection(left_ss)
+                    in_par_and_right = par_ss.intersection(right_ss)
+                    final_ss = in_par_and_left.union(in_par_and_right, curr_ss)
+            #_LOG.debug("downpass = %s, par = %s, left = %s, right = %s, final_ss= %s" %
+            #                    (str(curr_ss), str(par_ss), str(left_ss), str(right_ss), str(final_ss)))
+            result.append(final_ss)
+        setattr(nd, state_sets_attr_name, result)
+
+
diff --git a/dendropy/model/reconcile.py b/dendropy/model/reconcile.py
new file mode 100644
index 0000000..7f9cfd0
--- /dev/null
+++ b/dendropy/model/reconcile.py
@@ -0,0 +1,599 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Classes and Methods for working with tree reconciliation, fitting, embedding,
+contained/containing etc.
+"""
+
+import dendropy
+from dendropy.model import coalescent
+
+class ContainingTree(dendropy.Tree):
+    """
+    A "containing tree" is a (usually rooted) tree data structure within which
+    other trees are "contained". For example, species trees and their contained
+    gene trees; host trees and their contained parasite trees; biogeographical
+    "area" trees and their contained species or taxon trees.
+    """
+
+    def __init__(self,
+            containing_tree,
+            contained_taxon_namespace,
+            contained_to_containing_taxon_map,
+            contained_trees=None,
+            fit_containing_edge_lengths=True,
+            collapse_empty_edges=True,
+            ultrametricity_precision=False,
+            ignore_root_deep_coalescences=True,
+            **kwargs):
+        """
+        __init__ converts ``self`` to ContainingTree class, embedding the trees
+        given in the list, ``contained_trees.``
+
+
+        Mandatory Arguments:
+
+            ``containing_tree``
+                A |Tree| or |Tree|-like object that describes the topological
+                constraints or conditions of the containing tree (e.g., species,
+                host, or biogeographical area trees).
+
+            ``contained_taxon_namespace``
+                A |TaxonNamespace| object that will be used to manage the taxa of
+                the contained trees.
+
+            ``contained_to_containing_taxon_map``
+                A |TaxonNamespaceMapping| object mapping |Taxon| objects in the
+                contained |TaxonNamespace| to corresponding |Taxon| objects in the
+                containing tree.
+
+        Optional Arguments:
+
+            ``contained_trees``
+                An iterable container of |Tree| or |Tree|-like objects that
+                will be contained into ``containing_tree``; e.g. gene or
+                parasite trees.
+
+            ``fit_containing_edge_lengths``
+                If `True` [default], then the branch lengths of
+                ``containing_tree`` will be adjusted to fit the contained tree
+                as they are added. Otherwise, the containing tree edge lengths
+                will not be changed.
+
+            ``collapse_empty_edges``
+                If `True` [default], after edge lengths are adjusted,
+                zero-length branches will be collapsed.
+
+            ``ultrametricity_precision``
+                If `False` [default], then trees will not be checked for
+                ultrametricity. Otherwise this is the threshold within which
+                all node to tip distances for sister nodes must be equal.
+
+            ``ignore_root_deep_coalescences``
+                If `True` [default], then deep coalescences in the root will
+                not be counted.
+
+        Other Keyword Arguments: Will be passed to Tree().
+
+        """
+        # NOTE(review): ``kwargs`` is populated with a default taxon_namespace
+        # here but never actually forwarded to ``dendropy.Tree.__init__`` below,
+        # despite the docstring's claim — confirm intended behavior upstream.
+        if "taxon_namespace" not in kwargs:
+            kwargs["taxon_namespace"] = containing_tree.taxon_namespace
+        dendropy.Tree.__init__(self,
+                containing_tree,
+                taxon_namespace=containing_tree.taxon_namespace)
+        self.original_tree = containing_tree
+        # Decorate every edge with per-contained-tree bookkeeping structures:
+        # the contained edges crossing its head/tail, and taxa sets.
+        for edge in self.postorder_edge_iter():
+            edge.head_contained_edges = {}
+            edge.tail_contained_edges = {}
+            edge.containing_taxa = set()
+            edge.contained_taxa = set()
+        self._contained_taxon_namespace = contained_taxon_namespace
+        self._contained_to_containing_taxon_map = None
+        self._contained_trees = None
+        self._set_contained_to_containing_taxon_map(contained_to_containing_taxon_map)
+        self.fit_containing_edge_lengths = fit_containing_edge_lengths
+        self.collapse_empty_edges = collapse_empty_edges
+        self.ultrametricity_precision = ultrametricity_precision
+        self.ignore_root_deep_coalescences = ignore_root_deep_coalescences
+        if contained_trees:
+            self._set_contained_trees(contained_trees)
+        if self.contained_trees:
+            self.rebuild(rebuild_taxa=False)
+
+    def _set_contained_taxon_namespace(self, taxon_namespace):
+        # Setter for the namespace managing contained-tree taxa.
+        self._contained_taxon_namespace = taxon_namespace
+
+    def _get_contained_taxon_namespace(self):
+        # Lazily create the contained taxon namespace on first access.
+        if self._contained_taxon_namespace is None:
+            self._contained_taxon_namespace = dendropy.TaxonNamespace()
+        return self._contained_taxon_namespace
+
+    contained_taxon_namespace = property(_get_contained_taxon_namespace)
+
+    def _set_contained_to_containing_taxon_map(self, contained_to_containing_taxon_map):
+        """
+        Sets mapping of |Taxon| objects of the genes/parasite/etc. to that of
+        the population/species/host/etc.
+        Creates mapping (e.g., species to genes) and decorates edges of self
+        with sets of both containing |Taxon| objects and the contained
+        |Taxon| objects that map to them.
+        """
+        if isinstance(contained_to_containing_taxon_map, dendropy.TaxonNamespaceMapping):
+            if self._contained_taxon_namespace is not contained_to_containing_taxon_map.domain_taxon_namespace:
+                raise ValueError("Domain TaxonNamespace of TaxonNamespaceMapping ('domain_taxon_namespace') not the same as 'contained_taxon_namespace' TaxonNamespace")
+            self._contained_to_containing_taxon_map = contained_to_containing_taxon_map
+        else:
+            # A plain dict was given: wrap it in a TaxonNamespaceMapping.
+            self._contained_to_containing_taxon_map = dendropy.TaxonNamespaceMapping(
+                    mapping_dict=contained_to_containing_taxon_map,
+                    domain_taxon_namespace=self.contained_taxon_namespace,
+                    range_taxon_namespace=self.taxon_namespace)
+        self.build_edge_taxa_sets()
+
+    def _get_contained_to_containing_taxon_map(self):
+        return self._contained_to_containing_taxon_map
+
+    contained_to_containing_taxon_map = property(_get_contained_to_containing_taxon_map)
+
+    def _set_contained_trees(self, trees):
+        # Adopt (or validate against) the taxon namespace of the incoming
+        # tree collection before copying the trees into a TreeList.
+        if hasattr(trees, 'taxon_namespace'):
+            if self._contained_taxon_namespace is None:
+                self._contained_taxon_namespace = trees.taxon_namespace
+            elif self._contained_taxon_namespace is not trees.taxon_namespace:
+                raise ValueError("'contained_taxon_namespace' of ContainingTree is not the same TaxonNamespace object of 'contained_trees'")
+        self._contained_trees = dendropy.TreeList(trees, taxon_namespace=self._contained_taxon_namespace)
+        if self._contained_taxon_namespace is None:
+            self._contained_taxon_namespace = self._contained_trees.taxon_namespace
+
+    def _get_contained_trees(self):
+        # Lazily create an empty TreeList on first access.
+        if self._contained_trees is None:
+            self._contained_trees = dendropy.TreeList(taxon_namespace=self._contained_taxon_namespace)
+        return self._contained_trees
+
+    contained_trees = property(_get_contained_trees)
+
+    def _get_containing_to_contained_taxa_map(self):
+        return self._contained_to_containing_taxon_map.reverse
+
+    containing_to_contained_taxa_map = property(_get_containing_to_contained_taxa_map)
+
+    def clear(self):
+        """
+        Clears all contained trees and mapped edges.
+        """
+        # NOTE(review): ``contained_trees`` is a read-only property (no
+        # setter), so this assignment looks like it would raise
+        # AttributeError; also ``domain_taxa`` is passed where a
+        # taxon_namespace is expected — confirm upstream.
+        self.contained_trees = dendropy.TreeList(taxon_namespace=self._contained_to_containing_taxon_map.domain_taxa)
+        self.clear_contained_edges()
+
+    def clear_contained_edges(self):
+        """
+        Clears all contained mapped edges.
+        """
+        for edge in self.postorder_edge_iter():
+            edge.head_contained_edges = {}
+            edge.tail_contained_edges = {}
+
+    def fit_edge_lengths(self, contained_trees):
+        """
+        Recalculate node ages / edge lengths of containing tree to accommodate
+        contained trees.
+        """
+
+        # set the ages
+        for node in self.postorder_node_iter():
+            if node.is_internal():
+                disjunct_leaf_set_list_split_bitmasks = []
+                for i in node.child_nodes():
+                    disjunct_leaf_set_list_split_bitmasks.append(self.taxon_namespace.taxa_bitmask(taxa=i.edge.containing_taxa))
+                # Each containing node must be at least as old as the youngest
+                # coalescence (across all contained trees) joining lineages
+                # from different child subtrees.
+                min_age = float('inf')
+                for et in contained_trees:
+                    min_age = self._find_youngest_intergroup_age(et, disjunct_leaf_set_list_split_bitmasks, min_age)
+                node.age = max( [min_age] + [cn.age for cn in node.child_nodes()] )
+            else:
+                node.age = 0
+
+        # set the corresponding edge lengths
+        self.set_edge_lengths_from_node_ages()
+
+        # collapse 0-length branches
+        if self.collapse_empty_edges:
+           self.collapse_unweighted_edges()
+
+    def rebuild(self, rebuild_taxa=True):
+        """
+        Recalculate edge taxa sets, node ages / edge lengths of containing
+        tree, and embed edges of contained trees.
+        """
+        if rebuild_taxa:
+            self.build_edge_taxa_sets()
+        if self.fit_containing_edge_lengths:
+            self.fit_edge_lengths(self.contained_trees)
+        self.clear_contained_edges()
+        for et in self.contained_trees:
+            self.embed_tree(et)
+
+    def embed_tree(self, contained_tree):
+        """
+        Map edges of contained tree into containing tree (i.e., self).
+        """
+        if self.seed_node.age is None:
+            self.calc_node_ages(ultrametricity_precision=self.ultrametricity_precision)
+        if contained_tree not in self.contained_trees:
+            self.contained_trees.append(contained_tree)
+        if self.fit_containing_edge_lengths:
+            self.fit_edge_lengths(self.contained_trees)
+        if contained_tree.seed_node.age is None:
+            contained_tree.calc_node_ages(ultrametricity_precision=self.ultrametricity_precision)
+        contained_leaves = contained_tree.leaf_nodes()
+        taxon_to_contained = {}
+        # Group contained leaf edges by the containing taxon they map to.
+        for nd in contained_leaves:
+            containing_taxon = self.contained_to_containing_taxon_map[nd.taxon]
+            x = taxon_to_contained.setdefault(containing_taxon, set())
+            x.add(nd.edge)
+        for containing_edge in self.postorder_edge_iter():
+            # Head (younger end) of each containing edge: the contained edges
+            # entering it are the leaf edges (terminal case) or the union of
+            # what exited the child edges' tails (internal case).
+            if containing_edge.is_terminal():
+                containing_edge.head_contained_edges[contained_tree] = taxon_to_contained[containing_edge.head_node.taxon]
+            else:
+                containing_edge.head_contained_edges[contained_tree] = set()
+                for nd in containing_edge.head_node.child_nodes():
+                    containing_edge.head_contained_edges[contained_tree].update(nd.edge.tail_contained_edges[contained_tree])
+
+            if containing_edge.tail_node is None:
+                if containing_edge.length is not None:
+                    target_age =  containing_edge.head_node.age + containing_edge.length
+                else:
+                    # assume all coalesce?
+                    containing_edge.tail_contained_edges[contained_tree] = set([contained_tree.seed_node.edge])
+                    continue
+            else:
+                target_age = containing_edge.tail_node.age
+
+            # Walk each contained lineage rootward until it spans the tail
+            # (older end) of this containing edge, recording which contained
+            # edge is "live" at that age.
+            containing_edge.tail_contained_edges[contained_tree] = set()
+            for contained_edge in containing_edge.head_contained_edges[contained_tree]:
+                if contained_edge.tail_node is not None:
+                    remaining = target_age - contained_edge.tail_node.age
+                elif contained_edge.length is not None:
+                    remaining = target_age - (contained_edge.head_node.age + contained_edge.length)
+                else:
+                    continue
+                while remaining > 0:
+                    if contained_edge.tail_node is not None:
+                        contained_edge = contained_edge.tail_node.edge
+                    else:
+                        if contained_edge.length is not None and (remaining - contained_edge.length) <= 0:
+                            contained_edge = None
+                            remaining = 0
+                            break
+                        else:
+                            remaining = 0
+                            break
+                    if contained_edge and remaining > 0:
+                        remaining -= contained_edge.length
+                if contained_edge is not None:
+                    containing_edge.tail_contained_edges[contained_tree].add(contained_edge)
+
+    def build_edge_taxa_sets(self):
+        """
+        Rebuilds sets of containing and corresponding contained taxa at each
+        edge.
+        """
+        for edge in self.postorder_edge_iter():
+            if edge.is_terminal():
+                edge.containing_taxa = set([edge.head_node.taxon])
+            else:
+                # Internal edge: union of the child edges' containing taxa
+                # (valid because iteration is post-order).
+                edge.containing_taxa = set()
+                for i in edge.head_node.child_nodes():
+                    edge.containing_taxa.update(i.edge.containing_taxa)
+            edge.contained_taxa = set()
+            for t in edge.containing_taxa:
+                edge.contained_taxa.update(self.containing_to_contained_taxa_map[t])
+
+    def num_deep_coalescences(self):
+        """
+        Returns total number of deep coalescences of the contained trees.
+        """
+        return sum(self.deep_coalescences().values())
+
+    def deep_coalescences(self):
+        """
+        Returns dictionary where the contained trees are keys, and the number of
+        deep coalescences corresponding to the tree are values.
+        """
+        dc = {}
+        for tree in self.contained_trees:
+            for edge in self.postorder_edge_iter():
+                if edge.tail_node is None and self.ignore_root_deep_coalescences:
+                    continue
+                # Each extra lineage exiting an edge's tail (beyond one) is a
+                # deep coalescence.
+                try:
+                    dc[tree] += len(edge.tail_contained_edges[tree]) - 1
+                except KeyError:
+                    dc[tree] = len(edge.tail_contained_edges[tree]) - 1
+        return dc
+
+    def embed_contained_kingman(self,
+            edge_pop_size_attr='pop_size',
+            default_pop_size=1,
+            label=None,
+            rng=None,
+            use_expected_tmrca=False):
+        """
+        Simulates, *embeds*, and returns a "censored" (Kingman) neutral coalescence tree
+        conditional on self.
+
+            ``rng``
+                Random number generator to use. If `None`, the default will
+                be used.
+
+            ``edge_pop_size_attr``
+                Name of attribute of self's edges that specify the population
+                size. If this attribute does not exist, then the population
+                size is taken to be 1.
+
+        Note that all edge-associated taxon sets must be up-to-date (otherwise,
+        ``build_edge_taxa_sets()`` should be called).
+        """
+        et = self.simulate_contained_kingman(
+                edge_pop_size_attr=edge_pop_size_attr,
+                default_pop_size=default_pop_size,
+                label=label,
+                rng=rng,
+                use_expected_tmrca=use_expected_tmrca)
+        self.embed_tree(et)
+        return et
+
+    def simulate_contained_kingman(self,
+            edge_pop_size_attr='pop_size',
+            default_pop_size=1,
+            label=None,
+            rng=None,
+            use_expected_tmrca=False):
+        """
+        Simulates and returns a "censored" (Kingman) neutral coalescence tree
+        conditional on self.
+
+            ``rng``
+                Random number generator to use. If `None`, the default will
+                be used.
+
+            ``edge_pop_size_attr``
+                Name of attribute of self's edges that specify the population
+                size. If this attribute does not exist, then the population
+                size is taken to be 1.
+
+        Note that all edge-associated taxon sets must be up-to-date (otherwise,
+        ``build_edge_taxa_sets()`` should be called), and that the tree
+        is *not* added to the set of contained trees. For the latter, call
+        ``embed_contained_kingman``.
+        """
+
+        # Dictionary that maps nodes of containing tree to list of
+        # corresponding nodes on gene tree, initially populated with leaf
+        # nodes.
+        contained_nodes = {}
+        for nd in self.leaf_node_iter():
+            contained_nodes[nd] = []
+            for gt in nd.edge.contained_taxa:
+                gn = dendropy.Node(taxon=gt)
+                contained_nodes[nd].append(gn)
+
+        # Generate the tree structure
+        for edge in self.postorder_edge_iter():
+            if edge.head_node.parent_node is None:
+                # root: run unconstrained coalescence until just one gene node
+                # remaining
+                # (post-order guarantees the root edge is visited last, so
+                # ``final`` is always bound before it is used below)
+                if hasattr(edge, edge_pop_size_attr):
+                    pop_size = getattr(edge, edge_pop_size_attr)
+                else:
+                    pop_size = default_pop_size
+                if len(contained_nodes[edge.head_node]) > 1:
+                    final = coalescent.coalesce_nodes(nodes=contained_nodes[edge.head_node],
+                            pop_size=pop_size,
+                            period=None,
+                            rng=rng,
+                            use_expected_tmrca=use_expected_tmrca)
+                else:
+                    final = contained_nodes[edge.head_node]
+            else:
+                # run until next coalescence event, as determined by this edge
+                # size.
+                if hasattr(edge, edge_pop_size_attr):
+                    pop_size = getattr(edge, edge_pop_size_attr)
+                else:
+                    pop_size = default_pop_size
+                remaining = coalescent.coalesce_nodes(nodes=contained_nodes[edge.head_node],
+                        pop_size=pop_size,
+                        period=edge.length,
+                        rng=rng,
+                        use_expected_tmrca=use_expected_tmrca)
+                # Uncoalesced lineages are handed up to the parent edge.
+                try:
+                    contained_nodes[edge.tail_node].extend(remaining)
+                except KeyError:
+                    contained_nodes[edge.tail_node] = remaining
+
+        # Create and return the full tree
+        contained_tree = dendropy.Tree(taxon_namespace=self.contained_taxon_namespace, label=label)
+        contained_tree.seed_node = final[0]
+        contained_tree.is_rooted = True
+        return contained_tree
+
+    def _find_youngest_intergroup_age(self, contained_tree, disjunct_leaf_set_list_split_bitmasks, starting_min_age=None):
+        """
+        Find the age of the youngest MRCA of disjunct leaf sets.
+        """
+        if starting_min_age is None:
+            starting_min_age = float('inf')
+        if contained_tree.seed_node.age is None:
+            contained_tree.calc_node_ages(ultrametricity_precision=self.ultrametricity_precision)
+        # Nodes are visited youngest-first, so the first node whose subtree
+        # intersects two or more of the disjunct leaf sets gives the answer.
+        for nd in contained_tree.ageorder_node_iter(include_leaves=False):
+            if nd.age > starting_min_age:
+                break
+            prev_intersections = False
+            for bm in disjunct_leaf_set_list_split_bitmasks:
+                if bm & nd.edge.split_bitmask:
+                    if prev_intersections:
+                        return nd.age
+                    prev_intersections = True
+        return starting_min_age
+
+    def write_as_mesquite(self, out, **kwargs):
+        """
+        For debugging purposes, write out a Mesquite-format file.
+        """
+        from dendropy.dataio import nexuswriter
+        nw = nexuswriter.NexusWriter(**kwargs)
+        nw.is_write_block_titles = True
+        out.write("#NEXUS\n\n")
+        nw._write_taxa_block(out, self.taxon_namespace)
+        out.write('\n')
+        nw._write_taxa_block(out, self.contained_trees.taxon_namespace)
+        # NOTE(review): ``oid`` is a DendroPy 3.x attribute; also
+        # ``domain_title`` is computed but never used below — confirm upstream.
+        if self.contained_trees.taxon_namespace.label:
+            domain_title = self.contained_trees.taxon_namespace.label
+        else:
+            domain_title = self.contained_trees.taxon_namespace.oid
+        contained_taxon_namespace = self.contained_trees.taxon_namespace
+        contained_label = self.contained_trees.label
+        out.write('\n')
+        self._contained_to_containing_taxon_map.write_mesquite_association_block(out)
+        out.write('\n')
+        nw._write_trees_block(out, dendropy.TreeList([self], taxon_namespace=self.taxon_namespace))
+        out.write('\n')
+        nw._write_trees_block(out, dendropy.TreeList(self.contained_trees, taxon_namespace=contained_taxon_namespace, label=contained_label))
+        out.write('\n')
+
def reconciliation_discordance(gene_tree, species_tree):
    """
    Given two trees (with splits encoded), this returns the number of gene
    duplications implied by the gene tree reconciled on the species tree, based
    on the algorithm described here:

        Goodman, M. J. Czelnusiniak, G. W. Moore, A. E. Romero-Herrera, and
        G. Matsuda. 1979. Fitting the gene lineage into its species lineage,
        a parsimony strategy illustrated by cladograms constructed from globin
        sequences. Syst. Zool. 19: 99-113.

        Maddison, W. P. 1997. Gene trees in species trees. Syst. Biol. 46:
        523-536.

    This function requires that the gene tree and species tree *have the same
    leaf set*. Note that for correct results,

        (a) trees must be rooted (i.e., is_rooted = True)
        (b) split masks must have been added as rooted (i.e., when
            encode_splits was called, is_rooted must have been set to True)

    """
    taxa_mask = species_tree.taxon_namespace.all_taxa_bitmask()
    species_to_gene_nodes = {}
    gene_to_species_node = {}
    # Pass 1 (postorder over the gene tree): map each gene-tree node to the
    # species-tree node that is the MRCA of its leaf set.
    for gene_node in gene_tree.postorder_node_iter():
        children = gene_node.child_nodes()
        if not children:
            # Leaf: map directly to the species-tree leaf bearing the same taxon.
            gene_to_species_node[gene_node] = species_tree.find_node(lambda x : x.taxon == gene_node.taxon)
        else:
            combined_mask = 0
            for child in children:
                combined_mask |= gene_to_species_node[child].edge.leafset_bitmask
            species_anc = species_tree.mrca(start_node=species_tree.seed_node, leafset_bitmask=combined_mask)
            gene_to_species_node[gene_node] = species_anc
            species_to_gene_nodes.setdefault(species_anc, []).append(gene_node)
    # Pass 2 (postorder over the species tree): count, for each species-tree
    # edge, how many gene lineages pass through it.
    edge_lineage_counts = {}
    for species_node in species_tree.postorder_node_iter():
        for gene_node in species_to_gene_nodes.get(species_node, []):
            for gene_child in gene_node.child_nodes():
                ancestor = gene_to_species_node[gene_child]
                while ancestor is not None and ancestor != species_node:
                    edge_lineage_counts[ancestor.edge] = edge_lineage_counts.get(ancestor.edge, 0) + 1
                    ancestor = ancestor.parent_node
    # Each edge contributes (number of contained lineages - 1) deep coalescences.
    return sum(count - 1 for count in edge_lineage_counts.values())
+
def monophyletic_partition_discordance(tree, taxon_namespace_partition):
    """
    Returns the number of deep coalescences on tree ``tree`` that would result
    if the taxa in ``tax_sets`` formed K mutually-exclusive monophyletic groups,
    where K = len(tax_sets)
    ``taxon_namespace_partition`` == TaxonNamespacePartition

    Raises ``ValueError`` if a leaf taxon on ``tree`` is not assigned to any
    subset of the partition.
    """

    tax_sets = taxon_namespace_partition.subsets()

    # NOTE: an alternative formulation would score this as a Fitch parsimony
    # problem (see ``dendropy.model.parsimony.fitch_down_pass``), treating
    # subset membership as a single multi-state character.

    # Build a "deep coalescence" tree over pseudo-taxa, one pseudo-taxon per
    # partition subset (labeled by subset index).
    dc_tree = dendropy.Tree()
    dc_tree.taxon_namespace = dendropy.TaxonNamespace()
    for t in range(len(tax_sets)):
        dc_tree.taxon_namespace.add_taxon(dendropy.Taxon(label=str(t)))
    def _get_dc_taxon(nd):
        # Map a source leaf node to the pseudo-taxon for its subset.
        for idx, tax_set in enumerate(tax_sets):
            if nd.taxon in tax_set:
                return dc_tree.taxon_namespace[idx]
        # BUG FIX: this was previously ``assert "<message>"``, which asserts
        # a non-empty string and therefore never fires; an unassigned taxon
        # must be reported as an error.
        raise ValueError("taxon not found in partition: '%s'" % nd.taxon.label)
    src_dc_map = {}
    for snd in tree.postorder_node_iter():
        nnd = dendropy.Node()
        src_dc_map[snd] = nnd
        children = snd.child_nodes()
        if len(children) == 0:
            nnd.taxon = _get_dc_taxon(snd)
        else:
            # Collapse runs of same-subset leaves: collect the distinct
            # pseudo-taxa seen among single-lineage children.
            taxa_set = []
            for cnd in children:
                dc_node = src_dc_map[cnd]
                if len(dc_node.child_nodes()) > 1:
                    nnd.add_child(dc_node)
                else:
                    ctax = dc_node.taxon
                    if ctax is not None and ctax not in taxa_set:
                        taxa_set.append(ctax)
                    del src_dc_map[cnd]
            if len(taxa_set) > 1:
                for t in taxa_set:
                    cnd = dendropy.Node()
                    cnd.taxon = t
                    nnd.add_child(cnd)
            else:
                if len(nnd.child_nodes()) == 0:
                    nnd.taxon = taxa_set[0]
                elif len(taxa_set) == 1:
                    cnd = dendropy.Node()
                    cnd.taxon = taxa_set[0]
                    nnd.add_child(cnd)
    # ``nnd`` is the node created for the root (last postorder visit).
    dc_tree.seed_node = nnd
    return len(dc_tree.leaf_nodes()) - len(tax_sets)
+
diff --git a/dendropy/model/treeshape.py b/dendropy/model/treeshape.py
new file mode 100644
index 0000000..1448049
--- /dev/null
+++ b/dendropy/model/treeshape.py
@@ -0,0 +1,31 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Models of canonical tree shapes.
+"""
+
+import dendropy
+
def star_tree(taxon_namespace, **kwargs):
    """
    Builds and returns a star (single-polytomy) tree in which every taxon in
    ``taxon_namespace`` is attached directly to the root; ``kwargs`` are
    passed through to the ``Tree`` constructor.
    """
    # Use a local name that does not shadow this function's own name.
    tree = dendropy.Tree(taxon_namespace=taxon_namespace, **kwargs)
    for taxon in taxon_namespace:
        tree.seed_node.new_child(taxon=taxon)
    return tree
+
diff --git a/dendropy/simulate/__init__.py b/dendropy/simulate/__init__.py
new file mode 100644
index 0000000..a084375
--- /dev/null
+++ b/dendropy/simulate/__init__.py
@@ -0,0 +1,24 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Simulation of trees and characters.
+"""
+
+from dendropy.simulate.treesim import *
+from dendropy.simulate.charsim import *
diff --git a/dendropy/simulate/charsim.py b/dendropy/simulate/charsim.py
new file mode 100644
index 0000000..dd13177
--- /dev/null
+++ b/dendropy/simulate/charsim.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+This module provides a convenient interface that aggregates, wraps, and/or
+implements functions and classes that simulate character data under various
+models.
+"""
+
+from dendropy.model.continuous import evolve_continuous_char
+from dendropy.model.discrete import DiscreteCharacterEvolutionModel
+from dendropy.model.discrete import DiscreteCharacterEvolver
+from dendropy.model.discrete import simulate_discrete_char_dataset
+from dendropy.model.discrete import simulate_discrete_chars
+from dendropy.model.discrete import Hky85
+from dendropy.model.discrete import Jc69
+from dendropy.model.discrete import hky85_chars
diff --git a/dendropy/simulate/popgensim.py b/dendropy/simulate/popgensim.py
new file mode 100644
index 0000000..9cbefad
--- /dev/null
+++ b/dendropy/simulate/popgensim.py
@@ -0,0 +1,259 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Population genetic simulations.
+"""
+
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+import random
+import copy
+
+from dendropy.utility import GLOBAL_RNG
+from dendropy.interop import seqgen
+from dendropy.model import discrete
+from dendropy.model import coalescent
+import dendropy
+
class FragmentedPopulations(object):
    """
    Simulates gene genealogies and sequences under a model in which a single
    ancestral population fragments into ``num_desc_pops`` descendent
    populations ``div_time_gens`` generations in the past.
    """

    def __init__(self,
                 div_time_gens,
                 num_desc_pops = 2,
                 mutrate_per_site_per_generation=10e-8,
                 desc_pop_size=10000,
                 use_seq_gen=False,
                 rng=GLOBAL_RNG):
        """
        __init__ arguments:

            - ``div_time_gens`` : generations since divergence,
            - ``num_desc_pops`` : number of descendent populations,
            - ``mutrate_per_site_per_generation`` : sequence mutation rate, per-site per-generation
            - ``desc_pop_size`` : descendent lineage population size (=N; ancestral pop size = num_desc_pops * N)
            - ``use_seq_gen`` : if True, use the external seq-gen program (rather than the native simulator) to generate sequences
            - ``rng`` : random number generator
        """
        self.div_time_gens = div_time_gens
        self.num_desc_pops = num_desc_pops
        self.mutrate_per_site_per_generation = mutrate_per_site_per_generation
        self.desc_pop_size = desc_pop_size
        self.rng = rng
        # HKY model parameters used when simulating sequences.
        self.kappa = 1.0
        self.base_freqs=[0.25, 0.25, 0.25, 0.25]
        self.seqgen_path = 'seq-gen'
        self.use_seq_gen = use_seq_gen
        # Populated by generate_pop_tree() / generate_gene_tree().
        self.gene_tree = None
        self.pop_tree = None
        self.mutation_tree = None

    def _get_theta(self):
        """
        Return theta = 4 * N * mu, using the per-site mutation rate.
        """
        # BUG FIX: previously read the non-existent attribute
        # ``mutrate_per_gene_per_generation``, which raised AttributeError;
        # the per-site rate is the rate actually stored on the instance.
        return 4 * self.mutrate_per_site_per_generation * self.desc_pop_size

    def generate_sequences(self,
                           species_name,
                           samples_per_pop=10,
                           seq_len=2000,
                           use_seq_gen=True):
        """
        Generate trees and then simulate sequences of ``seq_len`` sites over
        the resulting mutation tree, returning a ``DataSet``.

        NOTE(review): the ``use_seq_gen`` parameter is accepted but the
        decision below is driven by ``self.use_seq_gen`` — confirm whether
        the parameter should override the instance setting.
        """
        self.generate_pop_tree(species_name=species_name, samples_per_pop=samples_per_pop)
        self.generate_gene_tree(species_name=species_name, samples_per_pop=samples_per_pop)
        d = dendropy.DataSet(self.mutation_tree.taxon_namespace)
        if self.use_seq_gen is True:
            sg = seqgen.SeqGen()
            sg.seqgen_path = self.seqgen_path
            sg.num_replicates = 1
            sg.quiet = True
            sg.rng = self.rng
            sg.seq_len = seq_len
            sg.char_model = 'HKY'
            # seq-gen parameterizes transitions/transversions as ti/tv = kappa/2.
            sg.ti_tv = float(self.kappa) / 2
            sg.state_freqs = self.base_freqs
            sg.trees = [self.mutation_tree]
            d = sg.generate_dataset(dataset=d)
        else:
            char_matrix = discrete.hky85_chars(
                    seq_len=seq_len,
                    tree_model=self.mutation_tree,
                    mutation_rate=1.0,
                    kappa=1.0,
                    base_freqs=[0.25, 0.25, 0.25, 0.25],
                    root_states=None,
                    rng=self.rng)
            d.add_char_matrix(char_matrix)
        return d

    def generate_pop_tree(self, species_name, samples_per_pop=10):
        """
        Build the population (species) tree: a polytomy of ``num_desc_pops``
        leaves, each with a branch length of ``div_time_gens`` generations.
        """
        tree_data = { 'sp': species_name, 'divt': self.div_time_gens }
        desc_lineages = []
        # BUG FIX: ``xrange`` does not exist under Python 3 (which this
        # module otherwise supports); use ``range``.
        for i in range(self.num_desc_pops):
            tree_data['id'] = i+1
            desc_lineages.append("%(sp)s%(id)d:%(divt)d" % tree_data)
        tree_string = "(" + (",".join(desc_lineages)) + ("):%d" % 0) #% (self.num_desc_pops * self.desc_pop_size * 10))
        self.pop_tree = dendropy.Tree.get_from_string(tree_string, schema="newick")
        return self.pop_tree

    def generate_gene_tree(self, species_name, samples_per_pop=10):
        """
        Given:
            ``species_name`` : string identifying species/taxon
            ``samples_per_pop`` : number of samples (genes) per population
        Returns:
            DendroPy tree, with branch lengths in generations
        """
        if self.pop_tree is None:
            self.generate_pop_tree(species_name, samples_per_pop=10)
        for idx, leaf in enumerate(self.pop_tree.leaf_iter()):
            if idx == 1:
                # ancestral population = num_desc_pops * desc population
                leaf.parent_node.edge.pop_size = self.num_desc_pops * self.desc_pop_size
            leaf.edge.pop_size = self.desc_pop_size
            leaf.num_genes = samples_per_pop
        self.gene_tree, self.pop_tree = coalescent.constrained_kingman_tree(self.pop_tree,
                                                          gene_node_label_fn=lambda x,y: "%sX%d" % (x,y),
                                                          rng=self.rng)

        # The mutation tree is the gene tree rescaled from generations to
        # expected substitutions per site.
        self.mutation_tree = copy.deepcopy(self.gene_tree)
        for edge in self.mutation_tree.preorder_edge_iter():
            edge.length = edge.length * self.mutrate_per_site_per_generation
        return self.gene_tree
+
def pop_gen_tree(tree=None,
                 taxon_namespace=None,
                 ages=None,
                 num_genes=None,
                 pop_sizes=None,
                 num_genes_attr = 'num_genes',
                 pop_size_attr = 'pop_size',
                 rng=None):
    """
    This will simulate and return a tree with edges decorated with
    population sizes and leaf nodes decorated by the number of genes
    (samples or lineages) in each leaf.

    If ``tree`` is given, then this is used as the tree to be decorated.
    Otherwise, a Yule tree is generated based on the given taxon_namespace.
    Either ``tree`` or ``taxon_namespace`` must be given.

    The timing of the divergences can be controlled by specifying a vector of
    ages, ``ages``. This should be sequences of values specifying the ages of the
    first, second, third etc. divergence events, in terms of time from the
    present, specified either in generations (if the ``pop_sizes`` vector is
    given) or population units (if the pop_size vector is not given).
    If an ages vector is given and there are fewer than num_pops-1 of these,
    then an exception is raised.

    The number of gene lineages per population can be specified through
    the 'num_genes', which can either be a scalar integer or a list.
    If it is an integer, all the population get the same number of
    genes. If it is a list, it must be at least as long as num_pops.

    The population sizes of each edge can be specified using the ``pop_sizes``
    vector, which should be a sequence of values specifying the population
    sizes of the edges in postorder. If the pop_size vector is given, then it
    must be at least as long as there are branches on a tree, i.e. 2 * num_pops
    + 1, otherwise it is an error.  The population size should be the effective
    *haploid* population size; i.e., number of gene copies in the population: 2
    * N in a diploid population of N individuals, or N in a haploid population
    * of N individuals.

    If ``pop_size`` is 1 or 0 or None, then edge lengths of the tree are in
    haploid population units; i.e. where 1 unit of time equals 2N generations
    for a diploid population of size N, or N generations for a haploid
    population of size N. Otherwise edge lengths of the tree are in
    generations.

    This function first generates a tree using a pure-birth model with a
    uniform birth rate of 1.0. If an ages vector is given, it then sweeps
    through the internal nodes, assigning branch lengths such that the
    divergence events correspond to the ages in the vector. If a population
    sizes vector is given, it then visits all the edges in postorder, assigning
    population sizes to the attribute with the name specified in
    'pop_size_attr' (which is persisted as an annotation). During this, if an
    ages vector was *not* given, then the edge lengths are multiplied by the
    population size of the edge so the branch length units will be in
    generations. If an ages vector was given, then it is assumed that the ages
    are already in the proper scale/units.

    Raises ``ValueError`` if neither ``tree`` nor ``taxon_namespace`` is
    given, or if any of ``ages``, ``num_genes``, or ``pop_sizes`` is too
    short for the tree.
    """

    # get our random number generator
    if rng is None:
        rng = GLOBAL_RNG # use the global rng by default

    # get a yule tree
    if not tree:
        if not taxon_namespace:
            # ValueError (a subclass of Exception, which was raised before)
            # is backward-compatible with existing handlers.
            raise ValueError("Either tree or taxon namespace must be given")
        from dendropy.simulate import treesim
        tree = treesim.uniform_pure_birth_tree(taxon_namespace=taxon_namespace,
                                  rng=rng)

    num_pops = len(tree.leaf_nodes())

    # basic sanity-checking of vector lengths
    if ages is not None and len(ages) < (num_pops - 1):
        raise ValueError("Too few ages specified.")
    if num_genes is not None:
        if isinstance(num_genes, list):
            if len(num_genes) < num_pops:
                raise ValueError("Too few number of gene samples specified")
            samples = num_genes
        else:
            # scalar: every population gets the same number of genes
            samples = [num_genes] * num_pops
    else:
        samples = None
    if pop_sizes is not None and len(pop_sizes) < (2 * num_pops + 1):
        raise ValueError("Too few population sizes specified.")

    # set the ages
    if ages is not None:

        # NOTE(review): the original comment claimed the internal nodes are
        # sorted "newest first", but sorting ascending by distance from the
        # root yields the root (oldest divergence) first — confirm which
        # ordering the ``ages`` vector is expected to follow.
        nodes = tree.nodes(filter_fn = lambda x : not x.is_leaf())
        nodes.sort(key=lambda x: x.distance_from_root())
        # assign the ages
        for index, node in enumerate(nodes):
            for child in node.child_nodes():
                child.edge.length = ages[index] - child.distance_from_tip()

    # set the gene samples
    if samples is not None:
        for index, leaf in enumerate(tree.leaf_node_iter()):
            setattr(leaf, num_genes_attr, samples[index])
            leaf.annotations.add_bound_attribute(num_genes_attr)

    # set the population sizes
    if pop_sizes is not None:
        for index, edge in enumerate(tree.postorder_edge_iter()):
            setattr(edge, pop_size_attr, pop_sizes[index])
            edge.annotations.add_bound_attribute(pop_size_attr)
            if ages is None:
                # rescale edge lengths from population units to generations
                edge.length = edge.length * getattr(edge, pop_size_attr)

    return tree
+
diff --git a/dendropy/simulate/treesim.py b/dendropy/simulate/treesim.py
new file mode 100644
index 0000000..b670071
--- /dev/null
+++ b/dendropy/simulate/treesim.py
@@ -0,0 +1,51 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+This module provides a convenient interface that aggregates, wraps, and/or
+implements functions and classes that simulate trees under various
+models and processes. This module just exposes these functions and classes under
+the ``dendropy.simulate.treesim`` namespace. The actual functions and classes
+are defined under the the appropriate model namespace in the ``dendropy.model``
+sub-package.
+"""
+
+import dendropy
+
+###############################################################################
+## Import tree generation functions
+
+from dendropy.model.birthdeath import birth_death_tree
+from dendropy.model.birthdeath import discrete_birth_death_tree
+from dendropy.model.birthdeath import uniform_pure_birth_tree
+from dendropy.model.coalescent import contained_coalescent_tree
+from dendropy.model.coalescent import pure_kingman_tree
+from dendropy.model.coalescent import mean_kingman_tree
+from dendropy.model.coalescent import constrained_kingman_tree
+from dendropy.model.treeshape import star_tree
+
## Required for Sphinx auto-documentation of this module
__all__ = [
    "birth_death_tree",
    "discrete_birth_death_tree",
    # BUG FIX: uniform_pure_birth_tree is imported above but was omitted
    # here, hiding it from ``import *`` and from Sphinx auto-documentation.
    "uniform_pure_birth_tree",
    "contained_coalescent_tree",
    "pure_kingman_tree",
    "mean_kingman_tree",
    "constrained_kingman_tree",
    "star_tree",
    ]
diff --git a/dendropy/test/__init__.py b/dendropy/test/__init__.py
new file mode 100644
index 0000000..32bd3d4
--- /dev/null
+++ b/dendropy/test/__init__.py
@@ -0,0 +1,26 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DendroPy testing suite.
+"""
+
import unittest

# When this module is executed directly, delegate to unittest's
# command-line entry point (test selection is taken from argv).
if __name__ == "__main__":
    unittest.main()
diff --git a/dendropy/test/__main__.py b/dendropy/test/__main__.py
new file mode 100644
index 0000000..b3f8058
--- /dev/null
+++ b/dendropy/test/__main__.py
@@ -0,0 +1,144 @@
+#! /usr/bin/env python
+
+###############################################################################
+##
+##  Copyright 2012 Jeet Sukumaran.
+##
+##  This program is free software; you can redistribute it and/or modify
+##  it under the terms of the GNU General Public License as published by
+##  the Free Software Foundation; either version 3 of the License, or
+##  (at your option) any later version.
+##
+##  This program is distributed in the hope that it will be useful,
+##  but WITHOUT ANY WARRANTY; without even the implied warranty of
+##  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+##  GNU General Public License for more details.
+##
+##  You should have received a copy of the GNU General Public License along
+##  with this program. If not, see <http://www.gnu.org/licenses/>.
+##
+###############################################################################
+
+import sys
+import os
+import argparse
+import collections
+import unittest
+from dendropy.utility import metavar
+from dendropy.utility import messaging
+sys.path.insert(0, os.path.dirname(__file__))
+from dendropy.test.support import dendropytest
+
def main():
    """
    Command-line entry point for running the DendroPy test suite.

    Resolves positional test names and special "@"-prefixed group patterns
    into fully-qualified test module names, then runs them via unittest.
    """
    group_names = (
        ("@all"       , ".*"),
        ("@datamodel" , ".*_datamodel_.*"),
        ("@dataio"    , ".*_dataio_.*"),
        ("@newick"    , ".*_newick_.*"),
        ("@tree"      , ".*_tree_.*"),
        )
    test_group_patterns = collections.OrderedDict(group_names)
    test_group_names = list(test_group_patterns)
    parser = argparse.ArgumentParser()
    # BUG FIX: the adjacent string literals below previously concatenated
    # without separating whitespace, producing help text such as
    # "testcase" and "groups oftests"; trailing spaces added.
    parser.add_argument("test_names",
            metavar="TEST",
            nargs="*",
            help="Name of test(s) to run. These can be (dot-)qualified module, test "
            "case, or test name (e.g., 'test_module', 'test_module.TestCase1', "
            "'test_module.TestCase1.test1') or special pre-defined groups of "
            "tests (e.g., '@datamodel', '@dataio'). Type '--help-testgroups' for "
            "a list of available groups.")
    parser.add_argument("--help-testgroups",
            action="store_true",
            default=False,
            help="Show list of available test groups and exit.")
    parser.add_argument("--list-only",
            action="store_true",
            default=False,
            help="Do not actually run tests: just print list of test module names and exit.")
    parser.add_argument("-v", "--verbosity",
            default=3,
            type=int,
            help="Messaging noisiness (default: %(default)s)")
    parser.add_argument("--logging-level",
            default=os.environ.get(metavar.LOGGING_LEVEL_ENVAR, "NOTSET"),
            choices=["NOTSET", "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
            help="Test logging level (default: '%(default)s')")
    parser.add_argument("-f", "--fail-fast",
            action="store_true",
            default=False,
            help="Stop the test run on the first error or failure.")
    parser.add_argument("-I", "--fail-incomplete",
            action="store_true",
            default=False,
            help="Fail incomplete or partially-complete test stubs.")
    args = parser.parse_args()

    if args.help_testgroups:
        out = sys.stdout
        out.write("Available special test groups:\n")
        for name in test_group_names:
            out.write("  - {}\n".format(name))
        sys.exit(0)

    # Set logging level:
    os.environ[metavar.LOGGING_LEVEL_ENVAR] = args.logging_level
    # Retained for its side effect of initializing the package logger.
    _LOG = messaging.get_logger("dendropy")

    # Set test specifications
    if args.fail_incomplete:
        os.environ[metavar.FAIL_INCOMPLETE_TESTS_ENVAR] = "1"

    # Resolve positional arguments into fully-qualified module names and/or
    # regular-expression filter patterns for group selection.
    test_names = []
    filter_patterns = []
    for name in args.test_names:
        if name is None:
            continue
        if name.startswith("@"):
            try:
                filter_patterns.append(test_group_patterns[name])
            except KeyError:
                sys.exit("Unrecognized test group name '{}'. Accepted names: {}".format(name, test_group_names))
        else:
            # Accept path-style names ("test/foo.py") as dotted module names.
            name = name.replace(os.sep, ".")
            if name.endswith(".py"):
                name = name[:-3]
            if not name.startswith("dendropy.test."):
                if name.startswith("test."):
                    name = "dendropy." + name
                else:
                    name = "dendropy.test." + name
            test_names.append(name)

    if not test_names and not filter_patterns:
        test_names = dendropytest.discover_test_module_paths() # get all
    if filter_patterns:
        test_names.extend(dendropytest.discover_test_module_paths(filter_patterns))
    test_names = sorted(set(test_names))

    # Verbosity scale:
    # 0: nothing
    # 1: errors and mishaps only + 0
    # 2: warnings + 1
    # 3: general messages + 2
    if args.verbosity >= 3 or args.list_only:
        if args.list_only:
            out = sys.stdout
        else:
            out = sys.stderr
        out.write("DendroPy tests to be run:\n")
        for mp in test_names:
            out.write(" + {}\n".format(mp))

    if args.list_only:
        sys.exit(0)

    tests = unittest.defaultTestLoader.loadTestsFromNames(test_names)
    test_suite = unittest.TestSuite(tests)
    test_runner = unittest.TextTestRunner(verbosity=args.verbosity, failfast=args.fail_fast)
    test_runner.run(test_suite)
+
# Standard entry-point guard: run the CLI when executed as a script.
if __name__ == '__main__':
    main()
+
+
diff --git a/dendropy/test/base_newick_test_cases.py b/dendropy/test/base_newick_test_cases.py
new file mode 100644
index 0000000..ef37ee1
--- /dev/null
+++ b/dendropy/test/base_newick_test_cases.py
@@ -0,0 +1,71 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+import dendropy
+from dendropy.test.support import standard_file_test_trees
+
class NewickTreeListReaderTaxaManagementBaseTestCase(standard_file_test_trees.NewickTestTreesChecker):
    """
    Base test case exercising taxa management when reading NEWICK tree
    lists via path, stream, and string entry points.
    """

    def test_get(self):
        # The same file content is offered through all three "get_from_*"
        # entry points; each resulting TreeList must verify identically.
        tree_file_title = "dendropy-test-trees-n12-x2"
        tree_filepath = self.schema_tree_filepaths[tree_file_title]
        kwargs = {
                "suppress_internal_node_taxa": self.__class__.suppress_internal_node_taxa,
                "suppress_leaf_node_taxa": self.__class__.suppress_leaf_node_taxa,
        }
        with open(tree_filepath, "r") as src:
            tree_string = src.read()
            with open(tree_filepath, "r") as tree_stream:
                approaches = (
                        (dendropy.TreeList.get_from_path, tree_filepath),
                        (dendropy.TreeList.get_from_stream, tree_stream),
                        (dendropy.TreeList.get_from_string, tree_string),
                        )
                for method, src in approaches:
                    tree_list = method(src,
                            self.__class__.schema,
                            **kwargs)
                    self.verify_standard_trees(
                            tree_list=tree_list,
                            tree_file_title=tree_file_title)

    def test_selective_taxa_read(self):
        # The "read_from_*" methods must populate the existing TreeList
        # instance in place rather than replacing it.
        tree_file_title = "dendropy-test-trees-n12-x2"
        tree_filepath = self.schema_tree_filepaths[tree_file_title]
        kwargs = {
            "suppress_internal_node_taxa": self.__class__.suppress_internal_node_taxa,
            "suppress_leaf_node_taxa": self.__class__.suppress_leaf_node_taxa,
        }
        with open(tree_filepath, "r") as src:
            tree_string = src.read()
            with open(tree_filepath, "r") as tree_stream:
                approaches = (
                        ("read_from_path", tree_filepath),
                        ("read_from_stream", tree_stream),
                        ("read_from_string", tree_string),
                        )
                for method, src in approaches:
                    tree_list = dendropy.TreeList()
                    old_id = id(tree_list)
                    f = getattr(tree_list, method)
                    f(src, self.__class__.schema, **kwargs)
                    new_id = id(tree_list)
                    # BUG FIX: these ids were computed but never compared,
                    # so the in-place guarantee was never actually asserted.
                    assert old_id == new_id
                    self.verify_standard_trees(
                            tree_list=tree_list,
                            tree_file_title=tree_file_title)
+
diff --git a/dendropy/test/base_standard_trees_parsing_test_cases.py b/dendropy/test/base_standard_trees_parsing_test_cases.py
new file mode 100644
index 0000000..d83b7f8
--- /dev/null
+++ b/dendropy/test/base_standard_trees_parsing_test_cases.py
@@ -0,0 +1,268 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+import random
+import dendropy
+from dendropy.test.support import standard_file_test_trees
+
class StandardTreesParsingTestCase(standard_file_test_trees.StandardTestTreesChecker):
    """
    Mixin exercising TreeList parsing through the modern 'get'/'read'
    entry points and the legacy 'get_from_*'/'read_from_*' methods, for a
    concrete schema supplied by the deriving test case (as
    ``self.__class__.schema``).
    """

    def _build_tree_offsets(self, expected_number_of_trees):
        """
        Return a mixed set of boundary and random in-range tree offsets,
        both positive and negative.  (Factored out of the two offset tests,
        which previously duplicated this logic verbatim.)
        """
        tree_offsets = set([0, expected_number_of_trees-1, -1, -expected_number_of_trees])
        while len(tree_offsets) < 8:
            tree_offsets.add(random.randint(1, expected_number_of_trees-2))
        while len(tree_offsets) < 12:
            tree_offsets.add(random.randint(-expected_number_of_trees-2, -2))
        return tree_offsets

    def test_default_get(self):
        """Parse several reference files via TreeList.get(path/file/data)."""
        for tree_file_title in [
            "dendropy-test-trees-multifurcating-rooted",
            "dendropy-test-trees-multifurcating-unrooted",
            "dendropy-test-trees-n10-rooted-treeshapes",
            "dendropy-test-trees-n14-unrooted-treeshapes",
                ]:
            tree_filepath = self.schema_tree_filepaths[tree_file_title]
            with open(tree_filepath, "r") as src:
                tree_string = src.read()
            with open(tree_filepath, "r") as tree_stream:
                approaches = (
                        {"path": tree_filepath},
                        {"file": tree_stream},
                        {"data": tree_string},
                        )
                for approach_kwargs in approaches:
                    approach_kwargs["schema"] = self.__class__.schema
                    tree_list = dendropy.TreeList.get(**approach_kwargs)
                    self.verify_standard_trees(tree_list=tree_list,
                            tree_file_title=tree_file_title)

    def test_default_read(self):
        """
        Read into a pre-populated TreeList and check that new trees are
        appended to the *same* object (identity preserved) without
        disturbing the trees already present.
        """
        # NOTE(review): both titles name the same file; this exercises
        # append semantics rather than content mixing -- confirm intended.
        preloaded_tree_file_title = "dendropy-test-trees-n33-unrooted-x10a"
        preloaded_tree_reference = self.tree_references[preloaded_tree_file_title]
        tree_file_title = "dendropy-test-trees-n33-unrooted-x10a"
        tree_reference = self.tree_references[tree_file_title]
        tree_filepath = self.schema_tree_filepaths[tree_file_title]
        with open(tree_filepath, "r") as src:
            tree_string = src.read()
        with open(tree_filepath, "r") as tree_stream:
            approaches = (
                    {"path": tree_filepath},
                    {"file": tree_stream},
                    {"data": tree_string},
                    )
            for approach_kwargs in approaches:
                # prepopulate
                tree_list = dendropy.TreeList.get(
                        path=self.schema_tree_filepaths[preloaded_tree_file_title],
                        schema=self.__class__.schema)
                # check to make sure trees were loaded
                old_len = len(tree_list)
                self.assertEqual(old_len, len(tree_list._trees))
                self.assertEqual(old_len, preloaded_tree_reference["num_trees"])
                self.verify_standard_trees(tree_list, preloaded_tree_file_title)

                # 'read' must extend this same instance in place
                old_id = id(tree_list)
                approach_kwargs["schema"] = self.__class__.schema
                trees_read = tree_list.read(**approach_kwargs)
                new_id = id(tree_list)
                self.assertEqual(old_id, new_id)

                # make sure new trees were added, and the reported count is right
                new_len = len(tree_list)
                self.assertEqual(new_len, len(tree_list._trees))
                expected_number_of_trees = tree_reference["num_trees"]
                self.assertEqual(old_len + expected_number_of_trees, new_len)
                self.assertEqual(trees_read, expected_number_of_trees)

                # check new trees
                for tree_idx, tree in enumerate(tree_list[old_len:]):
                    self.compare_to_reference_by_title_and_index(
                            tree=tree,
                            tree_file_title=tree_file_title,
                            reference_tree_idx=tree_idx)

                # make sure old ones are still intact
                for tree_idx, tree in enumerate(tree_list[:old_len]):
                    self.compare_to_reference_by_title_and_index(
                            tree=tree,
                            tree_file_title=preloaded_tree_file_title,
                            reference_tree_idx=tree_idx)

    def test_tree_offset_get(self):
        """Parse with 'tree_offset' via each legacy 'get_from_*' method."""
        tree_file_title = "dendropy-test-trees-n33-unrooted-x100a"
        tree_reference = self.tree_references[tree_file_title]
        expected_number_of_trees = tree_reference["num_trees"]
        tree_offsets = self._build_tree_offsets(expected_number_of_trees)
        tree_filepath = self.schema_tree_filepaths[tree_file_title]
        with open(tree_filepath, "r") as src:
            tree_string = src.read()
        for tree_offset in tree_offsets:
            with open(tree_filepath, "r") as tree_stream:
                approaches = (
                        (dendropy.TreeList.get_from_path, tree_filepath),
                        (dendropy.TreeList.get_from_stream, tree_stream),
                        (dendropy.TreeList.get_from_string, tree_string),
                        )
                for method, src in approaches:
                    tree_list = method(
                            src,
                            self.__class__.schema,
                            collection_offset=0,
                            tree_offset=tree_offset)
                    self.verify_standard_trees(
                            tree_list=tree_list,
                            tree_file_title=tree_file_title,
                            tree_offset=tree_offset)

    def test_tree_offset_read(self):
        """Parse with 'tree_offset' via each legacy 'read_from_*' method."""
        tree_file_title = "dendropy-test-trees-n33-unrooted-x100a"
        tree_reference = self.tree_references[tree_file_title]
        expected_number_of_trees = tree_reference["num_trees"]
        tree_offsets = self._build_tree_offsets(expected_number_of_trees)
        tree_filepath = self.schema_tree_filepaths[tree_file_title]
        with open(tree_filepath, "r") as src:
            tree_string = src.read()
        for tree_offset in tree_offsets:
            with open(tree_filepath, "r") as tree_stream:
                approaches = (
                        ("read_from_path", tree_filepath),
                        ("read_from_stream", tree_stream),
                        ("read_from_string", tree_string),
                        )
                for method, src in approaches:
                    tree_list = dendropy.TreeList()
                    f = getattr(tree_list, method)
                    # collection_offset is left unspecified here (it was
                    # commented out upstream), exercising the default-
                    # collection code path; the unused 'trees_read' binding
                    # has been dropped.
                    f(src,
                            self.__class__.schema,
                            tree_offset=tree_offset)
                    self.verify_standard_trees(
                            tree_list=tree_list,
                            tree_file_title=tree_file_title,
                            tree_offset=tree_offset)

    def test_out_of_range_tree_offset_get(self):
        """'get_from_*' must raise IndexError for a past-the-end tree_offset."""
        tree_file_title = 'dendropy-test-trees-n33-unrooted-x10a'
        tree_filepath = self.schema_tree_filepaths[tree_file_title]
        tree_reference = self.tree_references[tree_file_title]
        expected_number_of_trees = tree_reference["num_trees"]
        with open(tree_filepath, "r") as src:
            tree_string = src.read()
        with open(tree_filepath, "r") as tree_stream:
            approaches = (
                    (dendropy.TreeList.get_from_path, tree_filepath),
                    (dendropy.TreeList.get_from_stream, tree_stream),
                    (dendropy.TreeList.get_from_string, tree_string),
                    )
            for method, src in approaches:
                with self.assertRaises(IndexError):
                    method(src, self.__class__.schema, collection_offset=0, tree_offset=expected_number_of_trees)

    def test_out_of_range_tree_offset_read(self):
        """'read_from_*' must raise IndexError for a past-the-end tree_offset."""
        tree_file_title = 'dendropy-test-trees-n33-unrooted-x10a'
        tree_filepath = self.schema_tree_filepaths[tree_file_title]
        tree_reference = self.tree_references[tree_file_title]
        expected_number_of_trees = tree_reference["num_trees"]
        with open(tree_filepath, "r") as src:
            tree_string = src.read()
        with open(tree_filepath, "r") as tree_stream:
            approaches = (
                    ("read_from_path", tree_filepath),
                    ("read_from_stream", tree_stream),
                    ("read_from_string", tree_string),
                    )
            for method, src in approaches:
                tree_list = dendropy.TreeList()
                f = getattr(tree_list, method)
                with self.assertRaises(IndexError):
                    f(src, self.__class__.schema, collection_offset=0, tree_offset=expected_number_of_trees)

    def test_out_of_range_collection_offset_get(self):
        """'get_from_*' must raise IndexError for a nonexistent collection."""
        tree_file_title = 'dendropy-test-trees-n33-unrooted-x10a'
        tree_filepath = self.schema_tree_filepaths[tree_file_title]
        with open(tree_filepath, "r") as src:
            tree_string = src.read()
        with open(tree_filepath, "r") as tree_stream:
            approaches = (
                    (dendropy.TreeList.get_from_path, tree_filepath),
                    (dendropy.TreeList.get_from_stream, tree_stream),
                    (dendropy.TreeList.get_from_string, tree_string),
                    )
            for method, src in approaches:
                with self.assertRaises(IndexError):
                    method(src, self.__class__.schema, collection_offset=1, tree_offset=0)

    def test_out_of_range_collection_offset_read(self):
        """'read_from_*' must raise IndexError for a nonexistent collection."""
        tree_file_title = 'dendropy-test-trees-n33-unrooted-x10a'
        tree_filepath = self.schema_tree_filepaths[tree_file_title]
        with open(tree_filepath, "r") as src:
            tree_string = src.read()
        with open(tree_filepath, "r") as tree_stream:
            approaches = (
                    ("read_from_path", tree_filepath),
                    ("read_from_stream", tree_stream),
                    ("read_from_string", tree_string),
                    )
            for method, src in approaches:
                tree_list = dendropy.TreeList()
                f = getattr(tree_list, method)
                with self.assertRaises(IndexError):
                    f(src, self.__class__.schema, collection_offset=1, tree_offset=0)

    def test_unsupported_keyword_arguments_get(self):
        """'get_from_*' must reject unrecognized keyword arguments."""
        tree_file_title = 'dendropy-test-trees-n12-x2'
        tree_filepath = self.schema_tree_filepaths[tree_file_title]
        with open(tree_filepath, "r") as src:
            tree_string = src.read()
        with open(tree_filepath, "r") as tree_stream:
            approaches = (
                    (dendropy.TreeList.get_from_path, tree_filepath),
                    (dendropy.TreeList.get_from_stream, tree_stream),
                    (dendropy.TreeList.get_from_string, tree_string),
                    )
            for method, src in approaches:
                with self.assertRaises(TypeError):
                    method(src,
                            self.__class__.schema,
                            suppress_internal_taxa=True,  # should be suppress_internal_node_taxa
                            gobbledegook=False,
                            )

    def test_unsupported_keyword_arguments_read(self):
        """'read_from_*' must reject unrecognized keyword arguments."""
        tree_file_title = 'dendropy-test-trees-n12-x2'
        tree_filepath = self.schema_tree_filepaths[tree_file_title]
        with open(tree_filepath, "r") as src:
            tree_string = src.read()
        with open(tree_filepath, "r") as tree_stream:
            approaches = (
                    ("read_from_path", tree_filepath),
                    ("read_from_stream", tree_stream),
                    ("read_from_string", tree_string),
                    )
            for method, src in approaches:
                tree_list = dendropy.TreeList()
                f = getattr(tree_list, method)
                with self.assertRaises(TypeError):
                    f(src,
                      self.__class__.schema,
                      suppress_internal_taxa=True,  # should be suppress_internal_node_taxa
                      gobbledegook=False,
                    )
+
diff --git a/dendropy/test/benchmark/__init__.py b/dendropy/test/benchmark/__init__.py
new file mode 100644
index 0000000..442f116
--- /dev/null
+++ b/dendropy/test/benchmark/__init__.py
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
diff --git a/dendropy/test/benchmark/benchmark_newick_light_tree_parser.py b/dendropy/test/benchmark/benchmark_newick_light_tree_parser.py
new file mode 100644
index 0000000..9179b13
--- /dev/null
+++ b/dendropy/test/benchmark/benchmark_newick_light_tree_parser.py
@@ -0,0 +1,205 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Benchmarking NEWICK tree parsing using a light tree data model.
+"""
+
+import sys
+import os
+import timeit
+import argparse
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+from dendropy.utility import messaging
+from dendropy.test.support import pathmap
+
+from dendropy.dataio import nexusprocessing
+from dendropy.dataio import newickreader
+
# Default benchmark inputs; resolved to paths via pathmap.tree_source_path().
TREE_FILENAMES = [
    "APG_Angiosperms.newick",
    "GEBA.tree.newick",
    "feb032009.trees.newick",
    "Bininda-emonds_2007_mammals.newick",
    "Jetz_et_al_2012_Aves.sample.tree.newick",
    "Smith_2001_angiosperms.newick",
        ]
+
class Tree(object):
    """
    Minimal stand-in for the full Tree class used by the light-model
    benchmark: just a root ('seed') node plus the node-factory hook that
    the NEWICK parser requires.
    """

    def __init__(self):
        # Mirrors the attribute name used by the real data model.
        self.seed_node = Node()

    def node_factory(self):
        """Hook called by the parser to create each new node."""
        return Node()
+
class Node(object):
    """
    Minimal node for the light tree model: label, edge, child list, and a
    parent slot.  The 'taxon' constructor argument is accepted for
    signature compatibility but is not stored.
    """

    def __init__(self,
            taxon=None,
            label=None,
            edge=None,
            edge_length=None):
        self.age = None
        self._edge = None  # NOTE(review): vestigial; 'self.edge' below is the live attribute
        self._child_nodes = []
        self._parent_node = None
        if edge is not None:
            self.edge = edge
        else:
            # Default edge points back at this node as its head.
            self.edge = Edge(head_node=self,
                    length=edge_length)
        self.label = label

    def add_child(self, node):
        # The child's parent pointer is deliberately not updated in this
        # light model.
        self._child_nodes.append(node)

    ###########################################################################
    ## Hacked-in NEWICK representation.

    def as_newick_string(self, **kwargs):
        """
        This returns the Node as a NEWICK statement according to the given
        formatting rules. This should be used for debugging purposes only.
        For production purposes, use the the full-fledged 'as_string()'
        method of the object.
        """
        # Bug fix: this module imports the *name* 'StringIO' (from either
        # StringIO or io); the module 'io' itself is never bound, so the
        # original 'io.StringIO()' raised NameError when called.
        out = StringIO()
        self.write_newick(out, **kwargs)
        return out.getvalue()

    def write_newick(self, out, **kwargs):
        """
        This returns the Node as a NEWICK statement according to the given
        formatting rules. This should be used for debugging purposes only.  For
        production purposes, use the the full-fledged 'write_to_stream()'
        method of the object.
        """
        child_nodes = self._child_nodes
        if child_nodes:
            out.write('(')
            f_child = child_nodes[0]
            for child in child_nodes:
                if child is not f_child:
                    out.write(',')
                child.write_newick(out, **kwargs)
            out.write(')')
        if self.label is not None:
            out.write(self.label)
        if self.edge.length is not None:
            out.write(":{}".format(self.edge.length))
+
class Edge(object):
    """
    Bare edge record for the light tree model, linking a tail node to a
    head node with an optional length.
    """

    def __init__(self,
            tail_node=None,
            head_node=None,
            length=None,
            rootedge=False,
            label=None):
        # NOTE(review): 'label' is accepted but not stored -- this light
        # model tracks only topology, length, and the root-edge flag.
        self.head_node = head_node
        self.tail_node = tail_node
        self.length = length
        self.rootedge = rootedge
+
def tree_parsing_fn_factory(src_paths, verbose=False):
    """
    Return a zero-argument callable (suitable for ``timeit``) that parses
    every tree statement in each file of ``src_paths`` using the light
    tree model defined in this module.
    """
    def f():
        for src_path in src_paths:
            if verbose:
                sys.stderr.write("  .. {}\n".format(src_path))
            # Fix: the old open(src_path, "rU") handle was never closed,
            # and "U" mode is deprecated (removed in Python 3.11);
            # universal newlines are the default in text mode anyway.
            with open(src_path, "r") as src:
                nt = nexusprocessing.NexusTokenizer(src)
                np = newickreader.NewickTreeParser()
                # Parse tree statements until the tokenizer is exhausted.
                while True:
                    t = np.parse_tree_statement(nt, tree_factory=Tree)
                    if t is None:
                        break
    return f
+
def main():
    """Parse command-line options, run the parsing benchmark, report timings."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("-f", "--target-file",
            type=str,
            dest="target_files",
            default=[],
            action="append",
            help="""Path to file to be tokenized; option may be specified multiple times for multiple files. If not specified, default target set will be used.""")
    parser.add_argument("-r", "--repeat",
            type=int,
            default=10,
            help="Repeat each tokenization this number of times (default=%(default)s).")
    # Both spellings are registered (they share the 'delimited_output'
    # destination); the underscore form is kept for backward compatibility.
    parser.add_argument("--delimited_output",
            action="store_true",
            default=False,
            help="Output in tab-delimited instead of aligned format")
    parser.add_argument("--delimited-output",
            action="store_true",
            default=False,
            help="Output in tab-delimited instead of aligned format")
    args = parser.parse_args()

    messenger = messaging.ConsoleMessenger(name="-benchmark")

    src_descs = []   # (category, display-name) pairs, parallel to src_paths
    src_paths = []
    results = []

    if args.target_files:
        for f in args.target_files:
            ff = os.path.expanduser(os.path.expandvars(f))
            src_paths.append(ff)
            src_descs.append( ("User", f) )
    else:
        messenger.info("No sources specified: adding default benchmark target set")
        for f in TREE_FILENAMES:
            ff = pathmap.tree_source_path(f)
            src_paths.append(ff)
            src_descs.append( ("Default", f) )

    for src_path, src_desc in zip(src_paths, src_descs):
        messenger.info("Processing: '{}'".format(src_desc[1]))
        t = timeit.Timer(tree_parsing_fn_factory([src_path]))
        # Best-of-N is the standard way to reduce scheduling noise.
        result = min(t.repeat(args.repeat, 1))
        # (typo "repetions" fixed)
        messenger.info("Best time (of {} repetitions): {:.10f} seconds".format(args.repeat, result))
        results.append(result)

    messenger.info("Benchmarking complete: all files processed")

    if args.delimited_output:
        result_template = "{}\t{}\t{:.10f}\n"
        header_template = "{}\t{}\t{}\n"
    else:
        # Pad each column to its widest entry for aligned output.
        max_len1 = max(len(r[0]) for r in src_descs)
        max_len2 = max(len(r[1]) for r in src_descs)
        col1 = "{{:{}}}".format(max_len1)
        col2 = "{{:{}}}".format(max_len2)
        result_template = "[" + col1 + "]  " + col2 + "  {:.10f}\n"
        header_template = col1 + "    " + col2 + "  {}\n"
    sys.stdout.write(header_template.format("Type", "File", "Seconds"))
    for result, src_desc in zip(results, src_descs):
        sys.stdout.write(result_template.format(src_desc[0], src_desc[1], result))
+
+if __name__ == "__main__":
+    main()
+
+
+
+
diff --git a/dendropy/test/benchmark/benchmark_newick_tree_parser.py b/dendropy/test/benchmark/benchmark_newick_tree_parser.py
new file mode 100644
index 0000000..3340cf6
--- /dev/null
+++ b/dendropy/test/benchmark/benchmark_newick_tree_parser.py
@@ -0,0 +1,123 @@
+
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Benchmarking tree parsing.
+"""
+
+import sys
+import os
+import timeit
+import argparse
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+from dendropy.utility import messaging
+from dendropy.test.support import pathmap
+
+import dendropy
+
# Default benchmark inputs; resolved to paths via pathmap.tree_source_path().
TREE_FILENAMES = [
    "APG_Angiosperms.newick",
    "GEBA.tree.newick",
    "feb032009.trees.newick",
    "Bininda-emonds_2007_mammals.newick",
    "Jetz_et_al_2012_Aves.sample.tree.newick",
    "Smith_2001_angiosperms.newick",
        ]
+
def tree_parsing_fn_factory(src_paths, verbose=False):
    """
    Build a zero-argument callable (suitable for ``timeit``) that reads
    every path in ``src_paths`` into a single TreeList via the full
    DendroPy parsing machinery.
    """
    def run():
        tree_list = dendropy.TreeList()
        for path in src_paths:
            if verbose:
                sys.stderr.write("  .. {}\n".format(path))
            tree_list.read_from_path(path, "newick")
    return run
+
def main():
    """Parse command-line options, run the parsing benchmark, report timings."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("-f", "--target-file",
            type=str,
            dest="target_files",
            default=[],
            action="append",
            help="""Path to file to be tokenized; option may be specified multiple times for multiple files. If not specified, default target set will be used.""")
    parser.add_argument("-r", "--repeat",
            type=int,
            default=10,
            help="Repeat each tokenization this number of times (default=%(default)s).")
    # Both spellings are registered (they share the 'delimited_output'
    # destination); the underscore form is kept for backward compatibility.
    parser.add_argument("--delimited_output",
            action="store_true",
            default=False,
            help="Output in tab-delimited instead of aligned format")
    parser.add_argument("--delimited-output",
            action="store_true",
            default=False,
            help="Output in tab-delimited instead of aligned format")
    args = parser.parse_args()

    messenger = messaging.ConsoleMessenger(name="-benchmark")

    src_descs = []   # (category, display-name) pairs, parallel to src_paths
    src_paths = []
    results = []

    if args.target_files:
        for f in args.target_files:
            ff = os.path.expanduser(os.path.expandvars(f))
            src_paths.append(ff)
            src_descs.append( ("User", f) )
    else:
        messenger.info("No sources specified: adding default benchmark target set")
        for f in TREE_FILENAMES:
            ff = pathmap.tree_source_path(f)
            src_paths.append(ff)
            src_descs.append( ("Default", f) )

    for src_path, src_desc in zip(src_paths, src_descs):
        messenger.info("Processing: '{}'".format(src_desc[1]))
        t = timeit.Timer(tree_parsing_fn_factory([src_path]))
        # Best-of-N is the standard way to reduce scheduling noise.
        result = min(t.repeat(args.repeat, 1))
        # (typo "repetions" fixed)
        messenger.info("Best time (of {} repetitions): {:.10f} seconds".format(args.repeat, result))
        results.append(result)

    messenger.info("Benchmarking complete: all files processed")

    if args.delimited_output:
        result_template = "{}\t{}\t{:.10f}\n"
        header_template = "{}\t{}\t{}\n"
    else:
        # Pad each column to its widest entry for aligned output.
        max_len1 = max(len(r[0]) for r in src_descs)
        max_len2 = max(len(r[1]) for r in src_descs)
        col1 = "{{:{}}}".format(max_len1)
        col2 = "{{:{}}}".format(max_len2)
        result_template = "[" + col1 + "]  " + col2 + "  {:.10f}\n"
        header_template = col1 + "    " + col2 + "  {}\n"
    sys.stdout.write(header_template.format("Type", "File", "Seconds"))
    for result, src_desc in zip(results, src_descs):
        sys.stdout.write(result_template.format(src_desc[0], src_desc[1], result))
+
+if __name__ == "__main__":
+    main()
+
+
+
+
diff --git a/dendropy/test/benchmark/benchmark_tokenizer.py b/dendropy/test/benchmark/benchmark_tokenizer.py
new file mode 100644
index 0000000..2ed57bf
--- /dev/null
+++ b/dendropy/test/benchmark/benchmark_tokenizer.py
@@ -0,0 +1,144 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Benchmarking (raw) NEXUS tokenizer performance.
+"""
+
+import sys
+import os
+import timeit
+import argparse
+
+from dendropy.utility import messaging
+from dendropy.test.support import pathmap
+
+from dendropy.dataio import nexusprocessing
+
# Default tree-file inputs; resolved via pathmap.tree_source_path().
TREE_FILENAMES = [
    "APG_Angiosperms.nexus",
    "APG_Angiosperms.newick",
    "GEBA.tree.nexus",
    "GEBA.tree.newick",
    "feb032009.trees.nexus",
    "feb032009.trees.newick",
    "Bininda-emonds_2007_mammals.nexus",
    "Bininda-emonds_2007_mammals.newick",
    "Jetz_et_al_2012_Aves.sample.tree.nexus",
    "Jetz_et_al_2012_Aves.sample.tree.newick",
    "Smith_2001_angiosperms.nexus",
    "Smith_2001_angiosperms.newick",
        ]
# Default character-matrix inputs; resolved via pathmap.char_source_path().
CHAR_FILENAMES = [
    "actinopterygii.chars.nexus",
    "angiosperms.chars.nexus",
        ]
+
def tokenizing_fn_factory(src_paths, verbose=False):
    """
    Return a zero-argument callable (suitable for ``timeit``) that
    tokenizes each file in ``src_paths`` to exhaustion.
    """
    def f():
        for src_path in src_paths:
            if verbose:
                sys.stderr.write("  .. {}\n".format(src_path))
            # Fix: the old open(src_path, "rU") handle was never closed,
            # and "U" mode is deprecated (removed in Python 3.11);
            # universal newlines are the default in text mode anyway.
            with open(src_path, "r") as src:
                nt = nexusprocessing.NexusTokenizer(src)
                # Drain the tokenizer; the tokens themselves are discarded.
                for token in nt:
                    pass
    return f
+
def main():
    """Parse command-line options, run the tokenizer benchmark, report timings."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("-f", "--target-file",
            type=str,
            dest="target_files",
            default=[],
            action="append",
            help="Path to file to be tokenized; option may be specified multiple times for multiple files.")
    parser.add_argument("-t", "--target-type",
            type=str,
            dest="target_types",
            default=[],
            choices=["trees", "chars", "all"],
            action="append",
            help="Input data file types (default='all' if '-f'/'--file' argument not given); option may be specified multiple times.")
    parser.add_argument("-r", "--repeat",
            type=int,
            default=10,
            help="Repeat each tokenization this number of times (default=%(default)s).")
    parser.add_argument("--delimited-output",
            action="store_true",
            default=False,
            help="Output in tab-delimited instead of aligned format")
    args = parser.parse_args()

    messenger = messaging.ConsoleMessenger(name="-benchmark")

    src_descs = []   # (category, display-name) pairs, parallel to src_paths
    src_paths = []
    results = []

    if args.target_files:
        for f in args.target_files:
            ff = os.path.expanduser(os.path.expandvars(f))
            src_paths.append(ff)
            src_descs.append( ("User", f) )

    # With neither explicit files nor types, fall back to everything.
    if not args.target_types and not args.target_files:
        messenger.info("No sources specified: adding default benchmark target set")
        args.target_types = ["all"]

    if "all" in args.target_types or "trees" in args.target_types:
        for f in TREE_FILENAMES:
            ff = pathmap.tree_source_path(f)
            src_paths.append(ff)
            src_descs.append( ("Trees", f) )

    if "all" in args.target_types or "chars" in args.target_types:
        for f in CHAR_FILENAMES:
            ff = pathmap.char_source_path(f)
            src_paths.append(ff)
            src_descs.append( ("Alignment", f) )

    for src_path, src_desc in zip(src_paths, src_descs):
        messenger.info("Processing: '{}'".format(src_desc[1]))
        t = timeit.Timer(tokenizing_fn_factory([src_path]))
        # Best-of-N is the standard way to reduce scheduling noise.
        result = min(t.repeat(args.repeat, 1))
        # (typo "repetions" fixed)
        messenger.info("Best time (of {} repetitions): {:.10f} seconds".format(args.repeat, result))
        results.append(result)

    messenger.info("Benchmarking complete: all files processed")

    if args.delimited_output:
        result_template = "{}\t{}\t{:.10f}\n"
        header_template = "{}\t{}\t{}\n"
    else:
        # Pad each column to its widest entry for aligned output.
        max_len1 = max(len(r[0]) for r in src_descs)
        max_len2 = max(len(r[1]) for r in src_descs)
        col1 = "{{:{}}}".format(max_len1)
        col2 = "{{:{}}}".format(max_len2)
        result_template = "[" + col1 + "]  " + col2 + "  {:.10f}\n"
        header_template = col1 + "    " + col2 + "  {}\n"
    sys.stdout.write(header_template.format("Type", "File", "Seconds"))
    for result, src_desc in zip(results, src_descs):
        sys.stdout.write(result_template.format(src_desc[0], src_desc[1], result))
+
+if __name__ == "__main__":
+    main()
+
+
+
+
diff --git a/dendropy/test/support/__init__.py b/dendropy/test/support/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/dendropy/test/support/compare_and_validate.py b/dendropy/test/support/compare_and_validate.py
new file mode 100644
index 0000000..89990ac
--- /dev/null
+++ b/dendropy/test/support/compare_and_validate.py
@@ -0,0 +1,286 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+from . import pathmap
+from dendropy.datamodel import basemodel
+from dendropy.datamodel import charmatrixmodel
+
+class ValidateWriteable(object):
+
+    def write_out_validate_equal_and_return(self,
+            writeable,
+            schema,
+            kwargs):
+        self.maxDiff = None
+        sio = StringIO()
+        writeable.write(file=sio, schema=schema, **kwargs)
+        s0 = sio.getvalue()
+        s1 = writeable.as_string(schema=schema, **kwargs)
+        self.assertEqual(s0, s1)
+        with pathmap.SandboxedFile() as tempf:
+            writeable.write(path=tempf.name, schema=schema, **kwargs)
+            tempf.flush()
+            tempf.close()
+            with open(tempf.name, "r") as src:
+                s2 = src.read()
+        self.assertEqual(s0, s2)
+        return s0
+
+class ValidateReadable(object):
+
+    def get_from(self,
+            object_type,
+            source_path,
+            schema,
+            kwargs):
+        results = []
+        with open(source_path, "r") as s0:
+            obj0 = object_type.get(stream=s0, schema=schema, **kwargs)
+            results.append(obj0)
+        obj1 = object_type.get(path=source_path, schema=schema, **kwargs)
+        results.append(obj1)
+        with open(source_path, "r") as s0:
+            s = s0.read()
+            obj2 = object_type.get(data=s, schema=schema, **kwargs)
+        results.append(obj2)
+        return results
+
+    def read_from(self,
+            object_factory,
+            source_path,
+            schema,
+            kwargs):
+        results = []
+        obj0 = object_factory()
+        with open(source_path, "r") as s0:
+            obj0.read(stream=s0, schema=schema, **kwargs)
+            results.append(obj0)
+        obj1 = object_factory()
+        obj1.read(path=source_path, schema=schema, **kwargs)
+        results.append(obj1)
+        obj2 = object_factory()
+        with open(source_path, "r") as s0:
+            s = s0.read()
+            obj2.read(value=s, schema=schema, **kwargs)
+        results.append(obj2)
+        return results
+
+class Comparator(object):
+    """
+    Mixin of deep-comparison assertion helpers: each method verifies that
+    two objects are structurally equal yet physically distinct (i.e., the
+    expected outcome of a deep copy). The host class must supply the
+    unittest.TestCase assertion methods (assertIs, assertEqual, etc.).
+    """
+
+    def compare_distinct_taxon(self,
+            x1, x2,
+            taxon_namespace_scoped=True,
+            compare_annotations=True):
+        # When the two objects share a taxon namespace, the taxa must be
+        # the very same object; otherwise both None, or distinct objects
+        # with equal labels.
+        if taxon_namespace_scoped:
+            self.assertIs(x1, x2)
+        else:
+            if x1 is None or x2 is None:
+                self.assertIs(x1, None)
+                self.assertIs(x2, None)
+            else:
+                self.assertIsNot(x1, x2)
+                self.assertEqual(x1.label, x2.label)
+                if compare_annotations:
+                    self.compare_distinct_annotables(x1, x2)
+
+    def compare_distinct_taxon_namespace(self,
+            x1, x2,
+            taxon_namespace_scoped=True,
+            compare_annotations=True,):
+        # Same identity-vs-distinctness contract as compare_distinct_taxon,
+        # extended member-wise over the namespaces' taxa.
+        if taxon_namespace_scoped:
+            self.assertIs(x1, x2)
+        else:
+            if x1 is None or x2 is None:
+                self.assertIs(x1, None)
+                self.assertIs(x2, None)
+            else:
+                self.assertIsNot(x1, x2)
+                self.assertEqual(x1.label, x2.label)
+                self.assertEqual(len(x1._taxa), len(x2._taxa))
+                for t1, t2 in zip(x1._taxa, x2._taxa):
+                    self.compare_distinct_taxon(t1, t2,
+                            taxon_namespace_scoped=taxon_namespace_scoped,
+                            compare_annotations=compare_annotations)
+                if compare_annotations:
+                    self.compare_distinct_annotables(x1, x2)
+
+    def compare_distinct_sequences(self,
+            x1, x2,
+            compare_annotations=True):
+        # Sequences must be distinct objects of equal length with
+        # element-wise equal characters.
+        self.assertIsNot(x1, x2)
+        self.assertEqual(len(x1), len(x2))
+        for c1, c2 in zip(x1, x2):
+            self.assertEqual(c1, c2)
+        if compare_annotations:
+            self.compare_distinct_annotables(x1, x2)
+
+    def compare_distinct_char_matrix(self,
+            x1, x2,
+            taxon_namespace_scoped=True,
+            compare_matrix_annotations=True,
+            compare_sequence_annotations=True,
+            compare_taxon_annotations=True):
+        """
+        Assert two character matrices are deep-copy-distinct yet equal:
+        same label/data_type, member-wise equal taxa and sequences.
+        """
+        self.assertIsNot(x1, x2)
+        self.assertEqual(len(x1), len(x2))
+        self.compare_distinct_taxon_namespace(x1.taxon_namespace, x2.taxon_namespace,
+                taxon_namespace_scoped=taxon_namespace_scoped,
+                compare_annotations=compare_taxon_annotations,
+                )
+        self.assertEqual(x1.label, x2.label)
+        self.assertEqual(x1.data_type, x2.data_type)
+        if isinstance(x1, charmatrixmodel.DiscreteCharacterMatrix):
+            # print(x1.data_type, x1.datatype_alphabet)
+            # State alphabets are expected to be identical objects even
+            # across distinct matrices (presumably shared globals -- the
+            # identity assertion below would fail otherwise).
+            self.assertEqual(len(x1.state_alphabets), len(x2.state_alphabets))
+            for sa1, sa2 in zip(x1.state_alphabets, x2.state_alphabets):
+                self.assertIs(sa1, sa2)
+            self.assertIs(x1._default_state_alphabet, x2._default_state_alphabet)
+        for t1, t2 in zip(x1, x2):
+            if taxon_namespace_scoped:
+                self.assertIs(t1, t2)
+            else:
+                self.assertIsNot(t1, t2)
+            s1 = x1[t1]
+            s2 = x2[t2]
+            self.compare_distinct_sequences(s1, s2,
+                    compare_annotations=compare_sequence_annotations)
+        if compare_matrix_annotations:
+            self.compare_distinct_annotables(x1, x2)
+
+    def compare_distinct_tree_list(self,
+            x1, x2,
+            taxon_namespace_scoped=True,
+            compare_tree_annotations=True,
+            compare_taxon_annotations=True):
+        # Tree lists must be distinct, with pairwise-distinct but
+        # structurally equal member trees.
+        self.assertIsNot(x1, x2)
+        self.assertEqual(len(x1), len(x2))
+        self.compare_distinct_taxon_namespace(x1.taxon_namespace, x2.taxon_namespace,
+                taxon_namespace_scoped=taxon_namespace_scoped,
+                compare_annotations=compare_taxon_annotations,
+                )
+        self.assertEqual(x1.label, x2.label)
+        for t1, t2 in zip(x1, x2):
+            self.assertIsNot(t1, t2)
+            self.compare_distinct_trees(t1, t2,
+                    taxon_namespace_scoped=taxon_namespace_scoped,
+                    compare_tree_annotations=compare_tree_annotations,
+                    compare_taxon_annotations=compare_taxon_annotations)
+
+    def compare_distinct_trees(self,
+            x1, x2,
+            taxon_namespace_scoped=True,
+            compare_tree_annotations=True,
+            compare_taxon_annotations=True):
+        # Trees are compared structurally by recursing from their seed
+        # (root) nodes.
+        self.assertIsNot(x1, x2)
+        self.assertEqual(x1.label, x2.label)
+        self.compare_distinct_taxon_namespace(x1.taxon_namespace, x2.taxon_namespace,
+                taxon_namespace_scoped=taxon_namespace_scoped,
+                compare_annotations=compare_taxon_annotations,
+                )
+        self.compare_distinct_nodes(x1.seed_node, x2.seed_node,
+                taxon_namespace_scoped=taxon_namespace_scoped,
+                compare_tree_annotations=compare_tree_annotations,
+                compare_taxon_annotations=compare_taxon_annotations)
+
+    def compare_distinct_nodes(self,
+            x1, x2,
+            taxon_namespace_scoped=True,
+            compare_tree_annotations=True,
+            compare_taxon_annotations=True,
+            check_children=True):
+        """
+        Assert two nodes (and their edges) are distinct but equivalent.
+        ``check_children=False`` limits the recursion when re-checking a
+        parent from a child, preventing infinite ping-pong up/down the tree.
+        """
+        self.assertIsNot(x1, x2)
+        self.assertEqual(x1.label, x2.label)
+        # NOTE(review): taxon1/taxon2 are assigned but never used below.
+        taxon1 = x1.taxon
+        taxon2 = x2.taxon
+        self.compare_distinct_taxon(x1.taxon, x2.taxon,
+                taxon_namespace_scoped=taxon_namespace_scoped,
+                compare_annotations=compare_taxon_annotations)
+        self.assertIsNot(x1.edge, x2.edge)
+        self.assertEqual(x1.edge.label, x2.edge.label)
+        self.assertEqual(x1.edge.length, x2.edge.length)
+        # Each node must be its own edge's head.
+        self.assertIs(x1.edge.head_node, x1)
+        self.assertIs(x2.edge.head_node, x2)
+        if x1._parent_node is None or x2._parent_node is None:
+            # Both must be roots together: no parent, no tail node.
+            self.assertIs(x1._parent_node, None)
+            self.assertIs(x2._parent_node, None)
+            self.assertIs(x1.edge.tail_node, None)
+            self.assertIs(x2.edge.tail_node, None)
+        else:
+            self.compare_distinct_nodes(x1._parent_node, x2._parent_node,
+                    taxon_namespace_scoped=taxon_namespace_scoped,
+                    compare_tree_annotations=compare_tree_annotations,
+                    compare_taxon_annotations=compare_taxon_annotations,
+                    check_children=False)
+            # Each node belongs to its own parent's child list only --
+            # no cross-linking between the two trees.
+            self.assertIn(x1, x1._parent_node._child_nodes)
+            self.assertNotIn(x1, x2._parent_node._child_nodes)
+            self.assertIn(x2, x2._parent_node._child_nodes)
+            self.assertNotIn(x2, x1._parent_node._child_nodes)
+        if check_children:
+            self.assertEqual(len(x1._child_nodes), len(x2._child_nodes))
+            for c1, c2 in zip(x1._child_nodes, x2._child_nodes):
+                self.compare_distinct_nodes(c1, c2,
+                        taxon_namespace_scoped=taxon_namespace_scoped,
+                        compare_tree_annotations=compare_tree_annotations,
+                        compare_taxon_annotations=compare_taxon_annotations,
+                        check_children=True)
+        if compare_tree_annotations:
+            self.compare_distinct_annotables(x1, x2)
+            self.compare_distinct_annotables(x1.edge, x2.edge)
+
+    def compare_distinct_annotables(self, x1, x2):
+        """
+        Assert two annotable objects carry distinct but equal annotation
+        sets; both sides must agree on whether annotations exist at all.
+        """
+        self.assertIsNot(x1, x2)
+        if not x1.has_annotations:
+            self.assertTrue( (not hasattr(x1, "_annotations")) or len(x1._annotations) == 0 )
+            self.assertFalse(x2.has_annotations)
+            self.assertTrue( (not hasattr(x2, "_annotations")) or len(x2._annotations) == 0 )
+            return
+        self.assertTrue( hasattr(x1, "_annotations") and len(x1._annotations) > 0 )
+        self.assertTrue(x2.has_annotations)
+        self.assertTrue( hasattr(x2, "_annotations") and len(x2._annotations) > 0 )
+        self.assertIs(x1._annotations.target, x1)
+        self.assertIs(x2._annotations.target, x2)
+        self.assertIsNot(x1._annotations, x2._annotations)
+        self.assertEqual(len(x1._annotations), len(x2._annotations))
+        for a1, a2 in zip(x1._annotations, x2._annotations):
+            self.assertIsNot(a1, a2)
+            if a1.is_attribute:
+                self.assertTrue(a2.is_attribute)
+                # Only _value[1] is compared for attribute annotations;
+                # presumably _value[0] is the owning object, which is
+                # expected to differ -- TODO confirm against basemodel.
+                self.assertEqual(a1._value[1], a2._value[1])
+            else:
+                self.assertEqual(a1._value, a2._value)
+            for k in a1.__dict__:
+                if k == "_value":
+                    continue
+                self.assertIn(k, a2.__dict__)
+                v1 = a1.__dict__[k]
+                v2 = a2.__dict__[k]
+                if isinstance(v1, basemodel.DataObject):
+                    self.assertTrue(isinstance(v2, basemodel.DataObject))
+                    self.assertIsNot(v1, v2)
+                elif isinstance(v1, basemodel.AnnotationSet):
+                    self.assertTrue(isinstance(v2, basemodel.AnnotationSet))
+                    self.assertIs(v1.target, a1)
+                    self.assertIs(v2.target, a2)
+                    for s1, s2 in zip(v1, v2):
+                        self.compare_distinct_annotables(s1, s2)
+                else:
+                    self.assertEqual(v1, v2)
+                # NOTE(review): this recursive check of (a1, a2) runs once
+                # per attribute key; it may have been intended to sit
+                # outside the key loop -- confirm.
+                self.compare_distinct_annotables(a1, a2)
diff --git a/dendropy/test/support/coverage_analysis.py b/dendropy/test/support/coverage_analysis.py
new file mode 100644
index 0000000..d155b2c
--- /dev/null
+++ b/dendropy/test/support/coverage_analysis.py
@@ -0,0 +1,123 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Support for coverage analysis.
+"""
+
+import unittest
+import shutil
+import sys
+from optparse import OptionParser
+from dendropy.utility import messaging
+_LOG = messaging.get_logger(__name__)
+
+DENDROPY_COVERAGE_ANALYSIS_AVAILABLE = False
+try:
+    from setuptools import Command
+except ImportError:
+    _LOG.warn("setuptools.Command could not be imported: setuptools extensions not available")
+else:
+    try:
+        import coverage
+    except ImportError:
+        _LOG.warn("coverage could not be imported: test coverage analysis not available")
+    else:
+        _LOG.info("coverage imported successfully: test coverage analysis available")
+        DENDROPY_COVERAGE_ANALYSIS_AVAILABLE = True
+
+        from dendropy.test.support.dendropytest import get_test_suite
+        from dendropy.test.support import pathmap
+
+        class CoverageAnalysis(Command):
+            """
+            Code coverage analysis command.
+            """
+
+            description = "run test coverage analysis"
+            user_options = [
+                ('erase', None, "remove all existing coverage results"),
+                ('branch', 'b', 'measure branch coverage in addition to statement coverage'),
+                ('test-module=', 't', "explicitly specify a module to test (e.g. 'dendropy.test.test_containers')"),
+                ('no-annotate', None, "do not create annotated source code files"),
+                ('no-html', None, "do not create HTML report files"),
+            ]
+
+            def initialize_options(self):
+                """
+                Initialize options to default values.
+                """
+                self.test_module = None
+                self.branch = False
+                self.erase = False
+                self.no_annotate = False
+                self.no_html = False
+                self.omit_prefixes = ['dendropy/test']
+
+            def finalize_options(self):
+                pass
+
+            def run(self):
+                """
+                Main command implementation.
+                """
+
+                if self.erase:
+                    _LOG.warn("removing coverage results directory: '%s'" % pathmap.TESTS_COVERAGE_DIR)
+                    try:
+                        shutil.rmtree(pathmap.TESTS_COVERAGE_DIR)
+                    except:
+                        pass
+                else:
+                    _LOG.info("running coverage analysis ...")
+                    if self.test_module is None:
+                        test_suite = get_test_suite()
+                    else:
+                        test_suite = get_test_suite([self.test_module])
+                    runner = unittest.TextTestRunner()
+                    cov = coverage.coverage(branch=self.branch)
+                    cov.start()
+                    runner.run(test_suite)
+                    cov.stop()
+                    if not self.no_annotate:
+                        cov.annotate(omit_prefixes=self.omit_prefixes,
+                                directory=pathmap.TESTS_COVERAGE_SOURCE_DIR)
+                    if not self.no_html:
+                        cov.html_report(omit_prefixes=self.omit_prefixes,
+                                directory=pathmap.TESTS_COVERAGE_REPORT_DIR)
+                    cov.report(omit_prefixes=self.omit_prefixes)
+
+#if __name__ == "__main__":
+#    if DENDROPY_COVERAGE_ANALYSIS_AVAILABLE:
+#        parser = OptionParser(add_help_option=True)
+#        parser.add_option('--erase', dest='erase', action="store_true", default=False, help="remove all existing coverage results")
+#        parser.add_option('--branch', '-b', dest='branch', action="store_true", default=False, help='measure branch coverage in addition to statement coverage')
+#        parser.add_option('--test-file', '-t', dest='test_module', default=None, help="explicitly specify a module to test (e.g. 'dendropy.test.test_containers')")
+#        parser.add_option('--no-annotate', dest='no_annotate', action="store_true", default=False, help="do not create annotated source code files"),
+#        parser.add_option('--no-html', dest='no_html', action="store_true", default=False, help="do not create HTML report files"),
+#        (opts, args) = parser.parse_args()
+#        cov = CoverageAnalysis()
+#        cov.erase = opts.erase
+#        cov.branch = opts.branch
+#        cov.test_module = opts.test_module
+#        cov.no_annotate = opts.no_annotate
+#        cov.no_html = opts.no_html
+#        cov.run()
+#    else:
+#        sys.stderr.write("Coverage command extension not available: either setuptools or coverage or both could not be imported.\n")
+#        sys.exit(1)
diff --git a/dendropy/test/support/curated_dataset.py b/dendropy/test/support/curated_dataset.py
new file mode 100644
index 0000000..8a64fe3
--- /dev/null
+++ b/dendropy/test/support/curated_dataset.py
@@ -0,0 +1,50 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+DataSet test data generation and verification.
+"""
+
+import dendropy
+from dendropy.test.support import standard_file_test_chars
+
+class CuratedDataSetGenerator(object):
+    """
+    Work-in-progress factory for curated DataSet test fixtures; the
+    component builders below are still unimplemented stubs.
+    """
+
+    def get_dataset(self):
+        # NOTE(review): incomplete -- constructs a DataSet and a
+        # TaxonNamespace but never populates or returns them (the method
+        # implicitly returns None).
+        ds = dendropy.DataSet()
+        tns1 = dendropy.TaxonNamespace(label="tns1")
+
+    def get_taxon_namespace(self):
+        # Stub: not yet implemented.
+        pass
+
+    def get_tree_list(self, taxon_namespace):
+        # Stub: not yet implemented.
+        pass
+
+    def get_standard_char_matrix(self, taxon_namespace):
+        # Stub: not yet implemented.
+        pass
+
+    def get_dna_char_matrix(self, taxon_namespace):
+        # Stub: not yet implemented.
+        pass
+
+    def get_rna_char_matrix(self, taxon_namespace):
+        # Stub: not yet implemented.
+        pass
+
+    def get_protein_char_matrix(self, taxon_namespace):
+        # Stub: not yet implemented.
+        pass
+
+
diff --git a/dendropy/test/support/curated_test_tree.py b/dendropy/test/support/curated_test_tree.py
new file mode 100644
index 0000000..393114b
--- /dev/null
+++ b/dendropy/test/support/curated_test_tree.py
@@ -0,0 +1,355 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tree test data generation and verification.
+"""
+
+import sys
+import dendropy
+
+###############################################################################
+## Test Tree
+# Following tree:
+#
+#                  a
+#                 / \
+#                /   \
+#               /     \
+#              /       \
+#             /         \
+#            /           \
+#           /             c
+#          b             / \
+#         / \           /   \
+#        /   e         /     f
+#       /   / \       /     / \
+#      /   /   \     g     /   h
+#     /   /     \   / \   /   / \
+#    i    j     k  l  m  n   o   p
+#
+#  Can be specified as:
+#
+#      a -> b -> i;
+#      b -> e -> j;
+#      e -> k;
+#      a -> c;
+#      c -> g;
+#      c -> f;
+#      g -> l;
+#      g -> m;
+#      f -> n;
+#      f -> h -> o;
+#      h -> p;
+class CuratedTestTree(object):
+    """
+    Fixture mixin that builds, verifies, and renders a fixed 15-node test
+    tree (topology diagrammed in the module-level comment above). The
+    class attributes record the tree's expected traversal orders, edge
+    lengths, node ages, and parent/child/sibling relations; test cases
+    compare actual trees against these.
+    """
+    dot_str = "a -> b -> i; b -> e -> j; e -> k; a -> c; c -> g; c -> f; g -> l; g -> m; f -> n; f -> h -> o; h -> p;"
+    newick_unweighted_edges_str = "((i, (j, k)e)b, ((l, m)g, (n, (o, p)h)f)c)a;"
+    newick_weighted_edges_str = "((i:17, (j:3, k:3)e:14)b:33, ((l:6, m:6)g:30, (n:23, (o:11, p:11)h:12)f:13)c:14)a:15;"
+    # Expected node-label visitation orders for the various traversals.
+    preorder_sequence = ("a", "b", "i", "e", "j", "k", "c", "g", "l", "m", "f", "n", "h", "o", "p",)
+    postorder_sequence = ("i", "j", "k", "e", "b", "l", "m", "g", "n", "o", "p", "h", "f", "c", "a",)
+    leaf_sequence = ("i", "j", "k", "l", "m", "n", "o", "p",)
+    levelorder_sequence = ("a", "b", "c", "i", "e", "g", "f", "j", "k", "l", "m", "n", "h", "o", "p",)
+    internal_levelorder_sequence = ("a", "bc", "egf", "h",)
+    inorder_sequence = ("i", "b", "j", "e", "k", "a", "l", "g", "m", "c", "n", "f", "o", "h", "p",)
+    ageorder_sequence = ("i", "j", "k", "l", "m", "n", "o", "p", "e", "g", "h", "b", "f", "c", "a",)
+    leaf_labels = ( "i", "j", "k", "l", "m", "n", "o", "p",)
+    internal_labels = ( "a", "b", "c", "e", "f", "g", "h",)
+    all_labels = ( "a", "b", "c", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", )
+    # Expected immediate children of each node (empty tuple = leaf).
+    node_children = {
+            "a" : ("b", "c",),
+            "b" : ("i", "e",),
+            "c" : ("g", "f",),
+            "e" : ("j", "k",),
+            "f" : ("n", "h",),
+            "g" : ("l", "m",),
+            "h" : ("o", "p",),
+            "i" : (),
+            "j" : (),
+            "k" : (),
+            "l" : (),
+            "m" : (),
+            "n" : (),
+            "o" : (),
+            "p" : (),
+            }
+    # Expected siblings following each node in its parent's child list.
+    node_siblings = {
+            "a": (),
+            "b": ("c",),
+            "c": (),
+            "e": (),
+            "f": (),
+            "g": ("f",),
+            "h": (),
+            "i": ("e",),
+            "j": ("k",),
+            "k": (),
+            "l": ("m",),
+            "m": (),
+            "n": ("h",),
+            "o": ("p",),
+            "p": (),
+            }
+    # Length of the edge subtending each node (matches
+    # newick_weighted_edges_str above).
+    node_edge_lengths = {
+            "a": 15.0,
+            "b": 33.0,
+            "c": 14.0,
+            "e": 14.0,
+            "f": 13.0,
+            "g": 30.0,
+            "h": 12.0,
+            "i": 17.0,
+            "j":  3.0,
+            "k":  3.0,
+            "l":  6.0,
+            "m":  6.0,
+            "n": 23.0,
+            "o": 11.0,
+            "p": 11.0,
+            }
+    # Expected node ages (leaves at 0.0; the tree is ultrametric).
+    node_ages = {
+            "a": 50.0,
+            "b": 17.0,
+            "c": 36.0,
+            "e":  3.0,
+            "f": 23.0,
+            "g":  6.0,
+            "h": 11.0,
+            "i":  0.0,
+            "j":  0.0,
+            "k":  0.0,
+            "l":  0.0,
+            "m":  0.0,
+            "n":  0.0,
+            "o":  0.0,
+            "p":  0.0,
+            }
+    # Ancestors of each node, nearest first, up to the root.
+    node_ancestors = {
+            "a": (),
+            "b": ("a",),
+            "c": ("a",),
+            "e": ("b", "a",),
+            "f": ("c", "a",),
+            "g": ("c", "a",),
+            "h": ("f", "c", "a",),
+            "i": ("b", "a",),
+            "j": ("e", "b", "a",),
+            "k": ("e", "b", "a",),
+            "l": ("g", "c", "a",),
+            "m": ("g", "c", "a",),
+            "n": ("f", "c", "a",),
+            "o": ("h", "f", "c", "a",),
+            "p": ("h", "f", "c", "a",),
+            }
+
+    # NOTE(review): superseded draft kept for reference; the "xxx"
+    # placeholders were never filled in and some labels are mismatched.
+    # def get_tree(self):
+    #     tree = dendropy.Tree()
+    #     def add_child_node(parent, label, edge_length):
+    #         nd = tree.node_factory()
+    #         nd.label = label
+    #         nd.edge.length = edge_length
+    #         parent.add_child(nd)
+    #         return nd
+    #     a = tree.seed_node
+    #     a.label = "a"
+    #     a.edge.length = 15.0
+    #     b = add_child_node(a, label="b", edge_length=xxx["b"])
+    #     c = add_child_node(a, label="c", edge_length=xxx["c"])
+    #     e = add_child_node(b, label="i", edge_length=xxx["i"])
+    #     i = add_child_node(b, label="c", edge_length=xxx["c"])
+    #     j = add_child_node(e, label="j", edge_length=xxx["j"])
+    #     k = add_child_node(e, label="k", edge_length=xxx["k"])
+    #     f = add_child_node(c, label="f", edge_length=xxx["f"])
+    #     g = add_child_node(c, label="g", edge_length=xxx["g"])
+    #     l = add_child_node(g, label="l", edge_length=xxx["l"])
+    #     m = add_child_node(g, label="m", edge_length=xxx["m"])
+    #     h = add_child_node(f, label="h", edge_length=xxx["h"])
+    #     n = add_child_node(f, label="n", edge_length=xxx["n"])
+    #     o = add_child_node(h, label="o", edge_length=xxx["o"])
+    #     p = add_child_node(h, label="p", edge_length=xxx["p"])
+    #     return tree
+
+    def get_tree(self,
+            suppress_internal_node_taxa=True,
+            suppress_leaf_node_taxa=True,
+            taxon_namespace=None,
+            node_taxon_label_map=None,
+            ):
+        """
+        Construct the curated tree node-by-node, asserting each structural
+        invariant as it is built. Returns a ``(tree, all_nodes,
+        leaf_nodes, internal_nodes)`` tuple. Taxa are created and bound to
+        nodes only when the corresponding ``suppress_*_node_taxa`` flag is
+        False; ``node_taxon_label_map`` maps node label -> taxon label,
+        defaulting to the node label itself.
+        """
+        if node_taxon_label_map is None:
+            node_taxon_label_map = {}
+        tree = dendropy.Tree(taxon_namespace=taxon_namespace)
+        a = tree.seed_node
+        a.label = "a"
+        a.edge.length = 15.0
+        b = a.new_child(label="b", edge_length=self.node_edge_lengths["b"])
+        assert b.label == "b"
+        assert b.edge.length == self.node_edge_lengths[b.label]
+        assert b.parent_node is a
+        assert b.edge.tail_node is a
+        assert b in a._child_nodes
+        c = a.new_child(label="c", edge_length=self.node_edge_lengths["c"])
+        assert c.label == "c"
+        assert c.edge.length == self.node_edge_lengths[c.label]
+        assert c.parent_node is a
+        assert c.edge.tail_node is a
+        assert c in a._child_nodes
+        i = b.new_child(label="i", edge_length=self.node_edge_lengths["i"])
+        assert i.label == "i"
+        assert i.edge.length == self.node_edge_lengths[i.label]
+        assert i.parent_node is b
+        assert i.edge.tail_node is b
+        assert i in b._child_nodes
+        e = b.new_child(label="e", edge_length=self.node_edge_lengths["e"])
+        assert e.label == "e"
+        assert e.edge.length == self.node_edge_lengths[e.label]
+        assert e.parent_node is b
+        assert e.edge.tail_node is b
+        assert e in b._child_nodes
+        j = e.new_child(label="j", edge_length=self.node_edge_lengths["j"])
+        assert j.label == "j"
+        assert j.edge.length == self.node_edge_lengths[j.label]
+        assert j.parent_node is e
+        assert j.edge.tail_node is e
+        assert j in e._child_nodes
+        k = e.new_child(label="k", edge_length=self.node_edge_lengths["k"])
+        assert k.label == "k"
+        assert k.edge.length == self.node_edge_lengths[k.label]
+        assert k.parent_node is e
+        assert k.edge.tail_node is e
+        assert k in e._child_nodes
+        g = c.new_child(label="g", edge_length=self.node_edge_lengths["g"])
+        assert g.label == "g"
+        assert g.edge.length == self.node_edge_lengths[g.label]
+        assert g.parent_node is c
+        assert g.edge.tail_node is c
+        assert g in c._child_nodes
+        f = c.new_child(label="f", edge_length=self.node_edge_lengths["f"])
+        assert f.label == "f"
+        assert f.edge.length == self.node_edge_lengths[f.label]
+        assert f.parent_node is c
+        assert f.edge.tail_node is c
+        assert f in c._child_nodes
+        l = g.new_child(label="l", edge_length=self.node_edge_lengths["l"])
+        assert l.label == "l"
+        assert l.edge.length == self.node_edge_lengths[l.label]
+        assert l.parent_node is g
+        assert l.edge.tail_node is g
+        assert l in g._child_nodes
+        m = g.new_child(label="m", edge_length=self.node_edge_lengths["m"])
+        assert m.label == "m"
+        assert m.edge.length == self.node_edge_lengths[m.label]
+        assert m.parent_node is g
+        assert m.edge.tail_node is g
+        assert m in g._child_nodes
+        n = f.new_child(label="n", edge_length=self.node_edge_lengths["n"])
+        assert n.label == "n"
+        assert n.edge.length == self.node_edge_lengths[n.label]
+        assert n.parent_node is f
+        assert n.edge.tail_node is f
+        assert n in f._child_nodes
+        h = f.new_child(label="h", edge_length=self.node_edge_lengths["h"])
+        assert h.label == "h"
+        assert h.edge.length == self.node_edge_lengths[h.label]
+        assert h.parent_node is f
+        assert h.edge.tail_node is f
+        assert h in f._child_nodes
+        o = h.new_child(label="o", edge_length=self.node_edge_lengths["o"])
+        assert o.label == "o"
+        assert o.edge.length == self.node_edge_lengths[o.label]
+        assert o.parent_node is h
+        assert o.edge.tail_node is h
+        assert o in h._child_nodes
+        p = h.new_child(label="p", edge_length=self.node_edge_lengths["p"])
+        assert p.label == "p"
+        assert p.edge.length == self.node_edge_lengths[p.label]
+        assert p.parent_node is h
+        assert p.edge.tail_node is h
+        assert p in h._child_nodes
+        tree._debug_check_tree()
+        leaf_nodes = set([i, j, k, l, m, n, o, p])
+        internal_nodes = set([b, c, e, f, g, h])
+        all_nodes = leaf_nodes | internal_nodes | set([a])
+        if not suppress_internal_node_taxa:
+            for nd in internal_nodes | set([a]):
+                label = node_taxon_label_map.get(nd.label, nd.label) # default to same label as node
+                t = tree.taxon_namespace.require_taxon(label=label)
+                nd.taxon = t
+                assert t in tree.taxon_namespace
+        if not suppress_leaf_node_taxa:
+            for nd in leaf_nodes:
+                label = node_taxon_label_map.get(nd.label, nd.label) # default to same label as node
+                t = tree.taxon_namespace.require_taxon(label=label)
+                nd.taxon = t
+                assert t in tree.taxon_namespace
+        return tree, all_nodes, leaf_nodes, internal_nodes
+
+    def verify_curated_tree(self,
+            tree,
+            suppress_internal_node_taxa=True,
+            suppress_leaf_node_taxa=False,
+            suppress_edge_lengths=False,
+            node_taxon_label_map=None):
+        """
+        Assert that ``tree`` matches the curated fixture: node/taxon
+        labels follow ``preorder_sequence`` under default tree iteration,
+        edge lengths match ``node_edge_lengths`` (or are None when
+        ``suppress_edge_lengths`` is True), and each node's children and
+        parent agree with ``node_children``/``node_ancestors``.
+        """
+        for nd, exp_nd in zip(tree, self.preorder_sequence):
+            if ( (nd.is_leaf() and suppress_leaf_node_taxa)
+                    or ((not nd.is_leaf()) and suppress_internal_node_taxa) ):
+                label = nd.label
+            else:
+                self.assertIsNot(nd.taxon, None)
+                label = nd.taxon.label
+            self.assertEqual(label, node_taxon_label_map.get(exp_nd, exp_nd))
+            if suppress_edge_lengths:
+                self.assertIs(nd.edge.length, None)
+            else:
+                self.assertEqual(nd.edge.length, self.node_edge_lengths[exp_nd])
+            # Tag the node with its expected label for the structural
+            # checks in the second pass below.
+            nd.canonical_label = exp_nd
+        for nd in tree:
+            children = [c.canonical_label for c in nd.child_node_iter()]
+            self.assertCountEqual(children, self.node_children[nd.canonical_label])
+            if nd.parent_node is None:
+                self.assertEqual(len(self.node_ancestors[nd.canonical_label]), 0)
+            else:
+                self.assertEqual(nd.parent_node.canonical_label,
+                        self.node_ancestors[nd.canonical_label][0])
+
+    def get_newick_string(self,
+            suppress_edge_lengths=False,
+            node_taxon_label_map=None,
+            edge_label_compose_fn=None,
+            tree_preamble_tokens=None):
+        """
+        Compose the curated tree's expected Newick string from the node
+        labels and edge lengths; ``edge_label_compose_fn`` formats branch
+        lengths (default: scientific notation) and
+        ``tree_preamble_tokens``, if given, is prepended verbatim.
+        """
+        node_tag = {}
+        if node_taxon_label_map is None:
+            node_taxon_label_map = {}
+        if edge_label_compose_fn is None:
+            edge_label_compose_fn = lambda e: "{:6.5E}".format(e)
+        # NOTE(review): node_tag was already initialized above; this
+        # second assignment is redundant.
+        node_tag = {}
+        for nd in self.preorder_sequence:
+            label = node_taxon_label_map.get(nd, nd) # default to same label as node
+            if suppress_edge_lengths:
+                node_tag[nd] = label
+            else:
+                node_tag[nd] = "{}:{}".format(label, edge_label_compose_fn(self.node_edge_lengths[nd]))
+        if tree_preamble_tokens is None:
+            node_tag["preamble"] = ""
+        else:
+            node_tag["preamble"] = tree_preamble_tokens
+        s = "{preamble}(({i}, ({j}, {k}){e}){b}, (({l}, {m}){g}, ({n}, ({o}, {p}){h}){f}){c}){a};".format(
+                **node_tag
+                )
+        return s
+
diff --git a/dendropy/test/support/curated_test_tree_list.py b/dendropy/test/support/curated_test_tree_list.py
new file mode 100644
index 0000000..0bad57a
--- /dev/null
+++ b/dendropy/test/support/curated_test_tree_list.py
@@ -0,0 +1,118 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Generates lists of trees and dendropy.TreeList for tests.
+"""
+
+import dendropy
+
# Default number of trees produced when callers do not specify a count.
DEFAULT_NUM_TREES = 5
# Monotonic counter used by get_tree() to give each generated tree a unique
# default label ("Tree1", "Tree2", ...).
_TREE_COUNTER = 0
+
def get_tree(taxon_namespace=None,
        label=None,
        suppress_internal_node_taxa=False,
        suppress_leaf_node_taxa=False):
    """
    Build and return the standard curated test tree:
    ``((t1, t2)i1, (t3, t4)i2)i0``.

    A fresh ``dendropy.TaxonNamespace`` is created unless one is supplied.
    Taxa are bound to leaf and/or internal nodes unless the corresponding
    ``suppress_*`` flag is set; the labels of all nodes that received taxa
    are recorded on ``tree.tax_labels`` and accumulated on
    ``tree.taxon_namespace.tax_labels``.
    """
    global _TREE_COUNTER
    _TREE_COUNTER += 1
    if taxon_namespace is None:
        taxon_namespace = dendropy.TaxonNamespace()
    if label is None:
        label = "Tree{}".format(_TREE_COUNTER)
    tree = dendropy.Tree(label=label,
            taxon_namespace=taxon_namespace)
    tree.seed_node.label = "i0"
    left = tree.seed_node.new_child(label="i1")
    right = tree.seed_node.new_child(label="i2")
    for leaf_label in ("t1", "t2"):
        left.new_child(label=leaf_label)
    for leaf_label in ("t3", "t4"):
        right.new_child(label=leaf_label)
    tax_labels = set()
    for node in tree:
        if node.is_leaf():
            suppress = suppress_leaf_node_taxa
        else:
            suppress = suppress_internal_node_taxa
        if not suppress:
            node.taxon = tree.taxon_namespace.require_taxon(node.label)
            tax_labels.add(node.label)
    tree.tax_labels = tax_labels
    try:
        tree.taxon_namespace.tax_labels.update(tax_labels)
    except AttributeError:
        # First tree built against this namespace: create the attribute.
        tree.taxon_namespace.tax_labels = set(tax_labels)
    return tree
+
def get_trees(
        num_trees,
        taxon_namespace=None,
        label=None,
        suppress_internal_node_taxa=False,
        suppress_leaf_node_taxa=False):
    """
    Return a plain Python list of ``num_trees`` curated test trees,
    each built by :func:`get_tree` with the given options.
    """
    return [
            get_tree(
                taxon_namespace=taxon_namespace,
                label=label,
                suppress_internal_node_taxa=suppress_internal_node_taxa,
                suppress_leaf_node_taxa=suppress_leaf_node_taxa)
            for _ in range(num_trees)]
+
def get_tree_list(
        num_trees,
        taxon_namespace=None,
        label=None,
        suppress_internal_node_taxa=False,
        suppress_leaf_node_taxa=False):
    """
    Return a ``dendropy.TreeList`` (labeled "1") containing ``num_trees``
    curated test trees, all sharing a single taxon namespace (a new one
    is created if ``taxon_namespace`` is not given).
    """
    if taxon_namespace is None:
        taxon_namespace = dendropy.TaxonNamespace()
    tree_list = dendropy.TreeList(label="1",
            taxon_namespace=taxon_namespace)
    for _ in range(num_trees):
        tree = get_tree(
                taxon_namespace=taxon_namespace,
                label=label,
                suppress_internal_node_taxa=suppress_internal_node_taxa,
                suppress_leaf_node_taxa=suppress_leaf_node_taxa)
        # Sanity check: every tree must share the list's namespace.
        assert tree.taxon_namespace is tree_list.taxon_namespace
        tree_list.append(tree)
    return tree_list
+
def get_tree_list_and_list_of_trees(
        num_trees,
        tree_list_taxon_namespace=None,
        list_of_trees_taxon_namespace=None):
    """
    Return a 2-tuple of (an *empty* ``dendropy.TreeList``, a plain list of
    ``num_trees`` curated test trees), each built against its own
    (optionally caller-supplied) taxon namespace.
    """
    empty_tree_list = get_tree_list(
            num_trees=0,
            taxon_namespace=tree_list_taxon_namespace,
            label=None,
            suppress_internal_node_taxa=False,
            suppress_leaf_node_taxa=False)
    independent_trees = get_trees(
            num_trees=num_trees,
            taxon_namespace=list_of_trees_taxon_namespace,
            label=None,
            suppress_internal_node_taxa=False,
            suppress_leaf_node_taxa=False)
    return empty_tree_list, independent_trees
+
diff --git a/dendropy/test/support/dendropytest.py b/dendropy/test/support/dendropytest.py
new file mode 100644
index 0000000..304dbd7
--- /dev/null
+++ b/dendropy/test/support/dendropytest.py
@@ -0,0 +1,98 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Extension to the basic unittest TestCase.
+"""
+
+import collections
+import sys
+import re
+import os
+import unittest
+from distutils.util import strtobool
+from dendropy.utility import metavar
+from dendropy.utility import messaging
+
+# Defining this here means that unittest will exclude all lines from this
+# module in the traceback report when an assertion fails, allowing
+# for the starting point of the traceback to be the point where the assertion
+# was made, rather than the point where an exception was raised because
+# the assertion was false.
+__unittest = True
+
def discover_test_module_paths(filter_patterns=None):
    """
    Discovers test modules. If ``filter_patterns`` is `None`, then
    all files in the *immediate* directory that begin with 'test' will
    be added to the list returned. If ``filter_patterns`` is not `None`,
    then it should be a list of regular expression patterns, and only files
    that match at least one of the patterns will be returned.
    """
    # Raw string: "\." must reach the regex engine (a plain string makes it
    # an invalid escape sequence in newer Python versions).
    test_module_pattern = re.compile(r"^test.*\.py$", re.IGNORECASE)
    if filter_patterns:
        # Fix: join the patterns with a bare "|" (regex alternation) so a
        # filename matching *any one* pattern is accepted. The previous
        # r"\|" joiner produced a pattern matching a literal "|" character,
        # so multiple filter patterns could never match.
        filter_pattern = re.compile("(" + "|".join(filter_patterns) + ")")
    else:
        filter_pattern = None
    # Scan the package directory one level above this support module.
    path = os.path.dirname(os.path.dirname(__file__))
    test_modules = []
    for filename in os.listdir(path):
        if test_module_pattern.match(filename):
            if filter_pattern is None or filter_pattern.match(filename):
                test_modules.append("dendropy.test." + os.path.splitext(filename)[0])
    return test_modules
+
+def get_test_suite(test_names=None):
+    """
+    If ``test_names`` is not `None`, creates a test suite out of those
+    modules. Otherwise, creates a test suite from all of the modules in
+    ``dendropy.test`` using the discovery.
+    """
+    if test_names is None:
+        test_names = discover_test_module_paths()
+    tests = unittest.defaultTestLoader.loadTestsFromNames(test_names)
+    return unittest.TestSuite(tests)
+
class ExtendedTestCase(unittest.TestCase):
    """
    Extends unittest.TestCase with various new assertion tests.
    """

    def _get_logger(self):
        # Lazily build a logger named after the concrete test class.
        if getattr(self, "_logger", None) is None:
            self._logger = messaging.get_logger(self.__class__.__name__)
        return self._logger

    def _set_logger(self, logger):
        self._logger = logger

    logger = property(_get_logger, _set_logger)

    def assertCountEqual(self, *args, **kwargs):
        # Python 3.2+ provides this assertion natively; on older versions,
        # emulate it by comparing element multisets.
        if sys.hexversion >= 0x03020000:
            super(ExtendedTestCase, self).assertCountEqual(*args, **kwargs)
        else:
            first, second = args[0], args[1]
            self.assertEqual(collections.Counter(first), collections.Counter(second))

    def fail_incomplete_tests(self):
        # Controlled by an environment variable; defaults to off ("0").
        flag = os.environ.get(metavar.FAIL_INCOMPLETE_TESTS_ENVAR, "0")
        return bool(strtobool(flag))

    def assertEqualUnorderedSequences(self, x1, x2):
        # Order-insensitive, multiplicity-sensitive sequence equality.
        return self.assertEqual(collections.Counter(x1), collections.Counter(x2))
diff --git a/dendropy/test/support/mockrandom.py b/dendropy/test/support/mockrandom.py
new file mode 100644
index 0000000..6c0578e
--- /dev/null
+++ b/dendropy/test/support/mockrandom.py
@@ -0,0 +1,411 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Fixed, cyclical "random" numbers.
+"""
+
+import random
+
class MockRandom(random.Random):
    """
    An overload of Random that returns numbers from a circular list of 1000
    numbers. This is useful for testing.
    """
    def __init__(self, x=None):
        """
        Create an instance; the optional argument ``x`` controls seeding,
        as for Random.seed().
        """
        # Length of the fixed cycle: random() wraps ``index`` back to 0 once
        # it reaches ``period``. ``rand_nums`` below is expected to hold
        # exactly this many values.
        self.period = 1000
        # Current position within the cycle.
        self.index = 0
        # The fixed pool of "random" values returned, in order, by random().
        self.rand_nums = [
0.31443191705262208, 0.2271212707069612, 0.49820681873991857,
0.44722415062740772, 0.92936218264281234, 0.59873171836809469,
0.19982411078265749, 0.98903094700215477, 0.026814421675923739,
0.24553294072971865, 0.90473066455214113, 0.63500099590551295,
0.062485878633049552, 0.40324970479428146, 0.75995556949919885,
0.98363548217857744, 0.54357505325205402, 0.85219514190867551,
0.61210275315338891, 0.74896253672556812, 0.70873501751854029,
0.87114422189514784, 0.92753457916157112, 0.60789956222627695,
0.34263919254268715, 0.33226005584940799, 0.13578382005587253,
0.23424699675198624, 0.50146300494834883, 0.77817054412039099,
0.15334683515195979, 0.38000295395822037, 0.80658234859686051,
0.52900928702132466, 0.16707757903062137, 0.9906812734146403,
0.068901748752614456, 0.49099641208134737, 0.47428802125713176,
0.043623746806798258, 0.27436313661767664, 0.6062541793704701,
0.46702497674258392, 0.010313222742326711, 0.045727337925076994,
0.096940257042402833, 0.8451024639104916, 0.10813661350545056,
0.36447286770120435, 0.17255941523459573, 0.4458754882710313,
0.56730670677709205, 0.66242313381279672, 0.2411633825018501,
0.64646607162710512, 0.51004102559263365, 0.14276617568106564,
0.49980274479883058, 0.56829545095899059, 0.14666139536796241,
0.27122315425029742, 0.062076706517107283, 0.29784232388014031,
0.46937153843131396, 0.39662305708213441, 0.77502963788221912,
0.34431246165606266, 0.12406976774146794, 0.77954233508627691,
0.98442357618953502, 0.65870863138879365, 0.76317430726323587,
0.32193320354888499, 0.41567072312058628, 0.49826478349801484,
0.94425296232063916, 0.40038568591413637, 0.83149782296288244,
0.50820834756269906, 0.45158625835932231, 0.87137420135576282,
0.18969443418561149, 0.70471518687224011, 0.43044950270947491,
0.14105841021622767, 0.46819020426556379, 0.28429748379509456,
0.33490273723410269, 0.92074700534564446, 0.85633367206412803,
0.44757299380703963, 0.45672655371946225, 0.70731920284944627,
0.75300537008067314, 0.2232676076061364, 0.61512018215155906,
0.62469408147020833, 0.70676695570246395, 0.90236199735102374,
0.079786365777333557, 0.13031703283041907, 0.69722911855322822,
0.065641265674688865, 0.14489805312547432, 0.50224362737950645,
0.44567510180376657, 0.89500463323088286, 0.90910794360813263,
0.100405084345489, 0.55357619376108902, 0.2631932106857483,
0.5368157483119983, 0.99717971859556964, 0.90080123405742396,
0.8919749597060378, 0.96319408512128946, 0.72104763380068249,
0.33794570782212341, 0.92887336720989366, 0.57094598529869378,
0.81251580554905201, 0.49833566254350214, 0.074578046896970562,
0.15742891672534376, 0.56030040624723298, 0.62341890462128202,
0.41169999530912615, 0.80360387569467229, 0.24478587276981478,
0.53034050960413304, 0.14555402088453773, 0.57197267277344321,
0.7492744117968001, 0.40784211694021266, 0.11232415851698097,
0.29253785687755374, 0.20417762476978685, 0.45634845668850621,
0.97792963901394625, 0.7152084884227089, 0.27861419201158755,
0.62803675215958377, 0.85168636397984088, 0.6597634578414231,
0.078721939023294496, 0.1349959738362746, 0.27468049112529636,
0.02946495780617886, 0.42358091820270471, 0.77729283466797494,
0.02988444242940469, 0.83123858689441787, 0.76934867134442064,
0.64741431523640336, 0.3912084129095641, 0.6045302455094459,
0.34111190327675178, 0.55496514485688919, 0.60933955006653029,
0.54722840485533641, 0.10093773352434698, 0.58381620951367252,
0.076014095573963547, 0.4796220011714295, 0.10066813035566546,
0.9886687525572585, 0.73671841735259536, 0.77680789701691333,
0.50699813617100875, 0.16008125326001144, 0.87823152126474469,
0.14575661058543921, 0.60250564670920836, 0.61902906767558685,
0.049351327842071746, 0.49953084340237608, 0.41093474250555473,
0.97552953639011253, 0.51676060749834307, 0.93071762821570492,
0.24874925056574937, 0.060860377679168187, 0.39232281732286711,
0.80356313595940954, 0.786440293404014, 0.64625142562190385,
0.94871068219852805, 0.84212491538939049, 0.36449282817053852,
0.82234210696581844, 0.61187529740994917, 0.94733352117304892,
0.92772114792736216, 0.26248960149311207, 0.41887757379152035,
0.12936694229983947, 0.57001710049713061, 0.010005635615027653,
0.68070258603920941, 0.55882732694910564, 0.87295001985704523,
0.36658461453172919, 0.23237288683316459, 0.64910872189673208,
0.084974460323642309, 0.34096916028967794, 0.47612236632740101,
0.24552896768330368, 0.042646747131294127, 0.020766228915284457,
0.35707004990778979, 0.40859001679046225, 0.48306480955720965,
0.64214193409924802, 0.81819075869930569, 0.69944147181886729,
0.31408848949179724, 0.50536430567600688, 0.40477779512142853,
0.59493090819332872, 0.0089839382967883408, 0.67524592540041495,
0.63002984647419125, 0.177094541575886, 0.1749310289571564,
0.1932982649036219, 0.21408296727424447, 0.20929754851582516,
0.46529022624963767, 0.78707697700412727, 0.41333186638156294,
0.11574974641989877, 0.53867238158603425, 0.82175328149463944,
0.67639453631223012, 0.25014369054860197, 0.49854609920360005,
0.24030238095944334, 0.48457535655726824, 0.25746386922489906,
0.46106628357880619, 0.81808647586913241, 0.15491536835125597,
0.79565090323486443, 0.72113371275143234, 0.15892353894428957,
0.83542074414500689, 0.45886961733146725, 0.43162455397970179,
0.50755071650515615, 0.37972028616229669, 0.69097183736373469,
0.3392880552007751, 0.56391954383553367, 0.51596972483937542,
0.3350085000360179, 0.76262188690414423, 0.04059566986953278,
0.72096512147672664, 0.73804031506128298, 0.68867017527258689,
0.4839824851362271, 0.56147196547531775, 0.8322849786127503,
0.41930393927188214, 0.95129787073939975, 0.077389437920532544,
0.73312136710456399, 0.85968301610141051, 0.84910535174694934,
0.67672577614868568, 0.39840657520227629, 0.94849298075019639,
0.73238765873246292, 0.65963864561479935, 0.83040338914322942,
0.84313679569585709, 0.74823189764524778, 0.26818361235787402,
0.64491449933958134, 0.29926105221964849, 0.40622193728965883,
0.43894970523034738, 0.37205376178932681, 0.68288316787791026,
0.51152834706969119, 0.040094666318732486, 0.51835551855297335,
0.69914133860734295, 0.64041327950776628, 0.53911859102535886,
0.47419203100048946, 0.98833247945952285, 0.45942391149428241,
0.48638367210938549, 0.29548147882844189, 0.5516776265496679,
0.80321163510972327, 0.86442523514958547, 0.47263593924626335,
0.30319463616137066, 0.36245407246198802, 0.75558505880342719,
0.95739479621470969, 0.82587358532163613, 0.11247960960382175,
0.46419576976880894, 0.90535923412978114, 0.18505075271874682,
0.065084859736668332, 0.41088413144481051, 0.1163853084536014,
0.71752303978381093, 0.9696211810020936, 0.82196703884289235,
0.72049774195929861, 0.7752354427755731, 0.09388893717118818,
0.1325673610022654, 0.25435966713715885, 0.61212919784239284,
0.99337146179390368, 0.93141574366900326, 0.82812399481728349,
0.4428708918637918, 0.98219708766421743, 0.59186175672774433,
0.21553895655929745, 0.18919035141314622, 0.37778541717248304,
0.70150248593439224, 0.038493068549859011, 0.45017106304425603,
0.012349355068150825, 0.057210716141382956, 0.36131218967977441,
0.30368749532763406, 0.76748032875773708, 0.074477506286847683,
0.36360791444330609, 0.9623640161286835, 0.80248013595119039,
0.64840527058023545, 0.27315029782770062, 0.26570186631332293,
0.032536083555708806, 0.48705081387400317, 0.59687133594383202,
0.62350625415636296, 0.28500931577707378, 0.75038812771746921,
0.63642564096896859, 0.50344381173183284, 0.046246962255789281,
0.65096776559704583, 0.99335669174451846, 0.60052902908479966,
0.91310341608001355, 0.26914409939868555, 0.98510339278263348,
0.46710139889858215, 0.39325219794139132, 0.52623165641816971,
0.96037264125943966, 0.79579351771376217, 0.25476456445703821,
0.10940016989945811, 0.99020083999370367, 0.22552307457113296,
0.71042251786619393, 0.039664945075574831, 0.96216550041626125,
0.10016594114658017, 0.96625136816262613, 0.040562665307392276,
0.75347472744879929, 0.56515679140674746, 0.27644225575309123,
0.89878952868547379, 0.83261330046450299, 0.15034835204416519,
0.11408844903704218, 0.9576585991414176, 0.5366198880109444,
0.35420905430064598, 0.68357000407099699, 0.94642589611259054,
0.56475741360759735, 0.77754449030481632, 0.88616730803987898,
0.65641106985679387, 0.67437950243977707, 0.47221622506846839,
0.50062117520556204, 0.91970430648981094, 0.18131053303862033,
0.21272268806612415, 0.05377388641792813, 0.31007890951373962,
0.32618319789265249, 0.34290429703351155, 0.21549459205241117,
0.2524280170570633, 0.81709409505752828, 0.75815705258870691,
0.47234205496003512, 0.78357212776826923, 0.53932071107791935,
0.85077682027667501, 0.83311361991293376, 0.90786178925224947,
0.15239185151498524, 0.55034674406410433, 0.75795968586040596,
0.65221986813104249, 0.65040096790719815, 0.83824965080922464,
0.43799507441117524, 0.87074085705683202, 0.94866130510988389,
0.52427380088194253, 0.75466867079311706, 0.59072719583099642,
0.75196616934637517, 0.86124226022283668, 0.4083157725740546,
0.18255991491456602, 0.8094909841226301, 0.9238223349425172,
0.3489557609572872, 0.63121018395963568, 0.17545841707657228,
0.49821878413082843, 0.80327811868126764, 0.28922419662100818,
0.096050290301955776, 0.538241792459762, 0.1713976104609598,
0.42123504272249834, 0.070501770640446049, 0.10049193448270621,
0.40017124813453819, 0.49753518158427923, 0.9886569977137456,
0.0036223759804981936, 0.72003196594546082, 0.49450652913802473,
0.85446043489120027, 0.27576601630834741, 0.69832406474399744,
0.84560364778003083, 0.51772138467128959, 0.39288177031969562,
0.96205954073664657, 0.01036595300701304, 0.20813795619564024,
0.58211793695016312, 0.093444683710641629, 0.56200191781191788,
0.23670382060870676, 0.11486440233716622, 0.95144899622178924,
0.16548179609792213, 0.749604411358611, 0.8489642046684549,
0.74481664127095337, 0.14323986526663668, 0.57678405290507173,
0.70737322386084223, 0.67618648905068834, 0.2465839168756111,
0.78710019275683885, 0.28787543298114859, 0.097255946101287516,
0.52633018343137772, 0.13397988165453845, 0.34982242740349179,
0.30083568024580221, 0.23002499396511533, 0.39455145880055487,
0.53909282369214884, 0.5098939390012095, 0.99519630939119719,
0.65245304515657243, 0.6278994145755239, 0.28974657946211091,
0.60721277396019713, 0.89345895195698755, 0.29467240439427766,
0.85481899776248127, 0.52193388182858658, 0.9133801554519243,
0.50427500446909879, 0.85002896645153558, 0.87105707616743488,
0.88274737164353945, 0.39826295447590065, 0.97178570093923,
0.59557258786570377, 0.85176449380549812, 0.63033913649146966,
0.85138887004600872, 0.84230334700094767, 0.54577129632427634,
0.066205916782848928, 0.055465695420991001, 0.34186500465807157,
0.83648536922433414, 0.50566045826603923, 0.075635038536681964,
0.83140814268128949, 0.93468963034722885, 0.59507556482872892,
0.99365469502662762, 0.058149318098988489, 0.54942554831926116,
0.1649351536556477, 0.36156873463203754, 0.48927371055917002,
0.46630723876444657, 0.43494631989319743, 0.068712919541944251,
0.017189475299067558, 0.0088556842001139557, 0.36141417615913152,
0.85308467014593914, 0.62582981485046285, 0.9581126836643491,
0.61919083481355885, 0.27660431371386407, 0.69629612158347465,
0.1882410189585707, 0.88515805020035998, 0.22963170832643232,
0.26232029298479531, 0.37470837626189657, 0.23675339810004337,
0.69090831156040311, 0.80140513633505306, 0.92279857466770476,
0.6065036854306598, 0.018178829690600251, 0.093450588123038192,
0.76349911808481519, 0.31488524475447477, 0.22338899889386088,
0.34791728969913149, 0.28557610843267833, 0.35156310782500766,
0.92534773820401084, 0.82482329374533125, 0.21731554294414435,
0.36144672624854923, 0.65796790552131279, 0.19540075868896012,
0.16884193855701868, 0.36038857167145644, 0.59040341628371806,
0.88059940221166688, 0.96497956855411582, 0.081138766376561589,
0.21706711129437584, 0.090823322263668516, 0.2088945616604938,
0.073581231114019929, 0.46847688801420184, 0.075993449653287581,
0.61216656870467434, 0.76075674317013886, 0.61140551802038356,
0.16549265935398116, 0.96911925303347668, 0.60625163307811458,
0.72399365784065461, 0.002644877214834751, 0.4420448631751952,
0.27100053245915956, 0.01049476392458959, 0.34914797964595035,
0.62620571336031605, 0.99174582921261834, 0.10282266856087319,
0.42527709634381805, 0.61827337279979699, 0.042628563903215788,
0.99425805906104536, 0.18835802817121261, 0.51663287801309388,
0.00071687243225870834, 0.19224356026637945, 0.35424929691079809,
0.27771613021877772, 0.1702926412836574, 0.57744611720777628,
0.09411402810943903, 0.55800959037312126, 0.6755403793946978,
0.63971514472872804, 0.84208699726913872, 0.73924273979392963,
0.31797310932926437, 0.43560559324304082, 0.72822006078137302,
0.34021010712720468, 0.65800890008991064, 0.92506411624434082,
0.87027885934255056, 0.65165487411908385, 0.57590949220775711,
0.5305811800645196, 0.21300563938823236, 0.85319355429992927,
0.41813336013349511, 0.6402346817441521, 0.12696860712654501,
0.47007546629474051, 0.48542408063649056, 0.075530125640996482,
0.51893066398146626, 0.81844064972408193, 0.10747498923675491,
0.1096282809806246, 0.25375564153884489, 0.36633088159110827,
0.15801256597931457, 0.922151809927651, 0.031768569931787893,
0.43576325573288455, 0.21113431542672856, 0.31596674335427177,
0.85599318804265878, 0.81208478273596529, 0.10816056162310417,
0.90119858816078713, 0.44899529012597006, 0.33724510161922039,
0.88408157989409231, 0.087297142667446925, 0.45339378304422251,
0.92291152162924073, 0.37146410762513915, 0.28635300349987958,
0.52531368082180052, 0.19987533551229164, 0.28515195833401197,
0.78696780334000849, 0.18409735841751462, 0.63115068875856151,
0.014052055220890813, 0.75537970046662439, 0.69667760101543752,
0.53849726798980924, 0.69966987907192613, 0.68409265434583921,
0.96233996652121068, 0.33707239023588242, 0.78097691869862418,
0.77797849881511727, 0.80270691051387077, 0.48887933213516466,
0.66342940395001104, 0.75499746294870507, 0.19781780665223792,
0.18272687155761635, 0.96623704587636516, 0.5238656866056709,
0.44386011541475057, 0.64522237359183865, 0.50810450980814414,
0.76823725686412425, 0.47406197139443873, 0.41873386804615276,
0.47922922274530522, 0.31067765642017786, 0.59285344557631647,
0.40805366505854601, 0.81430345633987966, 0.71662142693747621,
0.85685183873929738, 0.14406177373290485, 0.51104814471267757,
0.14071252290705238, 0.06969316390364011, 0.2583719584556573,
0.68451057254140613, 0.0016165163630846857, 0.080538867172593731,
0.82231364544818153, 0.16851979870461298, 0.25747974558536035,
0.45300989528105839, 0.98526738767557143, 0.97451169000984994,
0.038117904404271652, 0.01765330749662497, 0.6744420693012253,
0.71078268782386889, 0.55552919512325083, 0.19525732457142908,
0.98498115572050793, 0.94424033466702706, 0.30238482785473619,
0.51735289069888513, 0.0218844360276651, 0.37095525152876008,
0.98856409873656281, 0.81171161518835877, 0.62205790570841402,
0.13903995589268614, 0.24317363346222531, 0.28691664626948732,
0.41529758456162613, 0.58102138159149475, 0.29511103490158153,
0.81032771243549684, 0.54664215124727655, 0.86806884248043081,
0.40148973967126578, 0.72785143234314909, 0.83093687762801249,
0.68138259848769756, 0.76590081936459165, 0.50823725006607534,
0.67456323322438305, 0.39077640138345837, 0.42605548372487245,
0.27751254873155762, 0.19227397148004932, 0.43513135333712738,
0.012639800127961398, 0.24034640152670483, 0.10027447128694145,
0.89318562867730877, 0.50849414601968046, 0.20657343439508324,
0.57147540212723991, 0.47361978176002362, 0.11753188924212987,
0.98670021096046046, 0.8207811703836605, 0.66086365421202642,
0.60966298584013634, 0.94363899598878753, 0.70628481771581986,
0.17426291736596278, 0.024325687803500751, 0.40140066436409716,
0.89813827762266019, 0.23245614268034809, 0.41721013902193649,
0.74346705142425296, 0.69053314604711236, 0.55492823893072951,
0.87243520608233738, 0.8312181578062765, 0.97684515195591803,
0.35216590061944664, 0.69012293976323458, 0.66421283923840491,
0.88350728730414396, 0.76583235834404084, 0.88512324584587998,
0.28611466957123011, 0.64601337868076381, 0.14372944791838049,
0.78288264166083665, 0.2487441999079687, 0.4718699501149034,
0.52975394857724545, 0.33705125203762321, 0.090787594259293392,
0.31240428763858863, 0.90097506501517788, 0.59462802131684955,
0.49988656761918837, 0.96370898732399146, 0.56268934292939077,
0.36938414960144983, 0.7883504205258377, 0.25721869603089698,
0.28997277985430103, 0.84515931522936061, 0.81404163063999102,
0.10469687422346452, 0.4395925152686998, 0.87762561700081865,
0.68571872122065192, 0.15240218006329653, 0.080152884004887737,
0.29408742629437801, 0.43230665934130696, 0.95930050828503155,
0.88164080303001868, 0.014228459746536415, 0.54117421597118331,
0.3093599606166112, 0.2253169883201902, 0.91886214568858338,
0.1911888563036076, 0.95893282911333255, 0.42194833819579569,
0.64958524906175685, 0.27438310864894144, 0.47728534096729958,
0.72632268640363284, 0.44452695053483104, 0.43552220277519649,
0.40980432172030312, 0.77230248591055173, 0.9077817668261553,
0.61445585506231115, 0.82788913948136988, 0.36586382522685101,
0.13940582072259522, 0.66770701082795725, 0.62362384669896942,
0.15927430003372178, 0.90459467071989419, 0.13333361766042984,
0.11466869457171014, 0.28276573967969287, 0.21175722591814683,
0.73761579576599035, 0.46162293964555479, 0.53931182885661688,
0.19346800949975318, 0.59273833997939807, 0.85593387242226937,
0.20636310091981991, 0.74740172028853169, 0.0097987635504059867,
0.53220187213375414, 0.10477473988968611, 0.42588178132469767,
0.49116443200629045, 0.99047240022140914, 0.68964358689657168,
0.036585644664419381, 0.38343278241796397, 0.78422102834621799,
0.53000667898091136, 0.56832676501516, 0.1618813939818381,
0.78120478334302312, 0.81318259290902628, 0.40539491724822418,
0.55822463931191346, 0.48382947242366969, 0.013999510513232449,
0.30319124448812262, 0.36263529307915177, 0.97471886280882758,
0.89616494921964651, 0.84141953467994779, 0.27375941926765657,
0.76617869775906056, 0.96900963369293314, 0.43640341590511755,
0.037322202213532885, 0.64673899008553493, 0.35767838217965664,
0.7889272154535828, 0.20109932743575909, 0.68153577205564464,
0.91221429179121627, 0.086009200734357472, 0.59721147029656663,
0.27523447803309387, 0.73045391968310958, 0.024866069256412437,
0.86107232375125931, 0.91463723977350431, 0.87186138748263264,
0.39324191303855216, 0.096161379593851071, 0.32068370878413976,
0.78897571163642655, 0.78474613759719869, 0.34331601538487666,
0.40828309056141754, 0.15694516839134898, 0.83789401420436682,
0.80883177334456757, 0.21466083788682122, 0.24325789921874352,
0.83206267445430881, 0.38798636193955949, 0.95060347757178409,
0.24085962895484037, 0.27990366953894874, 0.56592755582224441,
0.18271630447568943, 0.33717455928632301, 0.9592910233932247,
0.69853867366898292, 0.45801598729692572, 0.87473822819048719,
0.20057204411755836, 0.41238641408676235, 0.48833064608189392,
0.84049561182553301, 0.42519968407784081, 0.43566093025099217,
0.041244638889872287, 0.97655130365177889, 0.28173859355824082,
0.98771197197012328, 0.81858850686964801, 0.26580819922534471,
0.45944722778299152, 0.3291016001586401, 0.31426388220315016,
0.13855305849387545, 0.74478342127839636, 0.43227250519402238,
0.94387295682438122, 0.45541032020373573, 0.77009137570947128,
0.31092801905533718, 0.66941813407075434, 0.4268626820379563,
0.20020363748681624, 0.92365265242403805, 0.23553797096803619,
0.4799416350221184, 0.94229554199687893, 0.8205115632610912,
0.15081497950722322, 0.056775689990967493, 0.52364515059006689,
0.61167866114344005, 0.49131714402095361, 0.27172462716061663,
0.34401473726073106, 0.1309776400644278, 0.1155305645196063,
0.64795318728951179, 0.05890399840832139, 0.3826763630244947,
0.72087369958143899, 0.27041817171916704, 0.6326836251042649,
0.75987299662928076, 0.52755368073763942, 0.87262870339772391,
0.6556925798696458, 0.67540950390279975, 0.14720672733677387,
0.81844445101944563, 0.59474364657015155, 0.16749016121926463,
0.62166156198457123, 0.4558918955138721, 0.17287013915801952,
0.56204353906568527, 0.94645178075956382, 0.83732842723959022,
0.98291695415637659, 0.29899190475031368, 0.98472724100866804,
0.17415887677241315, 0.36897659959880857, 0.48239959673737964,
0.62276551493865662, 0.030010365540096617, 0.7689900566409642,
0.032718717678784426, 0.062869321963432312, 0.11209046647623322,
0.84948915240213352, 0.83212575464788829, 0.061793679140391355,
0.22412336143745482, 0.55303755867428062, 0.79750889129943847,
0.80870433937874264, 0.62110916094845803, 0.23896504325793966,
0.20101301051309206, 0.68067301304891681, 0.30134264414403189,
0.91596006365773341, 0.89156690577779907, 0.6733373469244831,
0.4752275437610407, 0.37783271284483444, 0.45649144239182682,
0.33465933618787669, 0.18984314534253122, 0.31178645703848173,
0.35169927993508443, 0.34634536877091138, 0.56400514652008393,
0.1061010450319515, 0.78804117314026834, 0.29742453358636112,
0.99729169101350168, 0.14889988220740147, 0.57392391117992947,
0.18823074515724048]

        # Position the cycle (and the underlying generator) per ``x``.
        self.seed(x)
        self.gauss_next = None
+
+    def seed(self, a=None):
+        """Initialize internal state."""
+        if a is None:
+            a = 1
+        self.index = a % self.period
+        super(random.Random, self).seed(a)
+        self.gauss_next = None
+
+    def getstate(self):
+        """Return internal state; can be passed to setstate() later."""
+        gs = list(random.Random.getstate(self)[1])
+        gs.append(self.index)
+        return -1, tuple(gs), self.gauss_next
+
+    def setstate(self, state):
+        """Restore internal state from object returned by getstate()."""
+        version = state[0]
+        if version == -1:
+            version, internalstate, self.gauss_next = state
+            l_is = list(internalstate)
+            self.index = l_is.pop(-1)
+            t = (random.Random.VERSION, tuple(l_is), self.gauss_next)
+            random.Random.setstate(self, t)
+        else:
+            raise ValueError("state with version %s passed to "
+                             "random.Random.setstate() of version %s" %
+                             (version, -1))
+    def jumpahead(self, n):
+        self.index = (self.index + n) % self.period
+    def random(self):
+        r = self.rand_nums[self.index]
+        self.index += 1
+        if self.index >= self.period:
+            self.index = 0
+        #_LOG.debug("Repeatedrandom.Random returning %f" % r)
+        return r
diff --git a/dendropy/test/support/mockreader.py b/dendropy/test/support/mockreader.py
new file mode 100644
index 0000000..5fa99ec
--- /dev/null
+++ b/dendropy/test/support/mockreader.py
@@ -0,0 +1,31 @@
+from dendropy.dataio import ioservice
+from dendropy import dataio
+
class MockReader(ioservice.DataReader):
    """Stub ``DataReader`` that records the arguments of the last read call.

    Lets tests inspect exactly what the dataio dispatch machinery hands to
    a reader, without performing any actual parsing.
    """

    def __init__(self):
        # Captured arguments of the most recent ``_read`` call; all unset
        # until ``_read`` is invoked.
        self.stream = None
        self.taxon_namespace_factory = None
        self.tree_list_factory = None
        self.char_matrix_factory = None
        self.state_alphabet_factory = None
        self.global_annotations_target = None

    def _read(self,
            stream,
            taxon_namespace_factory,
            tree_list_factory,
            char_matrix_factory,
            state_alphabet_factory,
            global_annotations_target):
        # Record every argument verbatim, then delegate the (mock) result
        # to the overridable hook below.
        self.stream = stream
        self.taxon_namespace_factory = taxon_namespace_factory
        self.tree_list_factory = tree_list_factory
        self.char_matrix_factory = char_matrix_factory
        self.state_alphabet_factory = state_alphabet_factory
        self.global_annotations_target = global_annotations_target
        return self.process_read_call()

    def process_read_call(self):
        # Hook for subclasses; the base mock produces no data.
        return None
+
diff --git a/dendropy/test/support/pathmap.py b/dendropy/test/support/pathmap.py
new file mode 100644
index 0000000..a9b5032
--- /dev/null
+++ b/dendropy/test/support/pathmap.py
@@ -0,0 +1,140 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Path mapping for various test resources.
+"""
+
+import os
+import sys
+import tempfile
+from dendropy.utility import messaging
+_LOG = messaging.get_logger(__name__)
+
try:
    import pkg_resources
    # When package metadata is available, resolve the test and application
    # directories through pkg_resources so paths are correct for an
    # installed (e.g. zipped egg) distribution as well.
    TESTS_DIR = pkg_resources.resource_filename("dendropy", "test")
    APPLICATIONS_DIR = pkg_resources.resource_filename("dendropy", os.path.join(os.pardir, "applications"))
    _LOG.info("using pkg_resources path mapping")
except Exception:
    # Fall back to paths relative to this module on the local filesystem.
    # Catch Exception rather than using a bare ``except:`` so that
    # KeyboardInterrupt and SystemExit still propagate.
    LOCAL_DIR = os.path.dirname(__file__)
    TESTS_DIR = os.path.join(LOCAL_DIR, os.path.pardir)
    PACKAGE_DIR = os.path.join(TESTS_DIR, os.path.pardir)
    APPLICATIONS_DIR = os.path.join(PACKAGE_DIR, os.path.pardir, "applications")
    _LOG.info("using local filesystem path mapping")
TESTS_DATA_DIR = os.path.join(TESTS_DIR, "data")
TESTS_OUTPUT_DIR = os.path.join(TESTS_DIR, "output")
TESTS_COVERAGE_DIR = os.path.join(TESTS_DIR, "coverage")
TESTS_COVERAGE_REPORT_DIR = os.path.join(TESTS_COVERAGE_DIR, "report")
TESTS_COVERAGE_SOURCE_DIR = os.path.join(TESTS_COVERAGE_DIR, "source")
+
def tree_source_stream(filename):
    """Return an open file handle to the tree data file ``filename``.

    Uses the "U" (universal-newline) open flag only where it still exists:
    it was deprecated in Python 3.4 and removed in 3.11.
    """
    # Tuple comparison replaces the old ``major >= 3 and minor >= 4`` test,
    # which mis-orders versions (e.g. a hypothetical 4.0 would have taken
    # the "rU" branch and crashed on the removed mode).
    mode = "rU" if sys.version_info < (3, 4) else "r"
    return open(tree_source_path(filename), mode)
+
def tree_source_path(filename=None):
    """Return the path to tree data file ``filename``, or to the trees
    data directory itself when ``filename`` is ``None``."""
    component = "" if filename is None else filename
    return os.path.join(TESTS_DATA_DIR, "trees", component)
+
def char_source_stream(filename):
    """Return an open file handle to the character data file ``filename``."""
    # The "U" (universal-newline) open flag was removed in Python 3.11,
    # where the previous unconditional ``open(..., "rU")`` raises
    # ValueError.  Guard it the same way as tree_source_stream() and
    # splits_source_stream() for consistency.
    mode = "rU" if sys.version_info < (3, 4) else "r"
    return open(char_source_path(filename), mode)
+
def char_source_path(filename=None):
    """Return the path to character data file ``filename``, or to the chars
    data directory itself when ``filename`` is ``None``."""
    component = "" if filename is None else filename
    return os.path.join(TESTS_DATA_DIR, "chars", component)
+
def mixed_source_stream(filename):
    """Return an open file handle to the mixed (trees + chars) data file ``filename``."""
    # The "U" open flag was removed in Python 3.11; the previous
    # unconditional "rU" would raise ValueError there.  Guard it like
    # tree_source_stream().
    mode = "rU" if sys.version_info < (3, 4) else "r"
    return open(mixed_source_path(filename), mode)
+
def mixed_source_path(filename=None):
    """Return the path to mixed data file ``filename``, or to the mixed
    data directory itself when ``filename`` is ``None``."""
    component = "" if filename is None else filename
    return os.path.join(TESTS_DATA_DIR, "mixed", component)
+
def splits_source_stream(filename):
    """Return an open file handle to the splits reference file ``filename``."""
    # The "U" flag only exists pre-3.4 (removed in 3.11).  The tuple
    # comparison replaces the old ``major >= 3 and minor >= 4`` test,
    # which mis-orders hypothetical future major versions.
    mode = "rU" if sys.version_info < (3, 4) else "r"
    return open(splits_source_path(filename), mode)
+
def splits_source_path(filename=None):
    """Return the path to splits reference file ``filename``, or to the
    splits data directory itself when ``filename`` is ``None``."""
    component = "" if filename is None else filename
    return os.path.join(TESTS_DATA_DIR, "splits", component)
+
def data_source_stream(filename):
    """Return an open file handle to test data file ``filename``."""
    # The "U" open flag was removed in Python 3.11; the previous
    # unconditional "rU" would raise ValueError there.  Guard it like
    # the other *_source_stream() helpers.
    mode = "rU" if sys.version_info < (3, 4) else "r"
    return open(data_source_path(filename), mode)
+
def data_source_path(filename=None):
    """Return the path to test data file ``filename`` under the data directory.

    ``filename`` may also be a list of path components, which are joined
    first; ``None`` yields the data directory itself.
    """
    if filename is None:
        component = ""
    elif isinstance(filename, list):
        component = os.path.sep.join(filename)
    else:
        component = filename
    return os.path.join(TESTS_DATA_DIR, component)
+
def named_output_stream(filename=None, suffix_timestamp=True):
    """Open (for writing) a file in the test output directory."""
    target = named_output_path(filename=filename, suffix_timestamp=suffix_timestamp)
    return open(target, "w")
+
def named_output_path(filename=None, suffix_timestamp=True):
    """Return a path under the test output directory for ``filename``.

    ``filename`` may be a list of path components.  When
    ``suffix_timestamp`` is true, a compact ``YYYYMMDDHHMMSS`` timestamp
    suffix is appended so successive runs do not clobber each other.
    The output directory is created on demand.
    """
    import time
    if filename is None:
        filename = ""
    else:
        if isinstance(filename, list):
            filename = os.path.sep.join(filename)
        if suffix_timestamp:
            # The original code called ``textutils.pretty_timestamp(style=1)``,
            # but ``textutils`` is never imported in this module, so the
            # default code path raised NameError.  Generate the equivalent
            # compact timestamp directly instead.
            filename = "%s.%s" % (filename, time.strftime("%Y%m%d%H%M%S"))
    if not os.path.exists(TESTS_OUTPUT_DIR):
        os.makedirs(TESTS_OUTPUT_DIR)
    return os.path.join(TESTS_OUTPUT_DIR, filename)
+
def application_source_path(filename=None):
    """Return the path to ``filename`` within the applications directory,
    or to the directory itself when ``filename`` is ``None``."""
    component = "" if filename is None else filename
    return os.path.join(APPLICATIONS_DIR, component)
+
class SandboxedFile(object):
    """Context manager yielding a throwaway named temporary file.

    The file is created with ``delete=False`` so that its path can be
    reopened by code under test while the manager is active; it is
    flushed, closed, and removed (best-effort) on exit.
    """

    def __init__(self, mode="w"):
        self.mode = mode
        self.fileobj = None
        self.filepath = None

    def __enter__(self):
        self.fileobj = tempfile.NamedTemporaryFile(mode=self.mode, delete=False)
        self.filepath = self.fileobj.name
        return self.fileobj

    def __exit__(self, exc_type, exc_value, traceback):
        stream = self.fileobj
        try:
            stream.flush()
            stream.close()
        except ValueError:
            # Client code may already have closed the file; flushing a
            # closed file raises ValueError, which is fine here.
            pass
        try:
            os.remove(self.filepath)
        except OSError:
            # Already removed, or otherwise not deletable: cleanup is
            # best-effort only.
            pass
diff --git a/dendropy/test/support/paupsplitsreference.py b/dendropy/test/support/paupsplitsreference.py
new file mode 100644
index 0000000..36fc068
--- /dev/null
+++ b/dendropy/test/support/paupsplitsreference.py
@@ -0,0 +1,59 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
import collections
import os
import sys
if sys.version_info < (3, 4):
    # Pre-3.4 interpreters get a wrapper that emulates newer ``open``
    # semantics; the tuple comparison replaces the previous
    # ``major >= 3 and minor >= 4`` test, which mis-orders versions.
    from dendropy.utility.filesys import pre_py34_open as open
from dendropy.test.support import pathmap

# Column types of a PAUP* splits reference row: bipartition string,
# unnormalized bitmask, normalized bitmask, (weighted) count,
# (weighted) frequency.  (``os`` above was missing in the original,
# causing a NameError in get_splits_reference when splits_dir is given.)
_SPLITS_REFERENCE_FIELD_TYPES = [str, int, int, float, float]
+
def get_splits_reference(
        splits_filename,
        splits_dir=None,
        key_column_index=0):
    """Load a tab-delimited PAUP* splits reference table into an ordered dict.

    Parameters
    ----------
    splits_filename : str
        Name of the splits reference file.
    splits_dir : str, optional
        Directory containing the file; defaults to the standard test
        splits data directory (via ``pathmap``).
    key_column_index : int
        Index of the column whose (converted) value keys the result.

    Returns
    -------
    collections.OrderedDict
        Maps each row's key-column value to a dict with keys
        "bipartition_string", "unnormalized_split_bitmask",
        "normalized_split_bitmask", "count", and "frequency" (the file's
        percentage divided by 100).
    """
    # Key columns are:
    #     0   : PAUP* bipartition string representation '....**...' etc.
    #     1   : unnormalized split bitmask (for rooted trees) == leafset_bitmask for all trees and split_bitmask for rooted trees
    #     2   : normalized split bitmask (for unrooted trees) == split_bitmask for unrooted trees
    #     3   : (weighted) counts
    #     4   : (weighted) frequencies
    import os  # ``os`` is not imported at module level; needed for the join below
    field_types = (str, int, int, float, float)
    if splits_dir is not None:
        splits_filepath = os.path.join(splits_dir, splits_filename)
    else:
        splits_filepath = pathmap.splits_source_path(splits_filename)
    d = collections.OrderedDict()
    with open(splits_filepath, "r") as src:
        for row in src:
            # Discard trailing comments; skip blank/comment-only lines.
            # Stripping also drops the newline so the last field parses
            # cleanly, and all-whitespace lines no longer trip the
            # field-count assertion below.
            content = row.split("#")[0].strip()
            if not content:
                continue
            fields = content.split("\t")
            assert len(fields) == 5, "{}: {}".format(content, fields)
            for idx, field_type in enumerate(field_types):
                fields[idx] = field_type(fields[idx])
            key = fields[key_column_index]
            d[key] = {
                "bipartition_string": fields[0],
                "unnormalized_split_bitmask": fields[1],
                "normalized_split_bitmask": fields[2],
                "count": fields[3],
                "frequency": fields[4]/100,
            }
    return d
diff --git a/dendropy/test/support/standard_file_test_chars.py b/dendropy/test/support/standard_file_test_chars.py
new file mode 100644
index 0000000..d235d46
--- /dev/null
+++ b/dendropy/test/support/standard_file_test_chars.py
@@ -0,0 +1,372 @@
+#! /usr/bin/env python
+
+import collections
+from dendropy.datamodel import charstatemodel
+from dendropy.datamodel import charmatrixmodel
+
def general_verify_taxa(
        test_case,
        char_matrix,
        checker_reference_class,
        taxon_namespace,
        check_annotations=True):
    """Check that ``taxon_namespace`` matches the reference labels, in order."""
    expected_labels = checker_reference_class.labels
    test_case.assertEqual(len(taxon_namespace), len(expected_labels))
    for taxon, expected in zip(taxon_namespace, expected_labels):
        test_case.assertEqual(taxon.label, expected)
+
def general_verify_state_alphabet_symbols(
        test_case,
        state_alphabet,
        checker_reference_class):
    """Check the alphabet's fundamental symbols against the reference, in order."""
    observed = list(state_alphabet.fundamental_symbol_iter())
    expected = checker_reference_class.state_alphabet_fundamental_symbols
    test_case.assertEqual(len(observed), len(expected))
    for observed_symbol, expected_symbol in zip(observed, expected):
        test_case.assertEqual(observed_symbol, expected_symbol)
+
def general_verify_character_cell_states(
        test_case,
        char_matrix,
        checker_reference_class,
        c1, c2,
        check_annotations=True):
    """Check one pair of cell states: equality for continuous data,
    identity for everything else."""
    if char_matrix.data_type == "continuous":
        test_case.assertEqual(c1, c2)
    else:
        # Discrete states are shared singleton objects from the state
        # alphabet, so identity is the appropriate comparison.
        test_case.assertIs(c1, c2)
    if check_annotations:
        pass # not yet implemented
+
def general_verify_sequences(
        test_case,
        char_matrix,
        checker_reference_class,
        s1, s2,
        check_sequence_annotations=True,
        check_cell_annotations=True):
    """Check that sequences ``s1`` and ``s2`` match cell-by-cell."""
    test_case.assertEqual(len(s1), len(s2))
    for observed_cell, expected_cell in zip(s1, s2):
        general_verify_character_cell_states(
                test_case=test_case,
                char_matrix=char_matrix,
                checker_reference_class=checker_reference_class,
                c1=observed_cell,
                c2=expected_cell,
                check_annotations=check_cell_annotations)
+
def general_char_matrix_checker(
        test_case,
        char_matrix,
        checker_reference_class,
        check_taxon_annotations=True,
        check_matrix_annotations=True,
        check_sequence_annotations=True,
        check_column_annotations=True,
        check_cell_annotations=True):
    """Check a character matrix (taxa and sequences) against the fixtures
    stored on ``checker_reference_class``."""
    reference_labels = checker_reference_class.labels
    test_case.assertEqual(len(char_matrix), len(reference_labels))
    general_verify_taxa(
        test_case=test_case,
        char_matrix=char_matrix,
        checker_reference_class=checker_reference_class,
        taxon_namespace=char_matrix.taxon_namespace,
        check_annotations=check_taxon_annotations)
    # Iterating the matrix yields its taxa; compare each taxon's sequence
    # to the reference sequence registered under the same label.
    for taxon, expected_label in zip(char_matrix, reference_labels):
        test_case.assertEqual(taxon.label, expected_label)
        general_verify_sequences(
            test_case=test_case,
            char_matrix=char_matrix,
            checker_reference_class=checker_reference_class,
            s1=char_matrix[taxon],
            s2=checker_reference_class.label_sequence_map[expected_label],
            check_sequence_annotations=check_sequence_annotations,
            check_cell_annotations=check_cell_annotations)
+
class CharacterTestChecker(object):
    """Base fixture/checker class for character-matrix tests.

    Subclasses call ``create_class_fixtures`` to attach a matrix type,
    taxon labels, and a label-to-sequence map as class attributes, then
    use the ``verify_*`` methods to compare parsed matrices against them.
    """

    @staticmethod
    def create_class_fixtures(cls, matrix_type, states_lists, labels=None):
        """Attach ``matrix_type``, labels, and the label->states map to ``cls``."""
        cls.matrix_type = matrix_type
        # Dispatch through ``cls`` so subclasses may override the helpers.
        cls.create_class_fixtures_labels(cls, labels=labels)
        cls.create_class_fixtures_label_sequence_map(cls, states_lists=states_lists)

    @staticmethod
    def create_class_fixtures_label_sequence_map(cls, states_lists):
        """Pair each label on ``cls`` with its states list, preserving order."""
        assert len(cls.labels) == len(states_lists)
        cls.label_sequence_map = collections.OrderedDict(
                zip(cls.labels, states_lists))

    @staticmethod
    def create_class_fixtures_labels(cls, labels=None):
        """Attach taxon labels to ``cls`` as a tuple (default set skips "d")."""
        if labels is None:
            cls.labels = tuple("abcefghijklmnop")
        else:
            cls.labels = tuple(labels)

    @classmethod
    def get_char_matrix_from_class_data(cls, taxon_namespace=None):
        """Build a character matrix of ``cls.matrix_type`` from the fixtures."""
        return cls.matrix_type.from_dict(
                source_dict=cls.label_sequence_map,
                taxon_namespace=taxon_namespace)

    def verify_char_matrix(self,
            char_matrix,
            check_taxon_annotations=True,
            check_matrix_annotations=True,
            check_sequence_annotations=True,
            check_column_annotations=True,
            check_cell_annotations=True):
        """Verify ``char_matrix`` against this class's fixtures."""
        general_char_matrix_checker(self,
                char_matrix,
                self.__class__,
                check_taxon_annotations=check_taxon_annotations,
                check_matrix_annotations=check_matrix_annotations,
                check_sequence_annotations=check_sequence_annotations,
                check_column_annotations=check_column_annotations,
                check_cell_annotations=check_cell_annotations,)

    def verify_get_from(self,
            matrix_type,
            src_filepath,
            schema,
            factory_kwargs,
            check_taxon_annotations=True,
            check_matrix_annotations=True,
            check_sequence_annotations=True,
            check_column_annotations=True,
            check_cell_annotations=True):
        """Read a matrix via ``matrix_type.get()`` and verify the result."""
        parsed_matrix = matrix_type.get(
                path=src_filepath,
                schema=schema,
                **factory_kwargs)
        self.verify_char_matrix(parsed_matrix,
            check_taxon_annotations=check_taxon_annotations,
            check_matrix_annotations=check_matrix_annotations,
            check_sequence_annotations=check_sequence_annotations,
            check_column_annotations=check_column_annotations,
            check_cell_annotations=check_cell_annotations)
+
class GenericDiscreteCharacterTestChecker(CharacterTestChecker):
    """Checker for discrete matrices whose state alphabet is only known
    after parsing: sequences are kept as raw symbol lists and resolved
    into states against the parsed matrix's default alphabet at verify
    time.
    """

    @staticmethod
    def create_class_fixtures(cls,
            matrix_type,
            state_alphabet_fundamental_symbols,
            seq_symbols,
            labels=None):
        """Attach matrix type, expected alphabet symbols, labels, and raw
        per-taxon symbol lists to ``cls``."""
        cls.matrix_type = matrix_type
        cls.state_alphabet_fundamental_symbols = list(state_alphabet_fundamental_symbols)
        CharacterTestChecker.create_class_fixtures_labels(cls, labels=labels)
        cls.states_symbols_lists = [list(symbols) for symbols in seq_symbols]

    @staticmethod
    def create_class_fixtures_label_sequence_map_based_on_state_alphabet(cls, state_alphabet):
        """Resolve the stored raw symbols into states of ``state_alphabet``
        and build the label->sequence map from them."""
        resolved = [state_alphabet.get_states_for_symbols(symbols)
                    for symbols in cls.states_symbols_lists]
        CharacterTestChecker.create_class_fixtures_label_sequence_map(cls, states_lists=resolved)

    def verify_char_matrix(self,
            char_matrix,
            check_taxon_annotations=True,
            check_matrix_annotations=True,
            check_sequence_annotations=True,
            check_column_annotations=True,
            check_cell_annotations=True):
        """Check the alphabet's symbols, then the matrix contents."""
        general_verify_state_alphabet_symbols(
                self,
                char_matrix.default_state_alphabet,
                self.__class__)
        # Resolve reference sequences against the alphabet actually
        # produced by the parse before comparing cell states.
        GenericDiscreteCharacterTestChecker.create_class_fixtures_label_sequence_map_based_on_state_alphabet(
                self.__class__,
                char_matrix.default_state_alphabet)
        general_char_matrix_checker(self,
                char_matrix,
                self.__class__,
                check_taxon_annotations=check_taxon_annotations,
                check_matrix_annotations=check_matrix_annotations,
                check_sequence_annotations=check_sequence_annotations,
                check_column_annotations=check_column_annotations,
                check_cell_annotations=check_cell_annotations,)
+
class Standard01234TestChecker(GenericDiscreteCharacterTestChecker):
    """Reference fixture for "standard" data with symbols "0"-"4", gap
    ("-"), and missing ("?"), resolved against the parsed matrix's own
    state alphabet at verify time."""

    @classmethod
    def build(cls, labels=None, state_alphabet_fundamental_symbols="01234-"):
        # One raw character row per taxon; "?" = missing, "-" = gap.
        seq_symbols = (
                "43?2423132-330-0??21334--33?330?32--??-40?2442343??20--???0314020222401?-04?14-4434--20--3343142240124243-3300?-43-1?203-130414232?3143-1143",
                "03334?3?102220-44?300??0304-3?-04?-0?1-43?22424433?002-0??33-4024--010010402-2?4441-?22203-?4300?1013--33--303342?3040213-?101043313-42011-3",
                "-30-2423323?143011?23341-330-24?204---?424?4410330--00-3-0043?3-32321-1--40-3421-13-2132??23024?01-0342201041?1-4??320??-11-314?221-112-?-43",
                "?3??142?01???0?1--02?13?--0-014-?232??431-2200?01?121--21243111233?-4001-??-2??4413-?414033-3342--040-043-2??13212-2320--33?1-?0221-1213?1-1",
                "4042-0330003-00--2433320-4??33043133-1-42-2212141??-231?1?2-34??0422401-4?--10032210-2??2402012--40320?244443?202332??434-?011?4224-122?2?--",
                "?-101213-31230-0-0020-44?40?-???321212--012-4034201300?-3213?140-34?4013?04024311?4?02344?-131002?213444-2?-04--43?1?-02303-3-4-0?-3421-1-?3",
                "1-03-130-0-34?-02331234011130322-?-4-?024?0142?334?2--0??-13242101-2144404--?1-?33?44300---3?241220-?4-43333303?43?1113411?04?0?1300-0213--3",
                "0341323-0?3-33-00040--22?314301-42?-?4?431?43?24??-301-0-??22--2414-331?-34?-?-440?--03---431303140122023-4-10?0-44412--?3?-?14110313-1412-3",
                "?1?24231?3210-34?1-?0410122??-2?32-?12-43320213441441004230??20210-2021?001??43413--40-2--003121122--0011-3-2-3-4303?2?13131014233?1--3???30",
                "330023-24-032-0131-102412310431203-4104-3--2-?43130?102?2143?3020?124441224?241413--430-?344?4144144214-4000?03--3-0020221144?23?2?1?4032-03",
                "4244000?042??0-214?2-2-4?-31?00424-04-1?400412230?11204?-104?-421422?-42?44121?-203442213031?31223-3043214?104333?04-4?3-13311-40?3?003311-?",
                "?432?-312?2433-03?40?-40-2?0--0100--?-?413?201244?422?3-?133-?0422311000-0---00?43--12---3?41-423000241004-343?443?044??4?124042--?01313122?",
                "43?332?00?-4?24024-0331-210?233302213?-402?3?20414000--4?12-34324441433?304?11?4431?-24-341104332-2-1-44313320?213302?023142403--440223014?0",
                "4243331132-?204-?22-3-?4230110313-4-?2?333-24114330402024?-3?200-?1-0?3??04104-303---03-032431042-3223-13-4013?3-3-40441-4?0244-32?33130103?",
                "43202-14024240?3???13-4?210404?3-3-4-41-01-340?422333--32123?40?02034303?0?1123?01--?0?-03-42??42011042?333324?-31?1140323?2333?30??13302221",
                )
        GenericDiscreteCharacterTestChecker.create_class_fixtures(
                cls,
                matrix_type=charmatrixmodel.StandardCharacterMatrix,
                state_alphabet_fundamental_symbols=state_alphabet_fundamental_symbols,
                seq_symbols=seq_symbols,
                labels=labels)
+
class FixedStateAlphabetCharacterTestChecker(CharacterTestChecker):
    """Checker for matrices with a predefined (fixed) state alphabet,
    such as DNA or RNA: reference symbols can be resolved into states
    once, up front.
    """

    @staticmethod
    def create_class_fixtures(cls,
            matrix_type,
            state_alphabet,
            seq_symbols,
            labels=None):
        """Resolve each symbol string into a tuple of alphabet states and
        attach all fixtures to ``cls``."""
        cls.state_alphabet = state_alphabet
        states_lists = [
                tuple(cls.state_alphabet.get_states_for_symbols(symbols))
                for symbols in seq_symbols]
        CharacterTestChecker.create_class_fixtures(
                cls,
                matrix_type=matrix_type,
                states_lists=states_lists,
                labels=labels)
+
class DnaTestChecker(FixedStateAlphabetCharacterTestChecker):
    """Reference fixture for DNA data, resolved against the fixed
    ``DNA_STATE_ALPHABET`` (IUPAC ambiguity codes, "?" = missing,
    "-" = gap)."""

    @classmethod
    def build(cls, labels=None):
        # One raw character row per taxon.
        seq_symbols = (
                "CaY?dbdNYWkKN?kHMXtKWXRYYwATrRbD-?hTacgSRNbkdgNSRymmNWwnKVkYGAkmRAXXhNtgwnmGramYH-ms?vNb-Ca?DNAsdwRDc-KYd-HTCvNyRsXVwmDbBYRaXd?mcRdyKwgrbdswmSGAYndNYVbbtAKCgK-KykswasSYmmNGYGNNKR-dwgMGGTgBdhSdsWvAGKBnMK--TVc-KMWkHdXgBAVTtwVntrcRNwCRVdVamYXYvhbh-GnswKRYtscr",
                "kvYYWbTKYCMKNHkdsXbHsaCTYHtXN-TDSk?AaggSrAhrX?YbBWNmWVtBGGSXBARmYAVMcwtgadmsraWtwXasTYX-s?asDmXDAbRDb?RYWNwRdYXyMGdB?R-ABKCRbMaacBcSyw-adddBvGKAYddNVcAHWAwtVAbKycXgVKdYAkNWDGXAvrvsHgBtGGsBKh-RnBRmr??RTCwAmwDBbGVarMCkBADChh-cbAc?NGCdtdwCcmHYvmVtRTccSScYtYcV",
                "rNarTVWSTXTKDhNSGbGKhXwwTsGvwDMDN?agmcXBNKbVdRN-K?mtwWBnWDks-hkBwaS-kvcDRVmarVmRX-s?raSAhDaGDmmgRwRScnWYwvmTnSNyRsXVtMYbBdRKSAywhRdKwyVBAAAwDNaySrvNyTYYVAWCXymKSssNVsmGvdNkYrywYRBvwTNBGDgwRhMBBW?TrKVgVSG-?BcvKdanrvDgSAVvMrvCwybwNhMG?WVagBXwNwrcytWmgBgSt-kk",
                "sgC?dsaBYhkrV?kYwmDKaXRYYwvT-?YD-mXhVmnKGMTkdSKsKkrmcdskKRkRAATSwCYXvNWkbSNGrSXdtvC??kcgMGaWMyAWdBHHvMYad-HVyGNCHVXDWWwrrYHtanYNcM?yKSVrdWsVrKSnT?tXvgbDrMaCgKarDrKwskHmgyXGYhmRK-asm-Xnt-WbG-gGTKvcNKgntNrnNHBWMrMkHGg?d?VVGvcTt-cgbwdHndsAmXYYAVdt-GnkgvRYdRcH",
                "kaAcdGnNwmgcGkXXMKNyAXwRV-KTsRyTWw?TMwgSRvRVtyycGYAtHWmhXVWYkAcMtCybwNtmWw-sKRXrydmdrvX?NkY?cscYmKtdc-KaKr-NTkNydKGRthSbTyRmkdbv?RvrKwBDS--wBdDARndba-S-tKRygKcKWraSsmXvbgmdmbMmcshAwnbGSTsCXrSGsTkWnKasRyhMTcgWYBTSsdmwSAVM?tmhWScRn?kRBcHcmSkYccdhSGwr-?SRC-Bn",
                "CgH?dcCwY?VTWvgCACShvNRXY?dHs?BTSYHdaMgcRSCdYgcSt???VWTXhDvdyAgmAsWDnASNRn-GWttSKkGN?yND-SXYYbAcdmR-mwmaCsHnmkbyBmHDKXDkWYGtYDcTvCvcSvGwbkbHymAkBKhNYayhckYhcy-GXhRnGBSKVANdAgDSnDmdH?MGNbnBXhThKHvaMNnGDKNKdAMSKMSTgHwcvyYcwYdrtDVVtDAHVdkyWyCsvAGWtGkVVMya-Gbr",
                "DRcwhbdwsDs-yMKsHThKdwKYsvAArcvwT?MTAMgmRntHdTNknwSCNvnrWKkXMCKtvrXYknSwMWvarRnAsVHsbvNbKHcXKbHMdGDTBW?w-dGcsvHWXsKVdKVnBvnNcYCWks-kyyWwaCsrdGXNvsNHyVDaHAtCbnXThAnwaBSXHhVskXNtygVdTdMcsNWnaMkwckaXwt-hWKWBwG?-rMb-HAvyDArbtRVNBRkggmNHHd-DXYasWASd-rmVkKwVthsa",
                "raYsbGsYBaWKkgkHSWCKGkmCWwwTvRbD?RhwvcNkR-GGbYASVDRmg?vn-?CCGHtmWgXghCtmhdHGWNtHKdmkchDbSSNgDMAsnrRDYdkhDC?HXCgRdmXKSyKrWdYVGYBmBaGNkmgrchWdmXGdNGGnkThStWmNkV-mVKasCnBYRmN-YkyGHCW-wwM?XGVaMkSvgWAAnKwMkWM?DHyaKR-GAYXkmRCVtRbywCvVwGWT-C?aHnhtWHkHmCysXXRbtMcr",
                "cHgKdtkgKKXygnsCtTGmVdRHXD?TAkmDrHsTdhVnwstkdXCGwy?ms?wnsVkYkYkYccXXTNtbwr?GM?yXn-msbvBwWRB?XNSSmcRHB--RbHywBaNnrMgrDD-n-wrKXy?mcN-MrnDrnKgs?StSBbVGtNaBvvYCw-VrM?sGasSCm?WdYXsWmABCwgMYBmhXMBtwmANyNGydGcsgyVXVrDRwXwX?BGXKtrkCtWKm?wCrhDgvmwSWsCCk-GmsKWVYtaca",
                "ryTBakDTwVwd-?kHMArVrXGbrGbCAVbaH?nTrygYMCbRhsaSby?rgBBngVCNmAk?KRdXb?VgGnNGgatYGBmsmvNS-Mw-wHACSvAKcgvWWNHtbvaaTbXXwcDYktgaXc-KXRMCacgybR--k-KNvAdNYVrTCGDv-d-KwwKKayRGDmn-VyDHnNWdnSkGGhSwAkNdhWAGCKwnhBw-TYXBKkW-HdXhdhnmdCGnTTcRwrXs?mmnXwVNrgmhkgMRgKantsct",
                "gvYXAkgCVryKdaYRHTdKW?crbwaTYnbDSbgcmRgXaNw-YkRS?dsBNWWwgmsNVGkwHHyBvbhXwH-Dr-SGDSwN-hvbMVawDTSYgwRWV-WYvTHTBvKTrdXVvbgbckTWhN?R?ndygwg-mdsVghV-YcKNsVcbTAR--mhTyAcwMbnCsAyKYMw-bVAdwmBXGKMyahSAAWSVTvB-MAGYmSs-KXykHt-gkKVvSdrnKHAaGwTRXnVrdwmHvamcvHtawYNTksSc",
                "GgYXWRTtbGhw??r?MyGHWcRwM?mRrkbm-mhTWvgMgNvgygMScyAwNYknnKaGmVGYRAX?htMT?kb?GCkbMVHs?NTysCD?hvHgwVTDkRWnmyHSyvDDRHRVXnRY-HRaCd?XMRYYTS?dHHVmmYbvaRRNDKXGtbhWsakrrakYDsKYmCGvtnNybhAhyNVgkydsctDkTnvAHtyAmXWtaCmGKGBNH-SMMKVTsNtrVbYRyccSWdhKCMV-a?sY-RnSWydYWWSr",
                "CrCAsCndkdmcSmvyTtdTWXdTAtAHTABXWhhGKcgaMcY?N??SGMbsncwvKVDWMXhmDMN-hNvXKHtNWGmYStm??aVX-GSYmS?sdnyDAMmvtXnnCrKynT?WwkydXYDaBcCTGrCvVm-RMwsTmbsyy-sWYwBmtDKhVKrX?a?masbYTmdYkGNADbXdKSygMKmrdMgdb?SGAKBnHK--TVckKSksgDvgAA-wkwR?mHXgswCbgkVY?YHAAhNkvygnd?RYdsht",
                "tWB?d?akmWw?ARCHVTKDCvarAvr-YabdbmrTawYDVNwkHXNNdhWrkW?nkBBTkSgcHHyXctwgwDSnGhVSH-yCgghbhgyMgmCtXwRdrRYVdMvYbVcBgRwyHmbvSdXAb-?GwsYtHwWAbDtkWrGw-KVvYhabM-WCWbBKbBsTaySGTsKGvGYtHHbdGYybW-gBAdWdhCKCKYanswvbsgsvSnhHRKAgb?yKtwKmtYMBHYHWMB?RmYSYnmWhCmVswdGSVsHM",
                "hWYKRNdvRWHKHWwHywkdXXKYYmbByWbbmThTvcySRnrk-sDYGV?sMHMkwKYdtcDVdVrTtSnCRnHyR?YYbNGGNvWbvagvDHTgMS?cc-KYv-HTCbDyRCwVaHCTYKsarAvKcygycVyhNKDyraGgH?mNMTDbD?NTgYtbtckwSbWCaaWtYDNCKRTdwKKwSnhyBASHsWbWRKKrW-bBdbS-ahnkHmXvdAVnmwVgBTGR?VgXCAHVmYX-VcNhVhgHwrgBkwtd",
                )
        FixedStateAlphabetCharacterTestChecker.create_class_fixtures(
                cls,
                matrix_type=charmatrixmodel.DnaCharacterMatrix,
                state_alphabet=charstatemodel.DNA_STATE_ALPHABET,
                seq_symbols=seq_symbols,
                labels=labels)
+
class RnaTestChecker(FixedStateAlphabetCharacterTestChecker):
    """Reference fixture for RNA data, resolved against the fixed
    ``RNA_STATE_ALPHABET`` (IUPAC ambiguity codes, "?" = missing,
    "-" = gap)."""

    @classmethod
    def build(cls, labels=None):
        # One raw character row per taxon.
        seq_symbols = (
                "Dg-aRmgGyUAVdrahhAbXbUnmvURSUuwyDWUwRSWykcvcAMXnmMhhYcvKAGbuDvXRDwnYSVGUudSNnBsUNAVSXAGYNGDVvDhdSBMygCmgUv?cdB-DRwdbcwvKG?XvYAWBavVWSdcDuXmuuvyCWnBCAw-CcksNKHYHUaV-UXdbdcmbYXGGmnyNVXKvukvbRCuWvKCVdBwvHKaSSNcDwD?VbYWHbkCrSDhnAwVc-S-NGhurbhrsKusWWyMHYRuuYWRV",
                "DB-nRnBGruAVnWMhaGbXhVumAMD-UXmXKXwDCuaysBsGAGnnduGhYcbbMHBuUcHcgCnhSVsNumWwSYADcwsShMrGDYDXwNAYNBrmg-vbbUNndu?yyXW-?ArKYDXcWAWByvK-Suy?cXWbHHCWgnMScVNsUwNNWmvUMaVNAXvCBaaXUGDannDNVYuC-MmMHSugRBwVdCBMswNXRKnGADhVXcWSbbhrghSnAnSuhSYNbngdbUH-VuuCWvSndM-uMWcM",
                "MWCasmyngUBbyrWUCHbmUbnnyWWmUnbSDbYmUgWBkSRcVhXnBMGhshWRMRAmmuBkAUnHaVHMDnRaYWsUnRuaVAGYNgm?vNhdCs?VgkXgyVUCd?kDmRMV?Uvkw?rGdAWgUmgh-WgUgAmdyHGskBRCUCv-CrsSdvmYUw?GHvwYdwmhYDbsmDMrcKUsukvRuMVWXSCUUuwWkKvK?rcDXXMNdYXcBkCUSDVrYHhcscVNVhdrbXnRUkHWXhrVYRMbYWwX",
                "dRswabUBmUnndsahDBunbhc-vhRSUSUHDXDSbaVykr-mVgcnBMChvcYBMmhunCDRgNGYCaHUgcmrBn-UDMUaXMgKBGDrvDWXSWmRsdNgwvrNdBBDWgSwwkDUsXCWCcWBABynSHbR-yRYmWmgwvaUdddncC?RygNnyyUkyDdbsyBAcUNubckHVAUNuMwayCuuRKbVdubvcVaDVXsDNr?KAUAabRCXknscAyVrsSNXY-YKbNYsUWUScbASrRcUYWa?",
                "BRdGb-yvaanKbgbhdcCXbXnVYvRSUuMvcWRw?GWuWcD?AcXvGBhhUDvM?GbmDMXygYmmunWCsd?CyBskGCVHXYvCC?RAvYVsSAKDgywwkcacwBhDuGBkWwNkCnDBWbNA-bKWkWRDCHrKXVmCCwHcARNCsDAKVHuYgkV-sXdN-cmWkvXkuKBSb?mgCuXnSUCWNnCkrgw-vV?vANRnVdhkHrWBvUBwR-hn-wkUCGANGNRrCBrsuusbNWvXcDkudgRn",
                "DgurCDgCyYgWGuDhhADXMUSmvCRGSuraDNnhwsCN?WvcdNWADBhW-bUgnSbXuRvkS?KhSYyMyKSNAhMsCAsggAVYNrrAYwhgAhMGwdmyUyRddSSUdVsHDSYKYKknSDGSMRdXNRdgKGmC?Ay-CgvBnXkMdauwKHY?waGYAXNDdkRMhXkcmWyryBsvnmrbdCURky-V?gBs-K?HHC?nG-yw-aKAACGvwGM-?wVkcDXg?dyDBCCbKNaGvyYUaCWYaaRc",
                "RHdYR-gskuabW?GhHHXDhUKdrUgcUu?yD?gsuSayccvSWAVHCXAhgMh-VgRy-BnRd-XH-AGKudcCmnUDvKVVnAGSDHDSkbNwSSkSgbmUNHhad-GcRwVhywAKGwasWbyBycyWadcXNWvHsMyHVnBDsnbVVDGnhrBHDHA-nHdcrAHXWBGGKnkgdCKXYYYrgCSucKCXmmgBnBs-YNXVScdrbYWuWsMc?DhXnwVgaa-dCvb-bgdDmusWkWCUGRuuWSHH",
                "DsYgDWKArwA-dAKgGAkHWyAsrkCRWaCWDgUwnUdDkMUcAyKysMhhYHAbAWsuwYSRXKUGmVvCDhGmvDVagAYSnGGYrwDWBr?ySBcyrCXWkDrXknDDbKNHKwbgN?svkAABrWrwSrURuXRWRvyCcCSCMGgKVGVNKmwdVasnrYdbnMmbYRGDYnc-KXHD-yW?SgXWkUbVKvwGduaMgvcDGM?VbdYgnGy?XsaKUYVmvA-dgMCANaXshusWBHYSunCuYvRr",
                "rWagAhsnyhrVHrRRRmbbWBDhdURHdgnummUNHVNhHavcKSbKKguhkAYDAGbWVvdNDwrvhubhRKnNDaB-NmBHkDG-nUrgvDhDHCSvyAMvKnDSdMwcNgDRcs-kG?YWvyWryvmWcAcDWwyGuvR-dGdK-rDCuBWrNYgHMVWvc?mdbMmbhXGVny-NcCCkV?HbSWbWUYKMrBcmbcHSSVdDas?kSAWMbDvWCDnnAbNVYRADADNyrCyYHCBWAyDGcvVubKcw",
                "vhDAwmY?knSgGNyarAAVHgsXVU?uRbyuwsUkD?AkkcvYBMsvmCb?VBkrAGv?HUXvWBWYaHGvvAuKnDsUXXVcMMHYbBDMMYXH-mMUXWMBaCHSYm-DRwBbUbVKkVsYVCmrkvVUMdHgUWhunmyMVVnC?BXmSXsAMHnngUV--XNns?vbg-XDnXCNMsBWGAgdVhrwsdNVNcbBCMarsRcsaVSVUYN-au-GW?dMgCvAkDKUSBVkWBhgvuSCyuvNYCsvCmkV",
                "mmBybMhGmvGVGNVWhhnWNdUSsVvDcywyXyRXrNayGkGUAMBsKMCWM?AKbWnuarghDannUVSBurSdHBXsNCmRYNGdYwnhakXdSYMYyChgBs?mdb-ARX-USSDnRkaYwcWbaySUBdAhCs-huvgwWDBCVr-rckvmKMYnUdCkYghwrvyDMKdkrmrNSY?HDkGRHCVWXyCbrKG-cSmXXdANwnCrsuraRcGBSDMUWWKXAb?kCnnrbhmmKuKGhgMvGRVhYVDy",
                "-baX-rKwGbAHVaaC?HKXbcSNHUBBKwwHDuUrCGWyvKgNVvabYMRA?GvVasKGDARdkwncSVbmAVyvnBHCkMWSXrVGYUCcvnDdYBsVwsYcCncchUYYWwuVcDRrS?sHBGwHBvhMAbdDuXucmXycubMCXgSkKg--BhRBUaUGMRNvaHNcVDyBbnkHVCKHXkNhGdAGvKCbYbsKVKMkXVmVuUVVbYaCWbnDhaRHhyrcaNVg-hcRh?rdKhsWWCUYXXcddCYV",
                "?dSa-ugKUUHkAUAGbAnnbCnmc?GSscwVDWWBKs?HkcvaM-CmaKca-HvNdGbuHNsRGAdYSWBVacSRUS-NKrvSXNCYNGnYCShHca-ygYHSyAcHCw-mYVNKcHvcsuuvmUbBHUNrrRHDhXRKGA?duWKvWycCwaUMKvnhRah-UXamdYmGKYnGNnggVXDv?WCnCsbHvnhC-uwRHKDWNmKWKDrVmXSsbk?vasawUwgXr?bNGhcrchXUmKmwWKMaRRCSYNVs",
                "WnkUsdhGyR?VHUyhD?SXbKUmRUrMvCwAdbUVDKnHdcVwHBXKKRWmvDmKkacSg?YHHsXwSbmXaGm?nhc?Nd?rX?GkmrvmNDCnNC?ygruCMvHUABSHRvYYc?bWARXRVBamAgVdMdUDuDW-aWCdXmkHd?KCcssu-ruDUGgw-XSCdawUW?yANdrRYHvcVCnVvSbgkwduKBwGHRKRXvDyMa?yUgWDBcCyvcRvVshU-S-GVHugbWbUrrYBnSBrURwu-s-r",
                "DgkVwmrSdhABWvaMWbdXuCvHKRHXCuwXrW?mvWuuCAvgAMnwrSRhGdWkrbbHmhrw-wwdvBanuCSNMkuVsMU?BgrYnBhWvaumSYXydnmHGGNy?M?CHWdbb-vrBYSMHRWBavXXgBcua?mMhsysdngyBgVScwHAcHsDHCVHvBASYcHbRmXg-byRnRMhAmRwD?w?cKCwdVYhC-hgwghWND-nrgSsWBmUXsWbnWVVVYdyGYucYh?vUHrNW?YRUSrGyWun",
                )
        FixedStateAlphabetCharacterTestChecker.create_class_fixtures(
                cls,
                matrix_type=charmatrixmodel.RnaCharacterMatrix,
                state_alphabet=charstatemodel.RNA_STATE_ALPHABET,
                seq_symbols=seq_symbols,
                labels=labels)
+
+class ProteinTestChecker(FixedStateAlphabetCharacterTestChecker):
+
+    @classmethod
+    def build(cls, labels=None):
+        seq_symbols = (
+                "PGSD*nCdNZ-Pa?kDyNxtPdiXh-zNnmYyAgEqmZyXrkYXzhAWMWSwyRSTRGDkItcmXNhyKAnfYlvTRqwQXrapfmLkvibbXQGleGksliC-EmNewqgcARSlZDWnAzXwEaSGK*Vz?yVYDvMQABZNrCCrqEmCFmiShDrpWZBqCSQyblBEVSlCHhl?FITpyXklyxihtA*mcbCLnMq*pZprTSGRDregeEC-SIGNwZb?tHRxqCibZeAEAGnYeCk-TVSDgNDegPcXfwKCMziIPbnAltrFzwxFksB?yChwaZ-XpLEyDZdPB*qFip-dGtWHeyLGm?iPPvdkRnbEaht?yIVCrXhQswSwgCykeYChEgVKDLLKzmgxqmhkQ*KAbfKiFVXkt?HveNZbDExKWdctQDQvpVHIYHtXzNWNE?lFtenNrMAnX*XkrtVRvpptYQeKwDKxgcnXKtzB-TDHyHd-MiqaDvAFqatQ*?? [...]
+                "ReMQIDvVAvwAavnawMZhglSXYnkYnfVyNgpiviyyNkffrhGf*cSwVHzELYhkDGDmeZmzLKBHPDvztkDaHa*NAmLBv-bNfEcTM*LszffdFHNfwWFIntSAaqAzDKcfEAHMaiQlfqpwlKdYglZlCCtPbEEXEWfShLVqCZfqCSKyygBMBaDQFBlaFYYCKCDXyfLHHqINemCvXaqaxXprwLeBhpezzEEtPcWqtWMgBf-NKPPtZWvErFlqewkKTTrDwcsflScXBN?AMtZfHYPtanrEDwTiqHGWlaW-an-ZDrEazQdiBqBgtEXlrwWteIgnS-KPPfXmzzXrWiCnywVC*EhlrxAPgCyZRYCTxNWKpVWKwmaz*KM*Qt-LAi-SbIziTxzAtacFD-xKWmcHYDexieHcAIwHTGNNRTAqwyrGrwHdXTSvKtVwBHpVmQSCmNkKl*nbKpLKITWWyi?acQsmLvzNqQRkb?? [...]
+                "PdpZ*BxHsZA*arThBiwtkd*XhWSmnyY?SVaqmpWeREG?*hDWVASbyXWLnGWXWnePREzyxpnfHivmvrCQKZrkcgpGvVbln*PylgfsrLZhRtsizYyVIFYfPDvnMqKnamSAK*VrWyE?-GzysNrcRNCzyCTKFmfahsrayrzbCqQyC*BCyIAZNWR?Ta*TpXAlGxT-iVBmcbNLwXQvpcIieqMPXlygWd*-NnqQbvXVahRxqgHsY?baAGnICskRLdiDaymegkhVQyix*zcw?YqFYwMCqwrFLxQKgCWHasii*QixDfMPVLqQlpRvrWZgenfLRCiyPvpfNBZeSvse?XrICDcwZqSwvCrtlY*KEgVCAfzDImQMInhfTAqRpLaZvVXkXgPiwMqqELQKdDrQgElKzVLaywtEGegsa?CCtAcQpPAGRcGlkHQevsntbmWiiXKxgcGkIIbm-LRHyndSM-DwNhXbqPzmnrF [...]
+                "PwSDvnadzFfEby-HyeptwmLPh*cNxmYCkgEqkcCdkzYgeiTgbQsTiRSTBnDEnL?iehcXmA?fBdCwbDwQFpadedBkikzbVQrRmzkGxCCPlpRTCqHdhRDLZQHyXyXvPe-Gx*aVlBavM-ZQW-xNhi*MrZbmwmQRhaqNkZal?vhDwpExyrlCGPlstsTW?ZLNXFSGBH*QnYbwPMIFpzxri*PlnrwlyEviRImww?XsydAxxNdbvzAAGAQBvxTEHh?ygWDLgQarfxrLMLnBXsnIlVbGztTITsRXysfYa*isaRgderEdy*GadxbZydFVbbN-v?qMnvcLKhNWdBLhHIgrrvAQZwSyzTyWpBecACsnBxLdhmggqKhcQiKnIIKlNzE-y*lA-qEblETKFdsdwXcATDaYYdtdWAWvWzaW-PHksD?WWQVPiHVINnwYtQmFeFixeWQdAbBV-TiQfHaYriIDbvnAncCcm?F [...]
+                "DGAQnnZdPkEkapki*SxmF-qXmZpaEmyvPTPTeVMqPkztVnbTQQddyRLMRC?kmSTMyTgaBKsfzSHxwLtPn?NpswBrAQeSXQGqIGMYCPmrGiCbwhcCqmSrZqWlMYvwhavtMBMCaFVyaqFNqgWNrzCAImVbbhySvDxq?rWvbSfhbmEEadrCwzGxPmppNgPl?dVYqeWvSTqBIYqPsqbrrSWqHrmgeIdwEVhXp-XttHAnWtbfFkglYGKnKtMtCMHDtG*eNYCKbnPHM?rygDWAFt*??wbxfNAHyAhAeZXayLEyMStPPVF*BiwsbDntfqZcqsiyETwaSNIreikWXkDIreFEsZihaIPxenvpEgMRlkssxhxPRQhksXvRLXqcSavdC?HvRQBElMVkx-ztKbMvpsGrHzmezeLNE?aFP?nHDvpv*xd*S*meaiXmYcatSDkxmbnbdBQa-pnHaGEsPiAEEviAScrbyBD [...]
+                "sWzDCnCiNyRPesGQMNgGcsHXa?FXnhXIAlEvphYAAmYGDaXwRPBRyRSIQEDrIMQlqXIyexIGaAxTkxwdn*zcTmgmLZ*bElnBBXgsziYLiDNFPBpfAvSlhtSvg?Nwh?YvCkIy?yvnYPmmpygLtVQighxqFzASwxrp-ZDdsRPbtlKEPGAgqHlyYIndLXTltFRmwnhDcyLLgTw*p*NpDfPpszaRfsCvSIGWEZSeyktHbAibRlwLdtDxDkVNdYiPvQW*Qfib*AEemyikEbSbzZtVNYItEsBGXCrtTD*Xpp-IDzMYrXIEidntMQvHryTEsgIIkShqdKBEQsYhVTQFM?zQTwarSL*iftErEg-BDqkPDDgxQmTrQGXAqfwDV*xkhEKBLeSvcstYNdttgpRAbGrCYmFnmyikk?fqtsnbrMDnW*WVpnaKtpiwIQbSwXxxycIZKtz*-hQpQHW-?iRIDEILqevYFtG [...]
+                "RfSkGggaVrATryLDANbsidmkSP-LnYMyAgEWahZ*FkYtvhh?-BnarWGK*KakIIvTkNYyFan*YbvTl?VBXPaxfDFkmyCeGkGBsklsxiCZKmNXtqaczrrZWD?a-V?AslNwEKyzweaeDSMKGSXvracMFagnWAiThDhdWegtdP-skl?ZVRSccnlQtG*--XGFldixQRr-LwCRMEZrpZhGnYtkMEkgBqC-SGVNzsb?y*RWQciLkvvEfwExqziwdFfsCyvepivLEPACMPR*HlxnxprdZtbv-DQ?lCkrAZCXyGH-bZZSPrqkmtvMGPW*EmL-NBTtIEHZeNhHMcP?yGvpakhELfGwlGbsCwdhx*lzSyxPylLvqfFkQXLnGAKNKXcMd?BdmzxSbgxRPEPMiIZlYtsmrHtXyVlSbaBctdcNcwicz-XZeAEkdbepzEYEG-KxptnWYQnLNTpghVItIyqailAF?nEr**- [...]
+                "yEws*nlbes?Px?ISyWBBAyt-drGgmmLyaZEmNZyEcZLXphAlsWQPHLSLGGHqCIyDwQIMvAGfYzvZtVTfpWyhPgwvxPzGXBmI*NyslVZSaTNeZDgmyELMxlcEvzdVEaSAK*Tz?yVYmRMeALEYGsryDVQFFkHMXkapDfYkWXQqQcAEzSStDhnQyLHXyQMwyviAZHRiNWILF*W*XkCrSwiTDx?CQiCpvTfNDvqbHx*pdCiHrbWEAiSQG?BHTfXgcwDeMPYXfw*WwiixTlYAbtTwpwXLXsl?WxlygwliptmyzZdnYmqtiGbVYQlyYhnRdEdvPGBzknZ*xmpzyeCLXXXQLwSlSByk?YNhEKprmskFCmQbmghNk*KesAQcmWskb?LagwsDLPCPWqawHyKvaTBI?HkwMPqlTcP-GXKzkMEFG*-*MBDRvpH?tQpNmkFwctWbiHtg-vMGYHN-fiCYrvXGlheH*iM [...]
+                "ixAgxnctNkzIVpkDkXDVBADeZWfNVEYwVXbyIZqBPkrAQvEKMMqg-wETqDDhqAaLdYZgKLnbxlvcRxwQXMaRfWHkexVsSeGlwGBy-iCdErgFpwGckeSDVFZMLZX*laxggInX?kfYcGq?qlbNrIkMhEmiYirVwXrPHidvBPVLbQbEQClbabs?FITpqP-YyvyaYsZmNCmLIMdhLZfHTS-RIcGbwkpHSEkmtnr?bnmwBdRbveZKyGQhYC?fT*SIxQDBZmdZpHKDYgZXHbdAmtcztw?FvQTcDCzFeZcgNFyNVpqahMPSfz-dGNdIpKLKG?dHPwdMRMXKaGtXyFmCrXZQSBAH*XhMbbKXEymKntLYCqcaqANCWQYPVipvnVZbDMHDeEiMekLKWkcPLMdnhPDIhydCzTVNTP?zreFLcXpMeWpTytaEgpptYpMKwbKHgWZXBt*z-LGvCI*thyweqvfYq-dQdLd [...]
+                "FcfIDnLdQZ-hQw*DypxNNNiYhxpNVyYgCsHPnEyXrTcXDDQyaaVYI*sSRdYfICgly?hwhAgNiFHyPTzYQMYpFGhxqihYXmDbeKLbNirXkENeNEgWFQLZImZXVSlwEaSarK?DdyQY*wM*VhgNlnCSIE-l*BicSryZFYbtpVKxrlDIZFKCtykQXIFpt?zlyHiKwmw?KZNSwMS*Zl?TZkGqErpgVEvqSBTNnKhhtYp*TrDbCg-EAbpYWeM-HsSsikDQyLQMBExCihvIeGEkeeaLYhSbHVsTqwSzZrAgpmySDNaP*hqezMmzeRfDeFZaRRqPtdlkRvTEHWaNF*fWlgfxbNnwhqKkslbWVFV?IYtyzcGxqrYMR*bX*MWiFdLiFyHBfewsMgxNWdgtCFkwZHHexB*nINanE?AFAQ?drSG*xdkkEZsLvpMbYyaTwcsGmKwyMTtwQLgcNf?-GSmXvLbHmTpgBTP [...]
+                "xyVIPHSMEt-V*rCv-rsthbAYEWVNvvWFAtELZZItrPybcFxIHfd?DRHwp*fkGZxiaBDxFAINYl*YlLwPWfbpfcvgLvBfdrT*e-ENYil*mVkvmFMNApfKix-bAebwwRSGXaVF?ReGRrMFARdvlIHcqEmSXE*khKalYgnqIhCyfDNqnZsEsNlEEIPIrZklSCNK-?*mmDDZnfQZpZQCrSLrpP?y-yG*WrNWwRw?-HzxzlYpZRlbfaefeeGYYYkxqvNp*zlFbVqLtzYbZYnABQrQz?kQwh*WGbXDN*sXECEygEBwLrylwsrdGiDHNRdGS-nprNXrLnbLCXmcyiKxQLBQsbSFewyEeKlmNghKVRrZFyFb*lRmXsKA-f*PIpBLzebitNZYLl*KKdQtgHfvpRWI?vtqtNFRmyvtQXnFwbAnIAXRm*ADvVzIfwCKweyCK*YXyeKDLTvXKyzVEiNmK?W-nvLs*?l [...]
+                "hb-t*LxhiqptnHX-eQayxaVsMzyPnmsTxhBMgBNXhIfRzYAwcWQSqHdMWHdkvKYNbLhvvAlfmlsTnzIQ?XipsYhIEcrelCqlhSksMiC-TVftPGgTC*cTAmv*LkRGEaMGbBVAFyfQDsrBA*aqFXPwwYtCCwhSLgCVnppHCWeFsaH-IhlBFhl?lQ--MiCr?V*fkCQmDRCaHKffdZtr-yGRi?etfSmIZI?DgRVlYtIxBbzbZeHYEHaiaCiPTSyfNiDewPp?GcWCRCiGtvABllrgPDbZBDizMKZnsFX-KB?wiZqa-btyTATYBegHVmLVAVPYPvdkLWTQTXtkxIVPGXHFfAIWpCEkFPfhxHyMwDxtTmbxmihGq*WHmfyCniX?EtYSGNfpnLxiPerxcDQhxYDIcCQLbilIyAThPFqCCXeGytXkgnVwIkptddIlw*nYITwWCtMXybHEslNLk?fK**AFqBTQeyi [...]
+                "lQqDMRndBArwrnyDMFhtDlLM?crYtkYmAHHqaXT-RkbNQxhhMtmXyYSxVGLhreqdvwvSVAGznlNKIHcLX-atBZLkhabDXAhxeAMPyGA-ChmqzRgXAAD?YkXZD*WRyaMcwBWPvQVrDiFQcEZgrsPlYyfCFqXsSVPhWxVqpmDyTYBgFXGvZhl-RnTXmXFlQkw?yzVdcvc*KzsZEC*Wf?MYHceXMEXttIGNeZb*vFafqDqLVIVBA-nKe?arTVSCBNIHCPatziIqkzGsIWzEllzvDlxSHfQFvWMNitKXeytsDddPRkq-dw-dteFNexLLggiPaIYCRSbwFht?ZI?cc*?QHISweCykfyLhAyfxsLPKLmYIgbckB*KqcfcDX?tzkXHeIDabBEdFbkyMQcZvyhkrFflGIdMNbcnFBLyXdXqNiFpLmxBsvmxVVNBKzfsxagKCnMWQVprHVmhFCtqaRWCBQ*tpft? [...]
+                "HhPiTngnKhBItWtDfezSLdePWpzaVFyvqEftLMtq?-*AZRwTWCew*RyHRGgAIDCskXgskpnvYyCmlmgbAyPRw*IklvbdXsvdZdasexwBEFnELqNcQEQlZDtMtScSyaSipGqn-RXQD*srtW-NEtnIGEwCBCxakWrIkxbfIIIcKh*sVSMZThwcHnTdyrWyPdlqghdDnbwkkKQ*NZVVyCYI?DEdeEEpeCL-nLdCVW*MWCxRtEAWBXvBcCxsTVllmi-mTIbpxIbDqIix??XPRfbyzGavkHt?NClHibXSSLtylNGMsrVqiAXqQHWr*-CGerqqhvY-mnBgEct?ErRQbSlSq*QihCIKfPCT?gBNVIkbzaIG*mXDpKKAhfcyFXCsg?WDLSgyDfWyWR*yeRhYXxVVYVZZkWWMb?IFtgnNrmFiFGv?PWhevqVpQQDKZkzAMsnyMtL*iTnpyfMqpGq-hHAkeaLlnlE [...]
+                "PkSKIPpeNRheawRmfkRtsBxXhkHXGKYdgFEtmkyX?GeXzzacbYaFytKwngtkvFymkwBpnRycWAtx-Iz?SNPfdwrDDiRDlCfXgmHyH-Y-tKZiTGKTzARlxdbH*XQ*P*fciQZM?mPKKNLLiBZHwKmQMBTIYvWmhIr-LZ?AaQAXMlgfPElDWizAfpypvakeQCbEtRgRcKMWNPqHNeTdTiwl?ZTwflCEFelxTVZLtZRxZF*bKXa?Ag?Zk-tATbyHFenAgwcN*wKbNniCP-n-StrFzgkmQhyPEhrFly-XQiCycZlPFilriILeISChLRHqHFiPx*hGqnAEHmKWMVRCMshQGaYwgnybeEC?embKD-sgXazxgyYpQzppQeKvFlrktW-vTwlNDrYSbpMydlDyp?AVYmPSzDtPigi*tFnfTmAQbI-kSty?MqmtBTeBADKPpkbNwdhyYTaHSHL-LiIScqAK*CmQcts [...]
+                )
+        FixedStateAlphabetCharacterTestChecker.create_class_fixtures(
+                cls,
+                matrix_type=charmatrixmodel.ProteinCharacterMatrix,
+                state_alphabet=charstatemodel.PROTEIN_STATE_ALPHABET,
+                seq_symbols=seq_symbols,
+                labels=labels)
+
class ContinuousTestChecker(CharacterTestChecker):
    """Test-fixture mixin for continuous-valued character matrices.

    ``build()`` installs class-level fixtures (via
    ``CharacterTestChecker.create_class_fixtures``) describing a
    ``charmatrixmodel.ContinuousCharacterMatrix``.
    """

    @classmethod
    def build(cls, labels=None):
        """Create the class fixtures for a continuous character matrix.

        ``labels``: optional sequence of taxon labels passed through to
        ``create_class_fixtures`` (default taxon labels are used when None).
        """
        # 15 rows of 15 fixed continuous values; presumably one row per
        # taxon, in label order -- TODO confirm against
        # CharacterTestChecker.create_class_fixtures().
        states_lists = (
                (-231.6391 ,  +972.4189 ,  +626.6717 ,  -328.6811 ,  -213.5738 ,  +464.3897 ,  -91.3483  ,  +349.8176 ,  +333.4800 ,  +521.4970 ,  -371.4108 ,  -821.4290 ,  -86.9872  ,  -804.4891 ,  +275.3547),
                (+104.4199 ,  +669.7402 ,  -68.6082  ,  +975.4302 ,  -874.4510 ,  -191.3305 ,  -179.8437 ,  +655.5611 ,  -657.4532 ,  -563.7863 ,  +39.0321  ,  +317.0017 ,  +887.7048 ,  +342.7651 ,  -184.7631),
                (-613.2947 ,  -600.7053 ,  -700.5140 ,  +438.6092 ,  +615.5268 ,  +640.7933 ,  +503.8948 ,  -159.7922 ,  +866.8036 ,  +274.0275 ,  +462.5738 ,  -506.4329 ,  -445.4251 ,  -343.7987 ,  -285.2830),
                (-654.7695 ,  +103.3806 ,  -971.8866 ,  +853.9164 ,  +653.5797 ,  +823.6672 ,  -476.6859 ,  +325.9331 ,  +456.0902 ,  -399.7095 ,  -930.6770 ,  +762.7456 ,  +851.4525 ,  +66.4253  ,  +914.3272),
                (-762.4904 ,  +808.3665 ,  +522.5775 ,  +250.6523 ,  -287.9786 ,  -995.4612 ,  +571.9263 ,  -793.3975 ,  -42.7027  ,  +186.8869 ,  -1.5874   ,  -758.0643 ,  -69.9948  ,  -395.9015 ,  -109.9725),
                (+399.7389 ,  +31.6152  ,  +372.6323 ,  -573.5724 ,  -505.0045 ,  -375.2316 ,  +454.9046 ,  -217.4422 ,  -434.9173 ,  -454.7752 ,  -597.9571 ,  -47.6864  ,  +326.4957 ,  +545.6246 ,  +437.5032),
                (+988.9931 ,  -654.4159 ,  -767.1182 ,  -91.8658  ,  +588.7146 ,  +184.9196 ,  +115.9319 ,  -52.5935  ,  -418.1644 ,  +633.3638 ,  +736.8064 ,  -967.5157 ,  -107.9049 ,  +352.9680 ,  -70.6195),
                (-962.4392 ,  -453.7332 ,  -451.0608 ,  +341.5584 ,  +394.6056 ,  -923.0757 ,  -746.9843 ,  -965.4329 ,  -947.0617 ,  -773.1573 ,  +730.2412 ,  +375.6009 ,  +915.0743 ,  +359.0937 ,  -399.3825),
                (-296.7620 ,  +410.3270 ,  -350.8748 ,  -780.7118 ,  -175.8489 ,  +309.5648 ,  +423.9918 ,  +969.7167 ,  -244.8730 ,  -373.5084 ,  -604.9683 ,  -897.4527 ,  -534.2310 ,  -281.9905 ,  -869.7215),
                (+911.3894 ,  -989.9314 ,  +749.8109 ,  +137.4197 ,  -586.6819 ,  -153.6740 ,  -380.2316 ,  +916.4085 ,  -999.7195 ,  -893.5339 ,  -89.0466  ,  -35.9522  ,  -90.6951  ,  +776.4369 ,  -537.6624),
                (+557.6844 ,  +582.3857 ,  -284.0375 ,  -93.6579  ,  +832.2125 ,  +417.9069 ,  +597.3379 ,  +617.9041 ,  +256.8092 ,  -174.4282 ,  +489.6727 ,  +657.5248 ,  +216.8542 ,  -751.1718 ,  -398.9243),
                (-520.3224 ,  +76.4691  ,  +440.5944 ,  -55.3567  ,  -53.8396  ,  -936.0467 ,  -609.4023 ,  -120.8625 ,  +679.0885 ,  -647.1835 ,  -864.9598 ,  -470.3704 ,  +153.9019 ,  +844.2103 ,  +567.2804),
                (-332.0693 ,  +332.9506 ,  -955.3792 ,  -988.5815 ,  -603.7497 ,  +691.1748 ,  -20.4197  ,  +436.6508 ,  +31.5305  ,  -791.6890 ,  -796.3053 ,  +309.7812 ,  -138.6499 ,  +866.3018 ,  -657.6447),
                (-501.6849 ,  +249.9836 ,  +389.5901 ,  -239.3963 ,  -701.4142 ,  -25.8607  ,  -56.0275  ,  +531.3697 ,  -133.7667 ,  -973.9617 ,  +480.1729 ,  +776.4012 ,  +413.0529 ,  -456.9224 ,  -772.2399),
                (-708.9703 ,  +374.4682 ,  +688.2557 ,  -818.8122 ,  +111.4564 ,  -770.8261 ,  -838.9334 ,  -483.0598 ,  +335.7136 ,  +650.4290 ,  -957.3401 ,  -773.5307 ,  +539.6006 ,  +321.6839 ,  +366.9738),
                )
        CharacterTestChecker.create_class_fixtures(
                cls,
                matrix_type=charmatrixmodel.ContinuousCharacterMatrix,
                states_lists=states_lists,
                labels=labels)
+
+
diff --git a/dendropy/test/support/standard_file_test_datasets.py b/dendropy/test/support/standard_file_test_datasets.py
new file mode 100644
index 0000000..9b26309
--- /dev/null
+++ b/dendropy/test/support/standard_file_test_datasets.py
@@ -0,0 +1,142 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+import dendropy
+import collections
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import standard_file_test_chars
+
class StandardSingleTaxonNamespaceDataSet(curated_test_tree.CuratedTestTree):
    """Checks a DataSet whose elements all share a single taxon namespace.

    The data set is expected to hold the four standard character matrices
    (built by ``standard_file_test_chars``) and seven tree lists, each
    containing the three curated test trees.
    """

    @staticmethod
    def build(cls):
        """Build the class fixtures of all four character checkers."""
        # NOTE(review): declared @staticmethod yet takes a ``cls`` argument
        # (unused here); callers apparently pass the class explicitly --
        # confirm before changing the signature.
        for checker in (
                standard_file_test_chars.DnaTestChecker,
                standard_file_test_chars.RnaTestChecker,
                standard_file_test_chars.ProteinTestChecker,
                standard_file_test_chars.Standard01234TestChecker,
                ):
            checker.build()

    def verify_chars(self, ds):
        """Check the character matrices of ``ds`` against the fixtures."""
        self.assertEqual(len(ds.taxon_namespaces), 1)
        tns = ds.taxon_namespaces[0]
        # One checker per expected character matrix, in matrix order.
        checkers = (
                standard_file_test_chars.RnaTestChecker,
                standard_file_test_chars.ProteinTestChecker,
                standard_file_test_chars.Standard01234TestChecker,
                standard_file_test_chars.DnaTestChecker,
                )
        self.assertEqual(len(ds.char_matrices), len(checkers))
        for char_matrix, checker in zip(ds.char_matrices, checkers):
            self.assertIs(char_matrix.taxon_namespace, tns)
            if checker.matrix_type is dendropy.StandardCharacterMatrix:
                # Standard-data expectations depend on the matrix's own
                # state alphabet, so rebuild the label/sequence map first.
                checker.create_class_fixtures_label_sequence_map_based_on_state_alphabet(
                        checker,
                        char_matrix.default_state_alphabet)
            standard_file_test_chars.general_char_matrix_checker(
                    self,
                    char_matrix,
                    checker,
                    check_taxon_annotations=self.check_taxon_annotations,
                    check_matrix_annotations=self.check_matrix_annotations,
                    check_sequence_annotations=self.check_sequence_annotations,
                    check_column_annotations=self.check_column_annotations,
                    check_cell_annotations=self.check_cell_annotations,)

    def verify_trees(self, ds):
        """Check the tree lists of ``ds`` against the curated test tree."""
        self.assertEqual(len(ds.taxon_namespaces), 1)
        tns = ds.taxon_namespaces[0]
        self.assertEqual(len(ds.tree_lists), 7)
        # Every tree list is expected to hold the same three labeled trees.
        expected_labels = (
                'the first tree',
                'the SECOND tree',
                'The Third Tree',
                )
        for tree_list in ds.tree_lists:
            self.assertIs(tree_list.taxon_namespace, tns)
            self.assertEqual(len(tree_list), len(expected_labels))
            for tree, expected_label in zip(tree_list, expected_labels):
                self.assertEqual(tree.label, expected_label)
                self.verify_curated_tree(
                        tree=tree,
                        suppress_internal_node_taxa=False,
                        suppress_leaf_node_taxa=False,
                        suppress_edge_lengths=False,
                        node_taxon_label_map=None)

    def verify_dataset(self, ds):
        """Run both the character-matrix and the tree-list checks."""
        self.verify_chars(ds)
        self.verify_trees(ds)
+
class MultipleTaxonNamespaceDataSet(object):
    """Expected composition of test data sets spanning three taxon
    namespaces ("X1", "X2", "X3"), each with its own taxa, character
    matrices, and tree lists.
    """

    # Expected taxon labels, keyed by taxon-namespace label.
    expected_taxa = collections.OrderedDict()
    expected_taxa["X1"] = ("x1.1", "x1.2", "x1.3", "x1.4", "x1.5", )
    expected_taxa["X2"] = ("x2.1", "x2.2", "x2.3", "x2.4", )
    expected_taxa["X3"] = ("x3.1", "x3.2", "x3.3", "x3.4", "x3.5", "x3.6", )
    # Expected character-matrix labels per namespace (none for "X3").
    expected_chars = collections.OrderedDict()
    expected_chars["X1"] = ("x1.chars1", "x1.chars2", "x1.chars3", )
    expected_chars["X2"] = ("x2.chars1", "x2.chars2")
    # Expected tree-list labels per namespace (none for "X3").
    # NOTE(review): "x1.trees3" appears twice; possibly a typo for
    # "x1.trees2" -- only the element count is used in this module, so the
    # value is left unchanged.
    expected_trees = collections.OrderedDict()
    expected_trees["X1"] = ("x1.trees1", "x1.trees3", "x1.trees3",)
    expected_trees["X2"] = ("x2.trees1", "x2.trees2")

    def verify_attached_taxon_namespace_written(self, ds, taxon_namespace):
        """Check a data set written while attached to ``taxon_namespace``:
        it must contain exactly that namespace's expected number of
        character matrices and tree lists.
        """
        self.assertEqual(len(ds.taxon_namespaces), 1)
        expected_tns_label = taxon_namespace.label
        # self.assertEqual(ds.taxon_namespaces[0].label, expected_tns_label)
        # Bug fix: the original compared the character matrices against the
        # expected *tree list* count and vice versa (latent only because
        # the current tables happen to have matching counts per namespace).
        expected_cms = MultipleTaxonNamespaceDataSet.expected_chars.get(expected_tns_label, [])
        self.assertEqual(len(ds.char_matrices), len(expected_cms))
        expected_tlsts = MultipleTaxonNamespaceDataSet.expected_trees.get(expected_tns_label, [])
        self.assertEqual(len(ds.tree_lists), len(expected_tlsts))

    def verify_attached_taxon_namespace(self, ds, attached_taxon_namespace):
        """Check a data set read with an attached (unified) namespace: all
        taxa pool into ``attached_taxon_namespace``, and every element and
        sub-element references that namespace.
        """
        self.assertEqual(len(ds.taxon_namespaces), 1)
        self.assertIs(ds.taxon_namespaces[0], attached_taxon_namespace)
        self.assertEqual(len(ds.taxon_namespaces[0]),
                sum(len(v) for v in MultipleTaxonNamespaceDataSet.expected_taxa.values()))
        self.assertEqual(len(ds.char_matrices),
                sum(len(v) for v in MultipleTaxonNamespaceDataSet.expected_chars.values()))
        self.assertEqual(len(ds.tree_lists),
                sum(len(v) for v in MultipleTaxonNamespaceDataSet.expected_trees.values()))
        for block in (ds.tree_lists, ds.char_matrices):
            for item in block:
                self.assertIs(item.taxon_namespace, attached_taxon_namespace)
                # Elements of tree lists (trees) presumably carry their own
                # taxon_namespace while rows of character matrices do not;
                # the hasattr guard covers both cases -- confirm.
                if hasattr(item[0], "taxon_namespace"):
                    for subitem in item:
                        self.assertIs(subitem.taxon_namespace, attached_taxon_namespace)

    def verify_unrestricted(self, ds):
        """Check a data set read without namespace restrictions: one
        namespace per expected label, each with its own taxa, character
        matrices, and tree lists.
        """
        self.assertEqual(len(ds.taxon_namespaces), len(MultipleTaxonNamespaceDataSet.expected_taxa))
        self.assertEqual(len(ds.char_matrices),
                sum(len(v) for v in MultipleTaxonNamespaceDataSet.expected_chars.values()))
        self.assertEqual(len(ds.tree_lists),
                sum(len(v) for v in MultipleTaxonNamespaceDataSet.expected_trees.values()))
        for tns, expected_tns_label in zip(ds.taxon_namespaces, MultipleTaxonNamespaceDataSet.expected_taxa):
            self.assertEqual(tns.label, expected_tns_label)
            expected_tns = MultipleTaxonNamespaceDataSet.expected_taxa[expected_tns_label]
            self.assertEqual(len(tns), len(expected_tns))
            for taxon, expected_label in zip(tns, expected_tns):
                self.assertEqual(taxon.label, expected_label)
            cms = [char_matrix for char_matrix in ds.char_matrices if char_matrix.taxon_namespace is tns]
            expected_cms = MultipleTaxonNamespaceDataSet.expected_chars.get(expected_tns_label, [])
            self.assertEqual(len(cms), len(expected_cms))
            tlst = [tree_list for tree_list in ds.tree_lists if tree_list.taxon_namespace is tns]
            expected_tlsts = MultipleTaxonNamespaceDataSet.expected_trees.get(expected_tns_label, [])
            self.assertEqual(len(tlst), len(expected_tlsts))
+
+
+
diff --git a/dendropy/test/support/standard_file_test_trees.py b/dendropy/test/support/standard_file_test_trees.py
new file mode 100644
index 0000000..d8d2d9e
--- /dev/null
+++ b/dendropy/test/support/standard_file_test_trees.py
@@ -0,0 +1,487 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+import sys
+import json
+import random
+import dendropy
+import copy
+from dendropy.test.support import pathmap
+if sys.hexversion < 0x03040000:
+    from dendropy.utility.filesys import pre_py34_open as open
+from dendropy.utility.messaging import get_logger
+_LOG = get_logger(__name__)
+
# Base names of the tree test-data files exercised by these checkers.
tree_file_titles = [
    "dendropy-test-trees-multifurcating-rooted-annotated",
    "dendropy-test-trees-multifurcating-rooted",
    "dendropy-test-trees-multifurcating-unrooted",
    "dendropy-test-trees-n10-rooted-treeshapes",
    "dendropy-test-trees-n12-x2",
    "dendropy-test-trees-n14-unrooted-treeshapes",
    "dendropy-test-trees-n33-unrooted-annotated-x10a",
    "dendropy-test-trees-n33-unrooted-x10a",
    "dendropy-test-trees-n33-unrooted-x10b",
    "dendropy-test-trees-n33-unrooted-x100a",
]

# Maps schema name -> filename extension of the corresponding data file.
schema_extension_map = {
    "nexml" : "nexml",
    "newick" : "newick",
    "nexus" : "nexus",
    "json" : "json",
    "nexus-metadata-comments" : "nexus-metadata-comments.json",
}

# Module-level caches populated by setup_module() below:
# schema -> title -> filesystem path of the data file.
_TREE_FILEPATHS = {}
# title -> parsed JSON reference description of the trees.
_TREE_REFERENCES = {}
# title (annotated files only) -> parsed JSON metadata-comment reference.
_NEXUS_METADATA_COMMENTS = {}
+
def setup_module():
    """Populate the module-level caches of test-data file paths and
    parsed JSON reference data (runs once, at import time).
    """
    for schema, extension in schema_extension_map.items():
        paths = {}
        for title in tree_file_titles:
            filename = "{}.{}".format(title, extension)
            paths[title] = pathmap.tree_source_path(filename)
        _TREE_FILEPATHS[schema] = paths
    for title in tree_file_titles:
        with open(_TREE_FILEPATHS["json"][title]) as src:
            _TREE_REFERENCES[title] = json.load(src)
        # Only annotated data files have a metadata-comment reference.
        if "annotated" in title:
            with open(_TREE_FILEPATHS["nexus-metadata-comments"][title]) as src:
                _NEXUS_METADATA_COMMENTS[title] = json.load(src)
setup_module()
+
+class StandardTestTreesChecker(object):
+
    def preprocess_tree_to_be_checked(self, tree):
        """Normalize ``tree`` before comparison; no-op here (subclasses
        may override to, e.g., relabel nodes)."""
        pass
+
+    def compare_annotations_to_json_metadata_dict(self,
+            item,
+            expected_metadata):
+        item_annotations_as_dict = item.annotations.values_as_dict()
+        if self.__class__.is_coerce_metadata_values_to_string:
+            for k in expected_metadata:
+                v = expected_metadata[k]
+                if isinstance(v, list):
+                    v = [str(i) for i in v]
+                elif isinstance(v, tuple):
+                    v = (str(i) for i in v)
+                else:
+                    v = str(v)
+                expected_metadata[k] = v
+            for k in item_annotations_as_dict:
+                v = item_annotations_as_dict[k]
+                if isinstance(v, list):
+                    v = [str(i) for i in v]
+                elif isinstance(v, tuple):
+                    v = (str(i) for i in v)
+                else:
+                    v = str(v)
+                item_annotations_as_dict[k] = v
+
+        # # for annote in item.annotations:
+        # #     print("{}: {}".format(annote.name, annote.value))
+        # # k1 = sorted(list(item_annotations_as_dict.keys()))
+        # # k2 = sorted(list(expected_metadata.keys()))
+        # # print("--")
+        # # for k in k1:
+        # #     print("'{}':'{}'".format(k, item_annotations_as_dict[k]))
+        # # print("--")
+        # # for k in k2:
+        # #     print("'{}':'{}'".format(k, expected_metadata[k]))
+        # # self.assertEqual(len(k1), len(k2))
+        # # self.assertEqual(set(k1), set(k2))
+        # for key in set(item_annotations_as_dict.keys()):
+        #     if item_annotations_as_dict[key] != expected_metadata[key]:
+        #         v = expected_metadata[key]
+        #         # if isinstance(v, list):
+        #         #     print("{}: {}".format(v, [type(i) for i in v]))
+        #         # elif isinstance(v, tuple):
+        #         #     print("{}: {}".format(v, (type(i) for i in v)))
+        #         # else:
+        #         #     print("{}: {}".format(v, type(v)))
+        #         print("**** {}:\t\t{} ({}) \t\t{} ({})".format(
+        #             key,
+        #             item_annotations_as_dict[key],
+        #             type(item_annotations_as_dict[key]),
+        #             expected_metadata[key],
+        #             type(expected_metadata[key]),
+        #             ))
+
+        self.assertEqual(item_annotations_as_dict, expected_metadata)
+
+    def check_metadata_annotations(self,
+            item,
+            reference):
+        expected_annotations = reference["metadata"]
+        self.compare_annotations_to_json_metadata_dict(item, expected_annotations)
+
+    def check_comments(self, item, reference):
+        reference_comments = list(reference["comments"])
+        item_comments = list(item.comments)
+        self.assertEqualUnorderedSequences(item_comments, reference_comments)
+
+    def compare_to_reference_by_title_and_index(self,
+            tree,
+            tree_file_title,
+            reference_tree_idx):
+        ref_tree = self.tree_references[tree_file_title][str(reference_tree_idx)]
+        self.compare_to_reference_tree(tree, ref_tree)
+
    def compare_to_reference_tree(self, tree, ref_tree):
        """Assert that ``tree`` matches the JSON reference description
        ``ref_tree`` (rooting state, comments, metadata, and, node by
        node, labels, taxa, parent/child topology, and edge attributes).
        """
        self.assertIs(tree.is_rooted, ref_tree["is_rooted"])
        if self.__class__.is_check_comments:
            self.check_comments(tree, ref_tree)
        self.check_metadata_annotations(
                item=tree,
                reference=ref_tree)
        # Collected for bookkeeping; obs_taxon_labels/obs_edge_labels and
        # visited_nodes are not examined further within this method.
        obs_taxon_labels = []
        obs_node_labels = []
        obs_edge_labels = []
        visited_nodes = []
        self.preprocess_tree_to_be_checked(tree)
        for node_idx, node in enumerate(tree):
            visited_nodes.append(node)
            # Reference nodes are keyed by node label.
            ref_node = ref_tree["nodes"][node.label]
            ref_node_label = ref_node["label"]
            ref_node_taxon_label = ref_node["taxon_label"]
            self.assertEqual(node.label, ref_node_label)
            ref_node_children = ref_node["children"]
            self.assertEqual(len(node._child_nodes), len(ref_node_children))
            if node.taxon:
                self.assertEqual(node.taxon.label, ref_node_taxon_label)
                obs_taxon_labels.append(node.taxon.label)
            else:
                # A node without a taxon must be recorded as such.
                self.assertEqual(ref_node_taxon_label, None)
            obs_node_labels.append(node.label)
            # Internal vs. leaf status must agree with the child list.
            if ref_node_children:
                self.assertTrue(node.is_internal())
                self.assertFalse(node.is_leaf())
            else:
                self.assertFalse(node.is_internal())
                self.assertTrue(node.is_leaf())

            if node.parent_node is None:
                self.assertEqual(ref_node["parent"], None)
            else:
                self.assertEqual(node.parent_node.label, ref_node["parent"])
                # NOTE(review): the parent's *taxon* label is also compared
                # against ref_node["parent"] -- this assumes node label and
                # taxon label coincide in the reference data; confirm.
                if node.parent_node.taxon:
                    self.assertEqual(node.parent_node.taxon.label, ref_node["parent"])
            # Children compared as a set: order is not significant.
            child_labels = [ch.label for ch in node.child_node_iter()]
            self.assertEqual(len(child_labels), len(ref_node["children"]))
            self.assertEqual(set(child_labels), set(ref_node["children"]))

            # Reference edges are keyed by edge label.
            edge = node.edge
            ref_edge = ref_tree["edges"][edge.label]
            if edge.tail_node is None:
                self.assertEqual(ref_edge["tail_node"], None)
            else:
                self.assertEqual(edge.tail_node.label, ref_edge["tail_node"])
            self.assertEqual(edge.head_node.label, ref_edge["head_node"])
            # Edge lengths compared to 3 decimal places.
            self.assertAlmostEqual(node.edge.length, float(ref_edge["length"]), 3)
+
+    def compare_to_reference_by_title_and_index2(self,
+            tree,
+            tree_file_title,
+            reference_tree_idx,
+            suppress_internal_node_taxa,
+            suppress_leaf_node_taxa,
+            is_metadata_extracted,
+            is_coerce_metadata_values_to_string,
+            is_distinct_nodes_and_edges_representation,
+            is_taxa_managed_separately_from_tree):
+        ref_tree = self.tree_references[tree_file_title][str(reference_tree_idx)]
+        self.assertIs(tree.is_rooted, ref_tree["is_rooted"])
+        self.check_comments(
+                tree,
+                ref_tree)
+        self.check_metadata_annotations(
+                item=tree,
+                reference=ref_tree,
+                is_coerce_metadata_values_to_string=False)
+        obs_taxa = []
+        obs_node_labels = []
+        obs_edge_labels = []
+        visited_nodes = []
+        for node in tree:
+            if node.taxon is not None:
+                node.canonical_label = node.taxon.label
+            else:
+                node.canonical_label = node.label
+        for node_idx, node in enumerate(tree):
+            visited_nodes.append(node)
+            ref_node = ref_tree["nodes"][node.label]
+            ref_node_label = ref_node["label"]
+            self.assertEqual(node.label, ref_node_label)
+            ref_edge = ref_tree["edges"][node.edge.label]
+            self.assertEqual(node.edge.label, ref_edge["label"])
+            ref_node_children = ref_node["children"]
+            if ref_node_children:
+                self.assertTrue(node.is_internal())
+                self.assertFalse(node.is_leaf())
+                self.assertEqual(len(node._child_nodes), len(ref_node_children))
+                if suppress_internal_node_taxa:
+                    self.assertEqual(node.label, ref_node_label)
+                    self.assertIs(node.taxon, None)
+                    obs_node_labels.append(node.label)
+                else:
+                    if not self.__class__.is_distinct_taxa_and_labels_on_tree:
+                        self.assertIsNot(node.taxon, None)
+                        self.assertEqual(node.taxon.label, ref_node_label)
+                        self.assertIs(ch.label, None)
+                        obs_taxa.append(node.taxon)
+                    else:
+                        obs_taxa.append(node.label)
+                        obs_node_labels.append(node.label)
+            else:
+                self.assertFalse(node.is_internal())
+                self.assertTrue(node.is_leaf())
+                self.assertEqual(len(node._child_nodes), len(ref_node_children))
+                if suppress_leaf_node_taxa:
+                    self.assertEqual(node.label, ref_node_label)
+                    self.assertIs(node.taxon, None)
+                    obs_node_labels.append(node.label)
+                else:
+                    self.assertIsNot(node.taxon, None)
+                    self.assertEqual(node.taxon.label, ref_node_label)
+                    if not self.__class__.is_distinct_taxa_and_labels_on_tree:
+                        self.assertIs(ch.label, None)
+                    obs_taxa.append(node.taxon)
+            if node.parent_node is not None:
+                if node.parent_node.is_internal:
+                    if suppress_internal_node_taxa:
+                        self.assertEqual(node.parent_node.label, ref_node["parent"])
+                        self.assertIs(node.parent_node.taxon, None)
+                    else:
+                        if not self.__class__.is_distinct_taxa_and_labels_on_tree:
+                            self.assertEqual(node.parent_node.taxon.label, ref_node["parent"])
+                            self.assertIs(node.parent_node.label, None)
+                        else:
+                            self.assertEqual(node.parent_node.label, ref_node["parent"])
+                else:
+                    if suppress_leaf_node_taxa:
+                        self.assertEqual(node.parent_node.label, ref_node["parent"])
+                        self.assertIs(node.parent_node.taxon, None)
+                    else:
+                        self.assertEqual(node.parent_node.taxon.label, ref_node["parent"])
+                        self.assertIs(node.parent_node.label, None)
+            else:
+                self.assertEqual(ref_node["parent"], "None")
+
+            child_labels = []
+            for ch in node.child_node_iter():
+                if ch.is_internal():
+                    if suppress_internal_node_taxa:
+                        self.assertIs(ch.taxon, None)
+                        child_labels.append(ch.label)
+                    else:
+                        if not self.__class__.is_distinct_taxa_and_labels_on_tree:
+                            self.assertIsNot(ch.taxon, None)
+                            child_labels.append(ch.taxon.label)
+                            self.assertIs(ch.label, None)
+                        else:
+                            self.assertEqual(node.label, ref_node_label)
+                            child_labels.append(ch.label)
+                else:
+                    if suppress_leaf_node_taxa:
+                        self.assertIs(ch.taxon, None)
+                        child_labels.append(ch.label)
+                    else:
+                        self.assertIsNot(ch.taxon, None)
+                        child_labels.append(ch.taxon.label)
+                        if not self.__class__.is_distinct_taxa_and_labels_on_tree:
+                            self.assertIs(ch.label, None)
+            self.assertEqual(len(child_labels), len(ref_node["children"]))
+            self.assertEqual(set(child_labels), set(ref_node["children"]))
+            edge = node.edge
+            ref_edge = ref_tree["edges"][node.canonical_label]
+            if edge.tail_node is None:
+                self.assertEqual(ref_edge["tail_node"], "None")
+            else:
+                self.assertEqual(edge.tail_node.canonical_label, ref_edge["tail_node"])
+            self.assertEqual(edge.head_node.canonical_label, ref_edge["head_node"])
+            self.assertAlmostEqual(node.edge.length, float(ref_edge["length"]), 3)
+
+            # This hackery because NEWICK/NEXUS cannot distinguish between
+            # node and edge comments, and everything gets lumped in as a
+            # node comment
+            if not is_distinct_nodes_and_edges_representation:
+                node.comments += edge.comments
+                d = {
+                        "comments": ref_node["comments"] + ref_edge["comments"],
+                        "metadata_comments": ref_node["metadata_comments"] + ref_edge["metadata_comments"],
+                        }
+                self.check_comments(node, d, is_metadata_extracted)
+                if is_metadata_extracted:
+                    obs_tuples = []
+                    for o in (node, edge):
+                        for a in o.annotations:
+                            # print("++ {}: {} = {} ({})".format(type(o), a.name, a.value, type(a.value)))
+                            v = a.value
+                            if isinstance(v, list):
+                                v = tuple(v)
+                            obs_tuples.append( (a.name, v) )
+                    exp_tuples = []
+                    for idx, o in enumerate((ref_node["metadata"], ref_edge["metadata"])):
+                        for k in o:
+                            v = o[k]
+                            # print("-- {}{}: {} = {}".format(type(o), idx+1, k, v))
+                            if isinstance(v, list):
+                                if is_coerce_metadata_values_to_string:
+                                    v = tuple(str(vx) for vx in v)
+                                else:
+                                    v = tuple(v)
+                            elif is_coerce_metadata_values_to_string:
+                                v = str(v)
+                            # print("-- {}{}: {} = {} ({})".format(type(o), idx+1, k, v, type(v)))
+                            exp_tuples.append( (k, v) )
+                    self.assertEqualUnorderedSequences(tuple(obs_tuples), tuple(exp_tuples))
+            else:
+                if self.__class__.is_check_comments:
+                    self.check_comments(node, ref_node, is_metadata_extracted)
+                    self.check_comments(edge, ref_edge, is_metadata_extracted)
+                if self.__class__.is_metadata_extracted:
+                    self.check_metadata_annotations(
+                            item=node,
+                            reference=ref_node,
+                            is_coerce_metadata_values_to_string=is_coerce_metadata_values_to_string)
+                    self.check_metadata_annotations(
+                            item=edge,
+                            reference=ref_edge,
+                            is_coerce_metadata_values_to_string=is_coerce_metadata_values_to_string)
+        self.assertEqual(len(visited_nodes), len(ref_tree["nodeset"]))
+        if self.__class__.is_taxa_managed_separately_from_tree:
+            self.assertEqual(len(obs_taxa), len(tree.taxon_namespace))
+            self.assertEqual(set(obs_taxa), set(tree.taxon_namespace))
+            obs_node_labels.extend([t.label for t in tree.taxon_namespace])
+        elif not self.__class__.is_distinct_taxa_and_labels_on_tree:
+            # node labels may have been interpreted as taxa depending on read mode
+            obs_node_labels.extend([t.label for t in tree.taxon_namespace if t.label not in obs_node_labels])
+        self.assertEqual(len(obs_node_labels), len(ref_tree["nodeset"]))
+        self.assertEqual(set(obs_node_labels), set(ref_tree["nodeset"]))
+
+    def verify_standard_trees(self,
+            tree_list,
+            tree_file_title,
+            tree_offset=0):
+        tree_reference = self.tree_references[tree_file_title]
+        expected_number_of_trees = tree_reference["num_trees"]
+        if tree_offset < 0:
+            if abs(tree_offset) > expected_number_of_trees:
+                tree_offset = 0
+            else:
+                tree_offset = expected_number_of_trees + tree_offset
+        self.assertEqual(len(tree_list), expected_number_of_trees-tree_offset)
+        # for tree_idx, (tree, ref_tree) in enumerate(zip(tree_list, tree_directory[tree_file_title])):
+        for tree_idx, tree in enumerate(tree_list):
+            _LOG.debug("{}: {}".format(tree_file_title, tree_idx))
+            self.assertIs(tree.taxon_namespace, tree_list.taxon_namespace)
+            self.compare_to_reference_by_title_and_index(
+                    tree=tree,
+                    tree_file_title=tree_file_title,
+                    reference_tree_idx=tree_idx + tree_offset)
+
class NewickTestTreesChecker(StandardTestTreesChecker):
    # Checker for trees read from NEWICK sources: rewrites the deep-copied
    # reference descriptions to match how the reader assigns or suppresses
    # taxa under the configured settings.

    @staticmethod
    def create_class_fixtures(cls,
            schema="newick",
            suppress_internal_node_taxa=True,
            suppress_leaf_node_taxa=False,
            is_metadata_extracted=False,
            is_coerce_metadata_values_to_string=True,
            is_taxa_managed_separately_from_tree=False,
            is_check_comments=True):
        """
        Installs schema fixtures on ``cls`` and adjusts the expected
        ``taxon_label`` of every reference node so it reflects the taxon
        suppression settings the trees will be read with.
        """
        cls.schema = schema
        cls.schema_tree_filepaths = copy.deepcopy(_TREE_FILEPATHS[cls.schema])
        cls.tree_references = copy.deepcopy(_TREE_REFERENCES)
        for tree_file_title in cls.tree_references:
            for reference_tree_idx in range(cls.tree_references[tree_file_title]["num_trees"]):
                ref_tree = cls.tree_references[tree_file_title][str(reference_tree_idx)]
                for ref_node_label in ref_tree["nodes"]:
                    ref_node = ref_tree["nodes"][ref_node_label]
                    ref_node_taxon_label = ref_node["taxon_label"]  # NOTE(review): unused local — confirm it can be dropped
                    if ref_node["children"]:
                        # Internal node: expected taxon label tracks the
                        # suppress_internal_node_taxa setting.
                        if suppress_internal_node_taxa and ref_node["taxon_label"]:
                            ref_node["taxon_label"] = None
                        elif not suppress_internal_node_taxa and ref_node["taxon_label"] is None:
                            ref_node["taxon_label"] = ref_node["label"]
                    if not ref_node["children"]:
                        # Leaf node.
                        # NOTE(review): this unconditional clearing makes the
                        # `if` branch below unreachable and forces the `elif`
                        # to always substitute ref_node["label"] when leaf
                        # taxa are not suppressed — confirm this is intended.
                        ref_node["taxon_label"] = None
                        if suppress_leaf_node_taxa and ref_node["taxon_label"]:
                            ref_node["taxon_label"] = None
                        elif not suppress_leaf_node_taxa and ref_node["taxon_label"] is None:
                            ref_node["taxon_label"] = ref_node["label"]
                for ref_edge_label in ref_tree["edges"]:
                    ref_edge = ref_tree["edges"][ref_edge_label]
                    # Expected edge label is always the string "None" for
                    # this schema.
                    ref_edge["label"] = "None"
        cls.suppress_internal_node_taxa = suppress_internal_node_taxa
        cls.suppress_leaf_node_taxa = suppress_leaf_node_taxa
        cls.is_metadata_extracted = is_metadata_extracted
        cls.is_coerce_metadata_values_to_string = is_coerce_metadata_values_to_string
        cls.is_taxa_managed_separately_from_tree = is_taxa_managed_separately_from_tree
        cls.is_check_comments = is_check_comments

    def preprocess_tree_to_be_checked(self, tree):
        """
        Asserts the taxon suppression settings hold on each node, then
        copies the taxon label onto the node and the node label onto its
        edge so comparisons see a single canonical label.
        """
        for nd in tree:
            if nd.is_internal():
                if self.__class__.suppress_internal_node_taxa:
                    self.assertIs(nd.taxon, None)
                else:
                    self.assertIsNot(nd.taxon, None)
            else:
                if self.__class__.suppress_leaf_node_taxa:
                    self.assertIs(nd.taxon, None)
                else:
                    self.assertIsNot(nd.taxon, None)
            if nd.taxon is not None:
                nd.label = nd.taxon.label
            # Mirror the node label onto the edge for comparison.
            nd.edge.label = nd.label
+
class NexusTestTreesChecker(NewickTestTreesChecker):
    """Tree checker configured with NEXUS-appropriate defaults."""

    @staticmethod
    def create_class_fixtures(cls,
            schema="nexus",
            suppress_internal_node_taxa=True,
            suppress_leaf_node_taxa=False,
            is_metadata_extracted=True,
            is_coerce_metadata_values_to_string=True,
            is_taxa_managed_separately_from_tree=True,
            is_check_comments=True):
        # NEXUS reuses the NEWICK fixture machinery; only the defaults
        # differ (metadata extracted, taxa managed via a separate namespace).
        settings = dict(
                schema=schema,
                suppress_internal_node_taxa=suppress_internal_node_taxa,
                suppress_leaf_node_taxa=suppress_leaf_node_taxa,
                is_metadata_extracted=is_metadata_extracted,
                is_coerce_metadata_values_to_string=is_coerce_metadata_values_to_string,
                is_taxa_managed_separately_from_tree=is_taxa_managed_separately_from_tree,
                is_check_comments=is_check_comments,
                )
        NewickTestTreesChecker.create_class_fixtures(cls, **settings)
+
class NexmlTestTreesChecker(StandardTestTreesChecker):
    """Tree checker configured for the NeXML schema."""

    @staticmethod
    def create_class_fixtures(cls):
        # NeXML settings: values keep their native types (no string
        # coercion), taxa are managed separately, and comment checking
        # is disabled.
        cls.schema = "nexml"
        cls.schema_tree_filepaths = copy.deepcopy(_TREE_FILEPATHS["nexml"])
        cls.tree_references = copy.deepcopy(_TREE_REFERENCES)
        cls.is_coerce_metadata_values_to_string = False
        cls.is_taxa_managed_separately_from_tree = True
        cls.is_check_comments = False
+
diff --git a/dendropy/test/test_asciitree.py b/dendropy/test/test_asciitree.py
new file mode 100644
index 0000000..f0f6b2e
--- /dev/null
+++ b/dendropy/test/test_asciitree.py
@@ -0,0 +1,51 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests of ascii tree plots.
+"""
+
+import unittest
+from dendropy.test.support import curated_test_tree
+from dendropy.utility.messaging import get_logger
+_LOG = get_logger(__name__)
+
class AsciiTreeTest(
        curated_test_tree.CuratedTestTree,
        unittest.TestCase):
    """Smoke tests: render the curated test tree as ASCII under each metric."""

    def setUp(self):
        tree, _, _, _ = self.get_tree(
                suppress_internal_node_taxa=False,
                suppress_leaf_node_taxa=False)
        self.tree = tree

    def _render(self, metric):
        # Logged rather than asserted: these are render-does-not-crash checks.
        _LOG.debug(self.tree.as_ascii_plot(plot_metric=metric))

    def test_plot_by_depth(self):
        self._render('depth')

    def test_plot_by_level(self):
        self._render('level')

    def test_plot_by_age(self):
        self._render('age')

    def test_plot_by_length(self):
        self._render('length')
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_birthdeath.py b/dendropy/test/test_birthdeath.py
new file mode 100644
index 0000000..7e32c80
--- /dev/null
+++ b/dendropy/test/test_birthdeath.py
@@ -0,0 +1,92 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests of birth-death model fitting.
+"""
+
+import unittest
+import dendropy
+from dendropy.test.support.mockrandom import MockRandom
+from dendropy.test.support import pathmap
+from dendropy.model import birthdeath
+
class PureBirthModelEstimationTestCase(unittest.TestCase):
    """Checks pure-birth (Yule) model fits against precomputed reference values."""

    def testBasicEst(self):
        # Expected (birth rate, log-likelihood) for each reference tree,
        # in file order.
        expected_results = (
            (0.02879745490817826186758, -59.41355682054444287132355),
            (0.03074708092192806122012, -57.38280732060526645454956),
            (0.02539588437187430269848, -63.31025321526630023072357),
            (0.02261951969802362960582, -66.89924384677527768872096),
            (0.02804607815688910446572, -60.23314120509648716961237),
            (0.02748663302756114423797, -60.85775993426526042640035),
            (0.02816256618562208019485, -60.10465085978295007862471),
            (0.03592126646048716259729, -52.56123967307649991198559),
            (0.02905144990609926855529, -59.14133401672411594063306),
            (0.02703739196351075124714, -61.36860953277779628933786),
            (0.01981322730236481297061, -71.00561162515919022553135),
        )
        trees = dendropy.TreeList.get_from_path(
                pathmap.tree_source_path("pythonidae.reference-trees.newick"), "newick")
        self.assertEqual(len(trees), len(expected_results))
        for tree, (exp_rate, exp_ln_like) in zip(trees, expected_results):
            # Fit once from the tree object and once from its internal node
            # ages: both entry points must agree with the reference values.
            fits = (
                birthdeath.fit_pure_birth_model(tree=tree, ultrametricity_precision=1e-5),
                birthdeath.fit_pure_birth_model(internal_node_ages=tree.internal_node_ages(ultrametricity_precision=1e-5)),
            )
            for fit in fits:
                self.assertAlmostEqual(fit["birth_rate"], exp_rate, 5)
                self.assertAlmostEqual(fit["log_likelihood"], exp_ln_like, 5)
+
class BirthDeathTreeTest(unittest.TestCase):
    """Checks that birth-death simulations produce the requested number of tips."""

    def _check_tree_sizes(self, death_rate, max_leaves, gsa_multiplier=None):
        # Each generated tree must be structurally valid and carry exactly
        # the requested number of leaves. MockRandom keeps runs deterministic.
        rng = MockRandom()
        for num_leaves in range(2, max_leaves):
            kwargs = dict(birth_rate=1.0, death_rate=death_rate,
                    ntax=num_leaves, rng=rng)
            if gsa_multiplier is not None:
                kwargs["gsa_ntax"] = gsa_multiplier * num_leaves
            t = birthdeath.birth_death_tree(**kwargs)
            self.assertTrue(t._debug_tree_is_valid())
            self.assertEqual(num_leaves, len(t.leaf_nodes()))

    def testGSABD(self):
        """Birth-death process with GSA produces the correct number of tips."""
        self._check_tree_sizes(death_rate=0.2, max_leaves=15, gsa_multiplier=3)

    def testYule(self):
        """Pure-birth process produces the correct number of tips."""
        self._check_tree_sizes(death_rate=0.0, max_leaves=20)

    def testGSA(self):
        """Pure-birth process with GSA produces the correct number of tips."""
        self._check_tree_sizes(death_rate=0.0, max_leaves=20, gsa_multiplier=4)

    def testBDTree(self):
        """Birth-death tree generation without checking [TODO: checks]."""
        self._check_tree_sizes(death_rate=0.2, max_leaves=20)
+
+if __name__ == "__main__":
+    unittest.main()
+
diff --git a/dendropy/test/test_coalescent.py b/dendropy/test/test_coalescent.py
new file mode 100644
index 0000000..f89b0ee
--- /dev/null
+++ b/dendropy/test/test_coalescent.py
@@ -0,0 +1,76 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests of coalescent tree simulation.
+"""
+
+import unittest
+import dendropy
+from dendropy.test.support.mockrandom import MockRandom
+from dendropy.test.support import pathmap
+from dendropy.model import coalescent
+from dendropy.simulate import popgensim
+
class TruncatedCoalescentTreeTest(unittest.TestCase):
    """Generates gene trees constrained by a simulated species tree."""

    def get_species_tree(self, ntax=10):
        # MockRandom is deterministic, so the order of rng calls below is
        # kept identical to preserve the generated values.
        rng = MockRandom()
        ages = sorted(rng.randint(1000, 10000) for _ in range(ntax))
        pop_sizes = [rng.randint(1000, 10000) for _ in range(2 * ntax + 1)]
        taxon_namespace = dendropy.TaxonNamespace(
                ["t{}".format(i + 1) for i in range(ntax)])
        species_tree = popgensim.pop_gen_tree(
                taxon_namespace=taxon_namespace,
                ages=ages,
                num_genes=4,
                pop_sizes=pop_sizes,
                rng=rng)
        # The internal-node depths of the generated tree must reproduce the
        # requested ages (to within a small tolerance).
        recovered_ages = []
        for node in species_tree.postorder_node_iter():
            depth = node.distance_from_tip()
            if depth > 0:
                recovered_ages.append(depth)
        recovered_ages.sort()
        for expected, recovered in zip(ages, recovered_ages):
            assert (expected - recovered) < 10e-6

        # Edge population sizes are collected but currently not checked.
        recovered_pop_sizes = [edge.pop_size
                for edge in species_tree.postorder_edge_iter()]
        recovered_pop_sizes.sort()

        return species_tree

    def runTest(self, ntax=10):
        """TruncatedCoalescentTreeTest -- tree generation without checking [TODO: checks]"""
        species_tree = self.get_species_tree(ntax)
        gene_trees = []
        for _ in range(20):
            gene_trees.append(coalescent.constrained_kingman_tree(species_tree)[0])
+
class PureCoalescentTreeTest(unittest.TestCase):

    def runTest(self):
        """PureCoalescentTreeTest -- tree generation without checking [TODO: checks]"""
        rng = MockRandom()
        labels = ["t{}".format(i + 1) for i in range(100)]
        tree = coalescent.pure_kingman_tree(dendropy.TaxonNamespace(labels), rng=rng)
        # Structural sanity only; no topology/branch-length checks yet.
        assert tree._debug_tree_is_valid()
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_container_frozen_ordered_dict.py b/dendropy/test/test_container_frozen_ordered_dict.py
new file mode 100644
index 0000000..b18bbca
--- /dev/null
+++ b/dendropy/test/test_container_frozen_ordered_dict.py
@@ -0,0 +1,87 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Read-only (frozen) OrderedDict tests.
+"""
+
+import copy
+import collections
+import unittest
+from dendropy.utility import container
+
class FrozenOrderedDictTest(unittest.TestCase):
    """FrozenOrderedDict mirrors its source mapping and rejects all mutation."""

    def setUp(self):
        self.src = collections.OrderedDict([("a", 1), ("b", 2), ("c", 3)])
        self.d = container.FrozenOrderedDict(self.src)

    def test_basic_readonly_iteration_and_access(self):
        # Iteration order and lookups must match the source mapping.
        for src_key, frozen_key in zip(self.src, self.d):
            self.assertEqual(src_key, frozen_key)
            self.assertTrue(src_key in self.d)
            self.assertEqual(self.d[src_key], self.src[src_key])

    def test_keys(self):
        self.assertEqual(list(self.src.keys()), list(self.d.keys()))

    def test_values(self):
        self.assertEqual(list(self.src.values()), list(self.d.values()))

    def test_readonly(self):
        # Every mutating operation must raise ImmutableTypeError.
        key = list(self.d.keys())[0]
        blocked = container.FrozenOrderedDict.ImmutableTypeError
        with self.assertRaises(blocked):
            self.d[key] = 1
        with self.assertRaises(blocked):
            del self.d[key]
        with self.assertRaises(blocked):
            self.d.pop(key)
        with self.assertRaises(blocked):
            self.d.clear()
        with self.assertRaises(blocked):
            self.d.update({})
        with self.assertRaises(blocked):
            self.d.fromkeys([1, 2, 3])

    def test_deepcopy(self):
        clone = copy.deepcopy(self.d)
        self.assertIsNot(clone, self.d)
        for clone_key, orig_key in zip(clone, self.d):
            self.assertEqual(clone_key, orig_key)
            self.assertTrue(clone_key in self.d)
            self.assertEqual(self.d[clone_key], clone[clone_key])

    def test_copy(self):
        self.assertEqual(copy.copy(self.d), self.d)

    def test_copy_construction(self):
        self.assertEqual(collections.OrderedDict(self.d), self.d)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_container_normalized_bitmask_dict.py b/dendropy/test/test_container_normalized_bitmask_dict.py
new file mode 100644
index 0000000..b0c8bbc
--- /dev/null
+++ b/dendropy/test/test_container_normalized_bitmask_dict.py
@@ -0,0 +1,58 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+NormalizedBitmaskDict tests.
+"""
+
+import unittest
+from dendropy.utility import container
+
class TestNormalizedBitmaskDict(unittest.TestCase):

    def runTest(self):
        """NormalizedBitmaskDict: a key and its complement map to one entry."""
        fill_bitmask = 0xFF # 1111 1111
        # Pairs of ((stored key, readable bits), (complement key, bits)).
        splits = [
            ((0x03, '0000 0011'), (0x03, '0000 0011')),
            ((0x34, '0011 0100'), (0xCB, '1100 1011')),
            ((0x44, '0100 0100'), (0xBB, '1011 1011')),
            ((0x12, '0001 0010'), (0xED, '1110 1101')),
            ((0x75, '0111 0101'), (0x75, '0111 0101')),
            ]
        d = container.NormalizedBitmaskDict(fill_bitmask=fill_bitmask)
        for (key, bits), _ in splits:
            d[key] = bits

        # Both the key and its complement must resolve to the same entry.
        for (key, _), (comp_key, _) in splits:
            self.assertIn(key, d)
            self.assertIn(comp_key, d)
            self.assertEqual(d[key], d[comp_key])

        # Smoke-test item iteration.
        for k, v in d.items():
            pass

        # Deleting by key or by complement removes the entry under both.
        del d[splits[0][0][0]]
        del d[splits[1][1][0]]
        for split_idx in (0, 1):
            self.assertNotIn(splits[split_idx][0][0], d)
            self.assertNotIn(splits[split_idx][1][0], d)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_container_ordered_set.py b/dendropy/test/test_container_ordered_set.py
new file mode 100644
index 0000000..a07eb7d
--- /dev/null
+++ b/dendropy/test/test_container_ordered_set.py
@@ -0,0 +1,189 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+OrderedSet tests.
+"""
+
+import random
+import copy
+import unittest
+from dendropy.utility import container
+
class A(object):
    """Test helper: hashed by identity and equal only to itself."""

    def __hash__(self):
        return id(self)

    def __eq__(self, other):
        return other is self
+
class B(object):
    """Test helper: hashed by identity, equal when random data payloads match."""

    def __init__(self, v):
        # Ten pseudo-random multiples of v; all B(0) instances compare equal.
        self.data = [random.randint(-1000, 100) * v for _ in range(10)]

    def __hash__(self):
        return id(self)

    def __eq__(self, other):
        return self.data == other.data
+
class TestOrderedSetIdentity(unittest.TestCase):
    """OrderedSet hashes by identity: equal content, distinct hash keys."""

    def setUp(self):
        self.t1 = container.OrderedSet(["a", "b"])
        self.t2 = container.OrderedSet(["a", "b"])

    def test_hash_dict_membership(self):
        # Two same-content OrderedSets occupy distinct dict slots.
        lookup = {self.t1: 1, self.t2: 2}
        self.assertEqual(len(lookup), 2)
        self.assertEqual(lookup[self.t1], 1)
        self.assertEqual(lookup[self.t2], 2)
        self.assertIn(self.t1, lookup)
        self.assertIn(self.t2, lookup)
        del lookup[self.t1]
        self.assertNotIn(self.t1, lookup)
        self.assertIn(self.t2, lookup)
        self.assertEqual(len(lookup), 1)
        only_t1 = {self.t1: 1}
        only_t2 = {self.t2: 1}
        self.assertIn(self.t1, only_t1)
        self.assertIn(self.t2, only_t2)
        self.assertNotIn(self.t2, only_t1)
        self.assertNotIn(self.t1, only_t2)

    def test_hash_set_membership(self):
        # Two same-content OrderedSets are distinct set members.
        members = {self.t1, self.t2}
        self.assertEqual(len(members), 2)
        self.assertIn(self.t1, members)
        self.assertIn(self.t2, members)
        members.discard(self.t1)
        self.assertNotIn(self.t1, members)
        self.assertIn(self.t2, members)
        self.assertEqual(len(members), 1)
        only_t1 = {self.t1: 1}
        only_t2 = {self.t2: 1}
        self.assertIn(self.t1, only_t1)
        self.assertIn(self.t2, only_t2)
        self.assertNotIn(self.t2, only_t1)
        self.assertNotIn(self.t1, only_t2)
+
class TestOrderedSetCollectionsManagement(unittest.TestCase):
    """Insertion order, indexed access, and removal semantics of OrderedSet."""

    def _new_items(self):
        # A mix of hashable values, including identity-hashed A instances.
        return [42, 3.14, "hello", object(), A(), frozenset([1, 2, 3]), A()]

    def test_basic_adding(self):
        items = self._new_items()
        ordered_set = container.OrderedSet()
        for item in items:
            ordered_set.add(item)
        # Re-adding existing members must not change order or size.
        for _ in range(100):
            ordered_set.add(random.choice(items))
        self.assertEqual(list(ordered_set), items)
        self.assertEqual(len(list(ordered_set)), len(items))

    def test_constructor(self):
        items = self._new_items()
        ordered_set = container.OrderedSet(items)
        for _ in range(100):
            ordered_set.add(random.choice(items))
        self.assertEqual(list(ordered_set), items)
        self.assertEqual(len(list(ordered_set)), len(items))

    def test_basic_getting(self):
        items = self._new_items()
        ordered_set = container.OrderedSet()
        for item in items:
            ordered_set.add(item)
        for idx, item in enumerate(items):
            self.assertIs(ordered_set[idx], item)

    def test_del(self):
        items = self._new_items()
        for _ in range(len(items)):
            ordered_set = container.OrderedSet(items)
            expected = list(items)
            # Delete members at random positions; the remaining order must
            # always track a plain list under the same deletions.
            while expected:
                pos = random.randint(0, len(expected) - 1)
                self.assertIs(ordered_set[pos], expected[pos])
                del ordered_set[pos]
                del expected[pos]
                self.assertEqual(list(ordered_set), expected)

    def test_discard(self):
        items = self._new_items()
        for _ in range(len(items)):
            ordered_set = container.OrderedSet(items)
            expected = list(items)
            while expected:
                victim = random.choice(expected)
                ordered_set.discard(victim)
                expected.remove(victim)
                self.assertEqual(list(ordered_set), expected)

    def test_pop_back(self):
        items = self._new_items()
        ordered_set = container.OrderedSet(items)
        while ordered_set:
            # Default pop() removes from the back, like list.pop().
            self.assertIs(ordered_set.pop(), items.pop(-1))
            self.assertEqual(len(ordered_set), len(items))
            self.assertEqual(list(ordered_set), items)
        self.assertEqual(len(ordered_set), 0)

    def test_pop_front(self):
        items = self._new_items()
        ordered_set = container.OrderedSet(items)
        while ordered_set:
            # pop(False) removes from the front.
            self.assertIs(ordered_set.pop(False), items.pop(0))
            self.assertEqual(len(ordered_set), len(items))
            self.assertEqual(list(ordered_set), items)
        self.assertEqual(len(ordered_set), 0)
+
class TestOrderedSetDeepCopy(unittest.TestCase):
    """Deep-copying an OrderedSet clones both its members and internal state."""

    def setUp(self):
        self.s1 = container.OrderedSet([0, 1, 2, 3, "a", "b", "c"])
        self.c1 = container.OrderedSet([B(1), B(0), B(2), frozenset([1, 2, 3]), B(3)])

    def test_simple_element_deepcopy(self):
        self.assertEqual(copy.deepcopy(self.s1), self.s1)

    def test_deepcopy(self):
        clone = copy.deepcopy(self.c1)
        self.assertEqual(len(clone), len(self.c1))
        for cloned_item, orig_item in zip(clone, self.c1):
            # Members are equal but distinct objects.
            self.assertIsNot(cloned_item, orig_item)
            self.assertEqual(cloned_item, orig_item)
        # Internal attributes must also be cloned, not shared.
        for attr_name in self.c1.__dict__:
            self.assertIn(attr_name, clone.__dict__)
            self.assertIsNot(clone.__dict__[attr_name], self.c1.__dict__[attr_name])
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_continuous.py b/dendropy/test/test_continuous.py
new file mode 100644
index 0000000..04fd39a
--- /dev/null
+++ b/dendropy/test/test_continuous.py
@@ -0,0 +1,294 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Continuous character tests.
+"""
+
+import unittest
+import inspect
+import dendropy
+from dendropy.test.support import pathmap
+from dendropy.test.support.mockrandom import MockRandom
+from dendropy.test.support import dendropytest
+from dendropy.model import continuous
+
+class BounceConstrainTest(unittest.TestCase):
+
+    def runTest(self):
+        # Sweep the end value from 0.5 downward in 0.05 steps so it crosses
+        # (and "bounces" off) the [0.0, 1.0] bounds; each call returns a
+        # 2-tuple, compared cell-by-cell against precomputed references.
+        output = [continuous._bounce_constrain(0.5, .5 - 0.05*i, 0.0, 1.0) for i in range(100)]
+        ref = [(0.5, 0.5), (0.45, 0.47499999999999998), (0.4, 0.45), (0.34999999999999998, 0.42499999999999999), (0.29999999999999999, 0.4), (0.25, 0.375), (0.19999999999999996, 0.34999999999999998), (0.14999999999999997, 0.32499999999999996), (0.099999999999999978, 0.29999999999999999), (0.049999999999999989, 0.275), (0.0, 0.25), (0.05, 0.22954545454545455), (0.1, 0.21666666666666665), (0.15, 0.20961538461538459), (0.2, 0.20714285714285713), (0.25, 0.20833333333333331), (0.3, 0.2125), ( [...]
+        assert_mat_approx_equal(output, ref, tester=self)
+
+class KTBRatesTest(unittest.TestCase):
+
+    def runTest(self):
+        # Expected values precomputed using the deterministic MockRandom
+        # stream consumed by _calc_KTB_rate below.
+        expected = [0.01, 0.00238612042332357, 0.00060046473606155989, 0.0032194690573412836, 0.0022181051458164411, 0.00017490090971284891, 0.022600788551531675, 0.014617722366202806, 0.00040617762499905189, 4.0911649525733303e-05, 0.51000000000000001, 0.12395537322536185, 0.41634777277597329, 0.018010492402013432, 0.077781889354748265, 0.039201420226780297, 0.0018067559608145154, 0.038064428026927236, 0.000468854154046173, 0.00027068933811426338, 1.01, 0.86242720407665974, 0.0628934616 [...]
+
+        rng = MockRandom()
+        y = [continuous._calc_KTB_rate(0.01+.5*i, 1, j, rng) for i in range(10) for j in range(10)]
+        assert_vec_approx_equal(y, expected, tester=self)
+
+        # Same expected values with the two middle arguments swapped:
+        # they appear interchangeable here -- TODO confirm against the
+        # _calc_KTB_rate implementation in dendropy.model.continuous.
+        rng = MockRandom()
+        y = [continuous._calc_KTB_rate(0.01+.5*i, j, 1, rng) for i in range(10) for j in range(10)]
+        assert_vec_approx_equal(y, expected, tester=self)
+
+        # a starting rate of zero is invalid and must raise
+        self.assertRaises(ValueError, continuous._calc_KTB_rate, 0, 1 , 1 , rng)
+
+class TKPRatesTest(unittest.TestCase):
+
+    def runTest(self):
+        expected = [0.01, 0.0039340474963855641, 0.001632232380666595, 0.014428659286578023, 0.016389703355764434, 0.0021307292762983254, 0.45394897294494657, 0.48407248276349046, 0.022176546909804414, 0.0036827493270788596, 0.51000000000000001, 0.20436786045422725, 1.1317505850563239, 0.080717426949510368, 0.57473474392305157, 0.47757106516375003, 0.036289663562129378, 1.2605207376608101, 0.025598569446275875, 0.02436667769068299, 1.01, 1.4219020757916292, 0.17096215398172529, 1.0457715 [...]
+
+        rng = MockRandom()
+        y = [continuous._calc_TKP_rate(0.01+.5*i, 1, j, rng) for i in range(10) for j in range(10)]
+        assert_vec_approx_equal(y, expected, tester=self)
+
+        rng = MockRandom()
+        y = [continuous._calc_TKP_rate(0.01+.5*i, j, 1, rng) for i in range(10) for j in range(10)]
+        assert_vec_approx_equal(y, expected, tester=self)
+
+        try:
+            continuous._calc_TKP_rate(0, 1, 1, rng)
+        except ValueError:
+            pass
+        except OverflowError:
+            pass
+
+class KTBEvolveCrop(unittest.TestCase):
+    # Smoke test (no assertions): evolving rates with the KTB model and
+    # "crop" rate constraining must run to completion on a fixed tree.
+
+    def runTest(self):
+        rng = MockRandom()
+        newick = "((t5:1611.75,t6:1611.75):3922.93,((t4:1043.81,(t2:754.11,t1:754.11):2896.9):6584.0,t3:1702.21):3832.47);"
+        tree = dendropy.Tree.get_from_string(newick, "newick")
+        root = tree.seed_node
+        # seed the rate attributes that evolve_continuous_char reads/writes
+        root.mutation_rate = 1e-5
+        root.mean_edge_rate = root.mutation_rate
+        continuous.evolve_continuous_char(root, rng, roeotroe=0.01,
+                            min_rate=1.0e-6, max_rate=1.0e-3, model='KTB',
+                            time_attr='edge_length', val_attr='mutation_rate',
+                            mean_val_attr='mean_edge_rate',
+                            constrain_rate_mode="crop")
+        # rescale edge lengths by the evolved mean rates (exercises results)
+        for i in tree.preorder_node_iter():
+            if i.edge_length is not None:
+                i.edge_length *= i.mean_edge_rate
+
+class KTBEvolveLinearBounce(unittest.TestCase):
+    # Smoke test (no assertions): identical to KTBEvolveCrop except that
+    # rates are constrained with the "linear_bounce" mode.
+
+    def runTest(self):
+        rng = MockRandom()
+        newick = "((t5:1611.75,t6:1611.75):3922.93,((t4:1043.81,(t2:754.11,t1:754.11):2896.9):6584.0,t3:1702.21):3832.47);"
+        tree = dendropy.Tree.get_from_string(newick, "newick")
+        root = tree.seed_node
+        # seed the rate attributes that evolve_continuous_char reads/writes
+        root.mutation_rate = 1e-5
+        root.mean_edge_rate = root.mutation_rate
+        continuous.evolve_continuous_char(root, rng, roeotroe=0.01,
+                            min_rate=1.0e-6, max_rate=1.0e-3, model='KTB',
+                            time_attr='edge_length', val_attr='mutation_rate',
+                            mean_val_attr='mean_edge_rate',
+                            constrain_rate_mode="linear_bounce")
+        # rescale edge lengths by the evolved mean rates (exercises results)
+        for i in tree.preorder_node_iter():
+            if i.edge_length is not None:
+                i.edge_length *= i.mean_edge_rate
+
+class BifurcatingTreePICTest(dendropytest.ExtendedTestCase):
+    """Phylogenetic independent contrasts on a strictly bifurcating
+    five-taxon primate tree, checked per internal node against
+    precomputed values for two continuous characters.
+    """
+
+    def setUp(self):
+        tree_str = "[&R] ((((Homo:0.21,Pongo:0.21)N1:0.28,Macaca:0.49)N2:0.13,Ateles:0.62)N3:0.38,Galago:1.00)N4:0.0;"
+        data_str = """
+    #NEXUS
+    BEGIN DATA;
+        DIMENSIONS  NTAX=5 NCHAR=2;
+        FORMAT DATATYPE = CONTINUOUS GAP = - MISSING = ?;
+        MATRIX
+            Homo      4.09434   4.74493
+            Pongo     3.61092   3.33220
+            Macaca    2.37024   3.36730
+            Ateles    2.02815   2.89037
+            Galago   -1.46968   2.30259
+        ;
+    END;
+    """
+        # tree and matrix must share one namespace so taxa map correctly
+        taxa = dendropy.TaxonNamespace()
+        self.tree = dendropy.Tree.get_from_string(tree_str, 'newick', taxon_namespace=taxa)
+        self.char_matrix = dendropy.ContinuousCharacterMatrix.get_from_string(data_str,
+                'nexus',
+                taxon_namespace=taxa)
+        # NOTE: "Constrasts" is the (misspelled) upstream class name.
+        self.pic = continuous.PhylogeneticIndependentConstrasts(tree=self.tree,
+                char_matrix=self.char_matrix)
+        # one dict of expected per-node values per character column
+        self.expected_vals = []
+        self.expected_vals.append({
+            # state, corrected edge length, contrast, contrast_var
+            "N1": (3.852630000, 0.385000000, 0.483420000, 0.420000000),
+            "N2": (3.200378400, 0.345600000, 1.482390000, 0.875000000),
+            "N3": (2.780823579, 0.601905551, 1.172228400, 0.965600000),
+            "N4": (1.183724613, 0.375743470, 4.250503579, 1.601905551),
+            })
+        self.expected_vals.append({
+            # state, corrected edge length, contrast, contrast_var
+            "N1": (4.038565000, 0.385000000, 1.412730000, 0.420000000),
+            "N2": (3.743208400, 0.345600000, 0.671265000, 0.875000000),
+            "N3": (3.437967150, 0.601905551, 0.852838400, 0.965600000),
+            "N4": (3.011356599, 0.375743470, 1.135377150, 1.601905551),
+            })
+
+    def testTreeValues(self):
+        # check each character column independently
+        for cidx in range(self.char_matrix.vector_size):
+            ctree = self.pic.contrasts_tree(character_index=cidx,
+                    annotate_pic_statistics=True,
+                    state_values_as_node_labels=False,
+                    corrected_edge_lengths=False)
+            for nd in ctree.postorder_internal_node_iter():
+                vals = (nd.pic_state_value,
+                        nd.pic_corrected_edge_length,
+                        nd.pic_contrast_raw,
+                        nd.pic_contrast_variance)
+                exp_vals = self.expected_vals[cidx][nd.label]
+                for vidx, val in enumerate(vals):
+                    self.assertAlmostEqual(vals[vidx], exp_vals[vidx])
+
+class MultifurcatingTreePICTest(dendropytest.ExtendedTestCase):
+    """Behavior of the PIC machinery on a tree with polytomies.
+
+    The default and "error" polytomy strategies must raise ValueError,
+    while "Ignore" and "Resolve" must succeed.  The capitalized strategy
+    strings below suggest case-insensitive handling -- TODO confirm.
+    """
+
+    def setUp(self):
+        # same primate tree as BifurcatingTreePICTest, with Bogus1-3 taxa
+        # inserted to create polytomies at N1, N2 and N3
+        tree_str = "[&R] ((((Homo:0.21,Bogus1:0.23,Pongo:0.21)N1:0.28,Bogus2:0.49,Macaca:0.49)N2:0.13,Bogus3:0.62,Ateles:0.62)N3:0.38,Galago:1.00)N4:0.0;"
+        data_str = """
+    #NEXUS
+    BEGIN DATA;
+        DIMENSIONS  NTAX=8 NCHAR=2;
+        FORMAT DATATYPE = CONTINUOUS GAP = - MISSING = ?;
+        MATRIX
+            Homo      4.09434   4.74493
+            Pongo     3.61092   3.33220
+            Macaca    2.37024   3.36730
+            Ateles    2.02815   2.89037
+            Galago   -1.46968   2.30259
+            Bogus1    2.15      2.15
+            Bogus2    2.15      2.15
+            Bogus3    2.15      2.15
+        ;
+    END;
+    """
+        taxa = dendropy.TaxonNamespace()
+        self.tree = dendropy.Tree.get_from_string(tree_str, 'newick', taxon_namespace=taxa)
+        self.char_matrix = dendropy.ContinuousCharacterMatrix.get_from_string(data_str,
+                'nexus',
+                taxon_namespace=taxa)
+
+    def testErrorOnDefault(self):
+        # no polytomy_strategy given: polytomies must be an error
+        pic = continuous.PhylogeneticIndependentConstrasts(tree=self.tree,
+                char_matrix=self.char_matrix)
+        self.assertRaises(ValueError, pic.contrasts_tree, 1)
+
+    def testError(self):
+        pic = continuous.PhylogeneticIndependentConstrasts(tree=self.tree,
+                char_matrix=self.char_matrix,
+                polytomy_strategy="error")
+        self.assertRaises(ValueError, pic.contrasts_tree, 1)
+
+    def testIgnore(self):
+        pic = continuous.PhylogeneticIndependentConstrasts(tree=self.tree,
+                char_matrix=self.char_matrix,
+                polytomy_strategy="Ignore")
+        ctree = pic.contrasts_tree(1)
+
+    def testResolve(self):
+        pic = continuous.PhylogeneticIndependentConstrasts(tree=self.tree,
+                char_matrix=self.char_matrix,
+                polytomy_strategy="Resolve")
+        ctree = pic.contrasts_tree(1)
+
+def approx_equal(x, y, tol=1e-5):
+    "Returns True if x and y differ by less than tol"
+    return (abs(x - y) < tol)
+
+def vec_approx_equal(x, y, tol=1e-5):
+    """Returns True if each element in the iterable ``x`` differs by less than
+    ``tol`` from the corresponding element in ``y``
+    """
+    if len(x) != len(y):
+        return False
+    for i, j in zip(x, y):
+        if abs(i - j) >= tol:
+            return False
+    return True
+
+def mat_approx_equal(x, y, tol=1e-5):
+    """Returns True if each cell in 2D iterable matrix ``x`` differs by less than
+    ``tol`` from the corresponding element in ``y``
+    """
+    if len(x) != len(y):
+        return False
+    for row_x, row_y in zip(x, y):
+        if len(row_x) != len(row_y):
+            return False
+        for i, j in zip(row_x, row_y):
+            if abs(i - j) >= tol:
+                return False
+    return True
+
+def _failure(tester, msg):
+    """Report a failed comparison, attributing it to the caller's caller.
+
+    Walks two frames up the stack (``f_back.f_back``) so the reported
+    file/line/function is the test that invoked an ``assert_*`` helper,
+    not the helper itself; this only works when called exactly one call
+    level below those helpers.  If ``tester`` is a unittest.TestCase its
+    assertTrue is used, otherwise a plain AssertionError is raised.
+    """
+    calling_frame = inspect.currentframe().f_back.f_back
+    co = calling_frame.f_code
+    emsg = "%s\nCalled from file %s, line %d, in %s" % (msg, co.co_filename, calling_frame.f_lineno, co.co_name)
+    if isinstance(tester, unittest.TestCase):
+        tester.assertTrue(False, emsg)
+    else:
+        raise AssertionError(emsg)
+
+def assert_approx_equal(x, y, tester=None, tol=1e-5):
+    """Asserts that x and y are approximately equal.
+
+    If ``tester`` is a unittest.TestCase then assertTrue is used; otherwise
+    AssertionErrors are raised.
+    """
+    if abs(x - y) >= tol:
+        _failure(tester, "%f != %f" % (x, y))
+
+def assert_vec_approx_equal(x, y, tester=None, tol=1e-5):
+    """Returns True if each element in the iterable ``x`` differs by less than
+    ``tol`` from the corresponding element in ``y``
+
+    If ``tester`` is a unittest.TestCase then assertTrue is used; otherwise
+    AssertionErrors are raised.
+    """
+    if len(x) != len(y):
+        _failure(tester, "vectors of numbers differ in length (%d vs %d)" % (len(x), len(y)))
+    for n, itup in enumerate(zip(x, y)):
+        i, j = itup
+        if abs(i - j) >= tol:
+            _failure(tester, "%f != %f at element %d" % (i, j, n))
+
+def assert_mat_approx_equal(x, y, tester=None, tol=1e-5):
+    """Returns True if each cell in 2D iterable matrix ``x`` differs by less than
+    ``tol`` from the corresponding element in ``y``
+    If ``tester`` is a unittest.TestCase then assertTrue is used; otherwise
+    AssertionErrors are raised.
+    """
+    if len(x) != len(y):
+        _failure(tester, "Matrices differs in length (%d vs %d)" % (len(x), len(y)))
+    for n, row_tup in enumerate(zip(x, y)):
+        row_x, row_y = row_tup
+        if len(row_x) != len(row_y):
+            _failure(tester, "row %d of matrix differs in length (%d vs %d)" % (n, len(row_x), len(row_y)))
+        for col, cell_tup in enumerate(zip(row_x, row_y)):
+            i, j = cell_tup
+            if abs(i - j) >= tol:
+                _failure(tester, "%f != %f for column %d of row %d" % (i, j, col, n))
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_basic_tree.py b/dendropy/test/test_dataio_basic_tree.py
new file mode 100644
index 0000000..c339c87
--- /dev/null
+++ b/dendropy/test/test_dataio_basic_tree.py
@@ -0,0 +1,39 @@
+
+from dendropy.test.support import mockreader
+from dendropy import dataio
+import dendropy
+
+import unittest
+
+class DendropyDataIOTestMockTreeReader(mockreader.MockReader):
+    # Minimal reader stub: ignores the input stream entirely and returns
+    # a product holding one fresh taxon namespace and one tree list with
+    # a single new tree.
+
+    def __init__(self):
+        mockreader.MockReader.__init__(self)
+
+    def process_read_call(self):
+        tns = self.taxon_namespace_factory(label="test1")
+        tree_list = self.tree_list_factory(label="test1", taxon_namespace=tns)
+        tree = tree_list.new_tree()
+        product = self.Product(
+                taxon_namespaces=[tns],
+                tree_lists=[tree_list],
+                char_matrices=None)
+        return product
+
+class MockTestTreeTypeDerivedFromDendropyTree(dendropy.Tree):
+    # Empty Tree subclass: used below to verify that get_from_string()
+    # instantiates the derived class rather than plain dendropy.Tree.
+    pass
+
+
+class TestCustomTreeType(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        # Register the mock reader under a private schema name so that
+        # get_from_string() can be exercised without real input data.
+        dataio.register_reader("dendropy_test_mock_tree_reader", DendropyDataIOTestMockTreeReader)
+
+    def test_get_from(self):
+        tree = MockTestTreeTypeDerivedFromDendropyTree.get_from_string("", "dendropy_test_mock_tree_reader")
+        # exact type check: the factory must build the derived class
+        self.assertEqual(type(tree), MockTestTreeTypeDerivedFromDendropyTree)
+
+if __name__ == "__main__":
+    unittest.main()
+
diff --git a/dendropy/test/test_dataio_fasta_reader.py b/dendropy/test/test_dataio_fasta_reader.py
new file mode 100644
index 0000000..a8cfd57
--- /dev/null
+++ b/dendropy/test/test_dataio_fasta_reader.py
@@ -0,0 +1,93 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for general FASTA reading.
+"""
+
+import unittest
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import pathmap
+from dendropy.test.support import standard_file_test_chars
+
+class FastaDnaReaderTestCase(
+        standard_file_test_chars.DnaTestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build()
+
+    def test_basic_fasta(self):
+        src_path = pathmap.char_source_path("standard-test-chars-dna.fasta")
+        self.verify_get_from(
+                matrix_type=dendropy.DnaCharacterMatrix,
+                src_filepath=src_path,
+                schema="fasta",
+                factory_kwargs={},
+                check_taxon_annotations=False,
+                check_matrix_annotations=False,
+                check_sequence_annotations=False,
+                check_column_annotations=False,
+                check_cell_annotations=False)
+
+class FastaRnaReaderTestCase(
+        standard_file_test_chars.RnaTestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build()
+
+    def test_basic_fasta(self):
+        src_path = pathmap.char_source_path("standard-test-chars-rna.fasta")
+        self.verify_get_from(
+                matrix_type=dendropy.RnaCharacterMatrix,
+                src_filepath=src_path,
+                schema="fasta",
+                factory_kwargs={},
+                check_taxon_annotations=False,
+                check_matrix_annotations=False,
+                check_sequence_annotations=False,
+                check_column_annotations=False,
+                check_cell_annotations=False)
+
+class FastaProteinReaderTestCase(
+        standard_file_test_chars.ProteinTestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build()
+
+    def test_basic_fasta(self):
+        src_path = pathmap.char_source_path("standard-test-chars-protein.fasta")
+        self.verify_get_from(
+                matrix_type=dendropy.ProteinCharacterMatrix,
+                src_filepath=src_path,
+                schema="fasta",
+                factory_kwargs={},
+                check_taxon_annotations=False,
+                check_matrix_annotations=False,
+                check_sequence_annotations=False,
+                check_column_annotations=False,
+                check_cell_annotations=False)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_fasta_writer.py b/dendropy/test/test_dataio_fasta_writer.py
new file mode 100644
index 0000000..9792bfd
--- /dev/null
+++ b/dendropy/test/test_dataio_fasta_writer.py
@@ -0,0 +1,79 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for FASTA characters writing.
+"""
+
+import unittest
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import compare_and_validate
+from dendropy.test.support import pathmap
+from dendropy.test.support import standard_file_test_chars
+
+class FastaWriterCharactersTestCase(
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+    """Round-trip test: read a character matrix from FASTA, write it back
+    out (wrapped and unwrapped), re-read the output, and verify it still
+    matches the reference data.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        # FASTA carries no annotation metadata, so all annotation checks
+        # are disabled for the round-trip comparison.
+        cls.check_taxon_annotations = False
+        cls.check_matrix_annotations = False
+        cls.check_sequence_annotations = False
+        cls.check_column_annotations = False
+        cls.check_cell_annotations = False
+        standard_file_test_chars.DnaTestChecker.build()
+        standard_file_test_chars.RnaTestChecker.build()
+        standard_file_test_chars.ProteinTestChecker.build()
+        standard_file_test_chars.Standard01234TestChecker.build()
+        standard_file_test_chars.ContinuousTestChecker.build()
+        # (source filename, matrix class, checker class) triples
+        cls.srcs = (
+                ("standard-test-chars-dna.fasta", dendropy.DnaCharacterMatrix, standard_file_test_chars.DnaTestChecker),
+                ("standard-test-chars-rna.fasta", dendropy.RnaCharacterMatrix, standard_file_test_chars.RnaTestChecker),
+                ("standard-test-chars-protein.fasta", dendropy.ProteinCharacterMatrix, standard_file_test_chars.ProteinTestChecker),
+                )
+
+    def verify_char_matrix(self, char_matrix, src_matrix_checker_type):
+        self.assertEqual(type(char_matrix), src_matrix_checker_type.matrix_type)
+        # Defensive branch: ``srcs`` currently contains no
+        # StandardCharacterMatrix entries, so this is not reached here.
+        if src_matrix_checker_type.matrix_type is dendropy.StandardCharacterMatrix:
+            src_matrix_checker_type.create_class_fixtures_label_sequence_map_based_on_state_alphabet(src_matrix_checker_type,
+                    char_matrix.default_state_alphabet)
+        standard_file_test_chars.general_char_matrix_checker(
+                self,
+                char_matrix,
+                src_matrix_checker_type,
+                check_taxon_annotations=self.check_taxon_annotations,
+                check_matrix_annotations=self.check_matrix_annotations,
+                check_sequence_annotations=self.check_sequence_annotations,
+                check_column_annotations=self.check_column_annotations,
+                check_cell_annotations=self.check_cell_annotations,)
+
+    def test_basic_fasta_chars(self):
+        for src_filename, matrix_type, src_matrix_checker_type in self.__class__.srcs:
+            src_path = pathmap.char_source_path(src_filename)
+            d1 = matrix_type.get_from_path(src_path, "fasta")
+            # exercise both line-wrapped and unwrapped FASTA output
+            for wrap in (True, False):
+                s = self.write_out_validate_equal_and_return(
+                        d1, "fasta", {"wrap": wrap})
+                d2 = matrix_type.get_from_string(s, "fasta")
+                self.verify_char_matrix(d2, src_matrix_checker_type)
+
+if __name__ == "__main__":
+    unittest.main()
+
diff --git a/dendropy/test/test_dataio_newick_reader_rooting.py b/dendropy/test/test_dataio_newick_reader_rooting.py
new file mode 100644
index 0000000..6aedbd0
--- /dev/null
+++ b/dendropy/test/test_dataio_newick_reader_rooting.py
@@ -0,0 +1,176 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for rooting interpretation.
+"""
+
+import unittest
+import warnings
+from dendropy.dataio import newickreader
+
+class RootingInterpreterLegacySupportTestCase(unittest.TestCase):
+    # Each row: (legacy keyword, value passed, expected ``rooting`` setting)
+    legacy_kw = (
+            ("as_unrooted", True,  "force-unrooted"),
+            ("as_unrooted", False, "force-rooted"),
+            ("as_rooted",   True,  "force-rooted"),
+            ("as_rooted",   False, "force-unrooted"),
+            ("default_as_unrooted", True,  "default-unrooted"),
+            ("default_as_unrooted", False, "default-rooted"),
+            ("default_as_rooted",   True,  "default-rooted"),
+            ("default_as_rooted",   False, "default-unrooted"),
+    )
+
+    def test_multiple_keyword_error(self):
+        # contradictory legacy keywords must be rejected
+        with self.assertRaises(ValueError):
+            nr = newickreader.NewickReader(
+                    as_rooted=True,
+                    as_unrooted=True)
+
+    def test_legacy_keywords(self):
+        # Each legacy keyword must translate to the equivalent modern
+        # ``rooting`` value; deprecation warnings are captured/silenced.
+        for kwset in self.legacy_kw:
+            with warnings.catch_warnings(record=True) as w:
+                # Cause all warnings to always be triggered.
+                warnings.simplefilter("always")
+                # Trigger a warning.
+                kwargs = {kwset[0]: kwset[1]}
+                nr = newickreader.NewickReader(**kwargs)
+                self.assertEqual(nr.rooting, kwset[2])
+
+class RootingInterpreterForceUnrootedTestCase(unittest.TestCase):
+
+    def test_with_no_token(self):
+        nr = newickreader.NewickReader(rooting="force-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state(""), False)
+
+    def test_with_no_token2(self):
+        nr = newickreader.NewickReader(rooting="force-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state(), False)
+
+    def test_with_rooted_token_lcase(self):
+        nr = newickreader.NewickReader(rooting="force-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state("&r"), False)
+
+    def test_with_rooted_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="force-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state("&R"), False)
+
+    def test_with_unrooted_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="force-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state("&u"), False)
+
+    def test_with_unrooted_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="force-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state("&U"), False)
+
+    def test_with_meaningless_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="force-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state("zz"), False)
+
+class RootingInterpreterForceRootedTestCase(unittest.TestCase):
+
+    def test_with_no_token(self):
+        nr = newickreader.NewickReader(rooting="force-rooted")
+        self.assertIs(nr._parse_tree_rooting_state(""), True)
+
+    def test_with_no_token2(self):
+        nr = newickreader.NewickReader(rooting="force-rooted")
+        self.assertIs(nr._parse_tree_rooting_state(), True)
+
+    def test_with_rooted_token_lcase(self):
+        nr = newickreader.NewickReader(rooting="force-rooted")
+        self.assertIs(nr._parse_tree_rooting_state("&r"), True)
+
+    def test_with_rooted_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="force-rooted")
+        self.assertIs(nr._parse_tree_rooting_state("&R"), True)
+
+    def test_with_unrooted_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="force-rooted")
+        self.assertIs(nr._parse_tree_rooting_state("&u"), True)
+
+    def test_with_unrooted_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="force-rooted")
+        self.assertIs(nr._parse_tree_rooting_state("&U"), True)
+
+    def test_with_meaningless_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="force-rooted")
+        self.assertIs(nr._parse_tree_rooting_state("zz"), True)
+
+class RootingInterpreterDefaultUnrootedTestCase(unittest.TestCase):
+
+    def test_with_no_token(self):
+        nr = newickreader.NewickReader(rooting="default-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state(""), False)
+
+    def test_with_no_token2(self):
+        nr = newickreader.NewickReader(rooting="default-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state(), False)
+
+    def test_with_rooted_token_lcase(self):
+        nr = newickreader.NewickReader(rooting="default-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state("&r"), True)
+
+    def test_with_rooted_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="default-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state("&R"), True)
+
+    def test_with_unrooted_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="default-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state("&u"), False)
+
+    def test_with_unrooted_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="default-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state("&U"), False)
+
+    def test_with_meaningless_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="default-unrooted")
+        self.assertIs(nr._parse_tree_rooting_state("zz"), False)
+
+class RootingInterpreterDefaultRootedTestCase(unittest.TestCase):
+
+    def test_with_no_token(self):
+        nr = newickreader.NewickReader(rooting="default-rooted")
+        self.assertIs(nr._parse_tree_rooting_state(""), True)
+
+    def test_with_no_token2(self):
+        nr = newickreader.NewickReader(rooting="default-rooted")
+        self.assertIs(nr._parse_tree_rooting_state(), True)
+
+    def test_with_rooted_token_lcase(self):
+        nr = newickreader.NewickReader(rooting="default-rooted")
+        self.assertIs(nr._parse_tree_rooting_state("&r"), True)
+
+    def test_with_rooted_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="default-rooted")
+        self.assertIs(nr._parse_tree_rooting_state("&R"), True)
+
+    def test_with_unrooted_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="default-rooted")
+        self.assertIs(nr._parse_tree_rooting_state("&u"), False)
+
+    def test_with_unrooted_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="default-rooted")
+        self.assertIs(nr._parse_tree_rooting_state("&U"), False)
+
+    def test_with_meaningless_token_ucase(self):
+        nr = newickreader.NewickReader(rooting="default-rooted")
+        self.assertIs(nr._parse_tree_rooting_state("zz"), True)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_newick_reader_tree.py b/dendropy/test/test_dataio_newick_reader_tree.py
new file mode 100644
index 0000000..d577536
--- /dev/null
+++ b/dendropy/test/test_dataio_newick_reader_tree.py
@@ -0,0 +1,934 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for general NEWICK reading.
+"""
+
+import sys
+import os
+import unittest
+import itertools
+import collections
+import random
+import dendropy
+from dendropy.utility import error
+from dendropy.dataio import newickreader
+from dendropy.test.support import dendropytest
+from dendropy.test.support import compare_and_validate
+from dendropy.test.support import standard_file_test_trees
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import pathmap
+from dendropy.utility.messaging import get_logger
+_LOG = get_logger(__name__)
+if sys.hexversion < 0x03040000:
+    from dendropy.utility.filesys import pre_py34_open as open
+
class NewickTreeReaderBasic(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """Basic NEWICK reading: suppression flags, rooting, weights, metadata."""
    # NOTE(review): large sections of dead commented-out code (alternate
    # "get_from_*"/"read_from_*" exercise paths) were removed from this class.

    def test_basic_parsing(self):
        """Check taxon/edge-length suppression flags across all input sources.

        Exercises every combination of ``suppress_internal_node_taxa``,
        ``suppress_leaf_node_taxa`` and ``suppress_edge_lengths`` (including
        leaving each unspecified so that the documented default applies),
        reading the same curated tree from a file path, an open stream, and
        a raw string.
        """
        tree_string = self.get_newick_string()
        reader_kwargs = {}
        with pathmap.SandboxedFile() as tempf:
            tempf.write(tree_string)
            tempf.flush()
            tree_filepath = tempf.name
            for suppress_internal_node_taxa in (None, False, True):
                if suppress_internal_node_taxa is None:
                    # Unspecified: internal-node taxa are suppressed by default.
                    expected_suppress_internal_node_taxa = True
                    reader_kwargs.pop("suppress_internal_node_taxa", None)
                else:
                    expected_suppress_internal_node_taxa = suppress_internal_node_taxa
                    reader_kwargs["suppress_internal_node_taxa"] = suppress_internal_node_taxa
                for suppress_leaf_node_taxa in (None, False, True):
                    if suppress_leaf_node_taxa is None:
                        # Unspecified: leaf taxa are created by default.
                        expected_suppress_leaf_node_taxa = False
                        reader_kwargs.pop("suppress_leaf_node_taxa", None)
                    else:
                        expected_suppress_leaf_node_taxa = suppress_leaf_node_taxa
                        reader_kwargs["suppress_leaf_node_taxa"] = suppress_leaf_node_taxa
                    for suppress_edge_lengths in (None, False, True):
                        if suppress_edge_lengths is None:
                            # Unspecified: edge lengths are read by default.
                            expected_suppress_edge_lengths = False
                            reader_kwargs.pop("suppress_edge_lengths", None)
                        else:
                            expected_suppress_edge_lengths = suppress_edge_lengths
                            reader_kwargs["suppress_edge_lengths"] = suppress_edge_lengths
                        with open(tree_filepath, "r") as tree_stream:
                            # Same data offered via path, stream, and string.
                            approaches = (
                                    {"path": tree_filepath},
                                    {"file": tree_stream},
                                    {"data": tree_string},
                                    )
                            for approach_kwargs in approaches:
                                approach_kwargs.update(reader_kwargs)
                                approach_kwargs["schema"] = "newick"
                                t = dendropy.Tree.get(**approach_kwargs)
                                self.verify_curated_tree(t,
                                        suppress_internal_node_taxa=expected_suppress_internal_node_taxa,
                                        suppress_leaf_node_taxa=expected_suppress_leaf_node_taxa,
                                        suppress_edge_lengths=expected_suppress_edge_lengths)

    def test_rooting_weighting_and_tree_metadata_handling(self):
        """Check rooting tokens, weight tokens, and comment-metadata extraction.

        Every rooting token is combined with every ``rooting`` keyword value
        and checked against the expected rooting state; weight tokens are
        checked with and without ``store_tree_weights``; FigTree-style
        metadata comments are checked with and without
        ``extract_comment_metadata``.
        """
        rooting_tokens = ("", "[&R]", "[&U]", "[&r]", "[&u]", "[&0]", "[&invalid]", "[R]", "[U]", "[&]")
        rooting_interpretations = ("force-rooted", "force-unrooted", "default-rooted", "default-unrooted", None)
        for rooting_token in rooting_tokens:
            for rooting_interpretation in rooting_interpretations:
                if rooting_interpretation == "force-rooted":
                    expected_is_rooted = True
                    expected_is_unrooted = False
                    expected_is_rootedness_undefined = False
                elif rooting_interpretation == "force-unrooted":
                    expected_is_rooted = False
                    expected_is_unrooted = True
                    expected_is_rootedness_undefined = False
                elif rooting_interpretation == "default-rooted":
                    if rooting_token.upper() == "[&R]":
                        expected_is_rooted = True
                        expected_is_unrooted = False
                        expected_is_rootedness_undefined = False
                    elif rooting_token.upper() == "[&U]":
                        expected_is_rooted = False
                        expected_is_unrooted = True
                        expected_is_rootedness_undefined = False
                    else:
                        expected_is_rooted = True
                        expected_is_unrooted = False
                        expected_is_rootedness_undefined = False
                elif rooting_interpretation == "default-unrooted":
                    if rooting_token.upper() == "[&R]":
                        expected_is_rooted = True
                        expected_is_unrooted = False
                        expected_is_rootedness_undefined = False
                    elif rooting_token.upper() == "[&U]":
                        expected_is_rooted = False
                        expected_is_unrooted = True
                        expected_is_rootedness_undefined = False
                    else:
                        expected_is_rooted = False
                        expected_is_unrooted = True
                        expected_is_rootedness_undefined = False
                elif rooting_interpretation is None:
                    # No interpretation given: rootedness is undefined unless
                    # an explicit token is present.
                    if rooting_token.upper() == "[&R]":
                        expected_is_rooted = True
                        expected_is_unrooted = False
                        expected_is_rootedness_undefined = False
                    elif rooting_token.upper() == "[&U]":
                        expected_is_rooted = False
                        expected_is_unrooted = True
                        expected_is_rootedness_undefined = False
                    else:
                        expected_is_rooted = None
                        expected_is_unrooted = None
                        expected_is_rootedness_undefined = True
                else:
                    raise Exception("Unexpected rooting interpretation: '{}'".format(rooting_interpretation))
                weighting_tokens = ("", "[&w 0.25]", "[ &W 0.25]", "[&w 1/4]", "[&W 1/4]")
                for weighting_token in weighting_tokens:
                    for store_tree_weights in (False, True):
                        for extract_comment_metadata in (False, True):
                            token_combination = ["[&color=blue]", "[&Why=42]", weighting_token, "[&rate=0.15]", rooting_token, "[&lnL=-3.14]"]
                            token_str = "".join(token_combination)
                            _LOG.debug("Token = '{}', Rooting interpretation = '{}'".format(token_str, rooting_interpretation))
                            s = self.get_newick_string(tree_preamble_tokens=token_str)
                            _LOG.debug(s)
                            t = dendropy.Tree.get(
                                    data=s,
                                    schema="newick",
                                    rooting=rooting_interpretation,
                                    store_tree_weights=store_tree_weights,
                                    extract_comment_metadata=extract_comment_metadata)
                            self.assertIs(t.is_rooted, expected_is_rooted)
                            self.assertIs(t.is_unrooted, expected_is_unrooted)
                            self.assertIs(t.is_rootedness_undefined, expected_is_rootedness_undefined)
                            if store_tree_weights:
                                # Every weight token above encodes 0.25; no
                                # token means the default weight of 1.0.
                                if weighting_token:
                                    self.assertEqual(t.weight, 0.25)
                                else:
                                    self.assertEqual(t.weight, 1.0)
                            else:
                                self.assertIs(t.weight, None)
                            if extract_comment_metadata:
                                self.assertEqual(t.annotations.get_value("color", None), "blue")
                                self.assertEqual(t.annotations.get_value("Why", None), "42")
+
class NewickTreeMultifurcatingtree(dendropytest.ExtendedTestCase):
    """Parsing of a multifurcating (non-binary) NEWICK tree with comments."""

    def test_multifurcating(self):
        s = """\
                ([p]([a]a:1[a],[b]b:2[b],[c]c:3[c],[s]([d]d:4[d],
         [e]e:5[e],[f]f:6[f],[g]g:7[g])[s]s:19[s])[p]p:16[p],[w]
         ([t]t:20[t],[u]u:21[u],[v]v:22[v])[w]w:23[w],[q]
         ([h]h:8[h],[i]i:9[i],[j]j:10[j],[k]k:11[k],[o]([l]l:12[l],[m]m:13[m],[n]n:14[n])[o]o:15[o])[q]q:17[q])[r]r:18[r][r];
        """
        tree = dendropy.Tree.get(data=s,
                schema="newick",
                suppress_internal_node_taxa=True,
                suppress_leaf_node_taxa=True)
        expected_children = {
            'a': [],
            'b': [],
            'c': [],
            'd': [],
            'e': [],
            'f': [],
            'g': [],
            'h': [],
            'i': [],
            'j': [],
            'k': [],
            'l': [],
            'm': [],
            'n': [],
            'o': ['l','m','n'],
            'p': ['s', 'a', 'b', 'c'],
            'q': ['o', 'h', 'i', 'j','k'],
            'r': ['q','p', 'w'],
            's': ['d', 'e', 'f', 'g'],
            't': [],
            'u': [],
            'v': [],
            'w': ['t', 'u', 'v'],
        }
        expected_parent = {
            'a': 'p',
            'b': 'p',
            'c': 'p',
            'd': 's',
            'e': 's',
            'f': 's',
            'g': 's',
            'h': 'q',
            'i': 'q',
            'j': 'q',
            'k': 'q',
            'l': 'o',
            'm': 'o',
            'n': 'o',
            'o': 'q',
            'p': 'r',
            'q': 'r',
            'r': None,
            's': 'p',
            't': 'w',
            'u': 'w',
            'v': 'w',
            'w': 'r',
        }
        for node in tree:
            label = node.label
            # Topology: children and parent must match the expectations above.
            self.assertCountEqual(
                    [child.label for child in node.child_node_iter()],
                    expected_children[label])
            parent = node.parent_node
            if parent is None:
                self.assertIs(expected_parent[label], None)
            else:
                self.assertEqual(parent.label, expected_parent[label])
            # Comment counts: leaves carry 2 bracketed comments, internals 3;
            # every comment repeats the node's own label.
            expected_num_comments = 2 if node.is_leaf() else 3
            self.assertEqual(len(node.comments), expected_num_comments)
            for comment in node.comments:
                self.assertEqual(comment, label)
            # Branch lengths encode the label's alphabet position (a=1, ...).
            self.assertEqual(node.edge.length, ord(label) - ord('a') + 1)
+
class NewickTreeInvalidStatements(dendropytest.ExtendedTestCase):
    """Malformed NEWICK statements must raise ``DataParseError``."""

    def test_invalid_trees(self):
        invalid_tree_statements = (
            "(a,(b,c))a",
            "(a,(b,c)) (b,(a,c))",
            "(a,(b,c)) (d,(e,f))",
            "(a,(b,c)),",
            "(a,(b,c)z1)z2,",
            "(a,(b,c)))",
            "(a,(b,c)):",
            "(a,(b,c))(",
            "(e,(c,(d,e)a)b;(b,(a,e)c)d;",
            )
        for statement in invalid_tree_statements:
            with self.assertRaises(error.DataParseError):
                dendropy.Tree.get(data=statement, schema="newick")
+
class NewickTreeDuplicateTaxa(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """Duplicate labels: an error by default, tolerated when taxa are suppressed."""

    def test_duplicate_taxa1(self):
        tree_statements = (
            "((a,b)c,(b,c)a)d;",
            "((_,_)_,(_,_)_)_;",
        )
        expected_labels = (
            ("a","b","c","b","c","a","d"),
            (" "," "," "," "," "," "," "),
        )
        for statement, expected in zip(tree_statements, expected_labels):
            # Default settings map repeated labels onto the same taxon,
            # which is reported as a duplicate-taxon error.
            with self.assertRaises(newickreader.NewickReader.NewickReaderDuplicateTaxonError):
                tree = dendropy.Tree.get(data=statement, schema="newick")
            # With taxon creation suppressed, labels are plain node labels
            # and duplicates are permitted.
            tree = dendropy.Tree.get(data=statement,
                    schema="newick",
                    suppress_internal_node_taxa=True,
                    suppress_leaf_node_taxa=True)
            self.assertCountEqual([node.label for node in tree], expected)
+
class NewickTreeAnonymousTaxa(dendropytest.ExtendedTestCase):
    """Trees whose nodes carry no labels at all."""

    def test_anonymous_taxa_no_error(self):
        # Fully anonymous topology must parse without raising.
        tree = dendropy.Tree.get(data="((,),(,(,(,))));",
                schema="newick")

    def test_anonymous_taxa(self):
        s = "((:1[a],:2[b])[c]:3,(:4[d],([e]:5,([f]:6,:7[g]):8[h])[i]:9)[j]:10):11[k];"
        tree = dendropy.Tree.get(
                data=s,
                schema="newick")
        # No labels means no taxa should have been created.
        self.assertEqual(len(tree.taxon_namespace), 0)
        all_nodes = list(tree)
        leaves = list(tree.leaf_node_iter())
        internals = list(tree.postorder_internal_node_iter())
        self.assertEqual(len(all_nodes), 11)
        self.assertEqual(len(leaves), 6)
        self.assertEqual(len(internals), 5)
        # The bracketed comment identifies each (otherwise anonymous) node.
        self.assertCountEqual(
                [node.comments[0] for node in leaves],
                ('a','b','d','e','f','g'))
        self.assertCountEqual(
                [node.comments[0] for node in internals],
                ('c','h','i','j','k'))
        for node in tree:
            # Comment letter encodes the branch length (a=1, b=2, ...).
            marker = node.comments[0]
            self.assertEqual(node.edge.length, ord(marker) - ord('a') + 1)
+
class NewickTreeUnsupportedKeywordArguments(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """Unknown keyword arguments to the NEWICK reader must raise ``TypeError``."""
    # NOTE(review): dead commented-out "read_from_*" code removed.

    def test_unsupported_keyword_arguments(self):
        tree_filepath = pathmap.tree_source_path('dendropy-test-trees-n12-x2.newick')
        tree_string = self.get_newick_string()
        reader_kwargs = {
                "suppress_internal_taxa": True,  # should be suppress_internal_node_taxa
                "gobbledegook": False,
        }
        with open(tree_filepath, "r") as tree_stream:
            # Same bad kwargs via all three factory entry points.
            approaches = (
                    (dendropy.Tree.get_from_path, tree_filepath),
                    (dendropy.Tree.get_from_stream, tree_stream),
                    (dendropy.Tree.get_from_string, tree_string),
            )
            for method, src in approaches:
                with self.assertRaises(TypeError):
                    t = method(src, "newick", **reader_kwargs)
+
+
class NewickTreeQuotedLabels(dendropytest.ExtendedTestCase):
    """Quoted NEWICK labels: embedded spaces, parentheses, quotes, and
    scientific-notation-lookalike text must not confuse edge-length parsing."""

    def test_edge_lengths1(self):
        # NOTE: labels deliberately contain doubled single quotes (NEWICK's
        # quote-escape), parentheses, and "e"-notation text; the parser must
        # keep them as labels and still read the real branch lengths.
        tree = dendropy.Tree.get_from_string(
                """
                ((('T1 = 1.242e-10':1.242e-10,
                'T2 is 213.31e-4':213.31e-4)i1:3.44e-3,
                ('T3 is a (nice) taxon':3.3e7,
                T4:4.4e+8)'this is an internal node called "i2"':4.0e+1)i3:4.0E-4,
                (T5:6.7E+2,
                'and this so-called ''node'' is ("T6" with a length of ''7.2E-9'')':7.2E-9)i4:4.0E8)'this is the ''root\'\'\':7.0;
                """,
                "newick",
                suppress_internal_node_taxa=True,
                suppress_leaf_node_taxa=True,
                )
        # Expected branch length keyed by the (unquoted) node label.
        expected_edge_lens = {
            'T1 = 1.242e-10': 1.242e-10,
            'T2 is 213.31e-4': 213.31e-4,
            'i1': 3.44e-3,
            'T3 is a (nice) taxon': 3.3e7,
            'T4': 4.4e+8,
            'this is an internal node called "i2"': 4.0e+1,
            'i3': 4.0e-4,
            'T5': 6.7e+2,
            "and this so-called 'node' is (\"T6\" with a length of '7.2E-9')": 7.2e-9,
            'i4': 4.0e8,
            "this is the 'root'": 7.0,
        }
        for nd in tree.postorder_node_iter():
            self.assertAlmostEqual(nd.edge.length, expected_edge_lens[nd.label])
+
+
class CommentReadingTests(dendropytest.ExtendedTestCase):
    """Association of bracketed NEWICK comments with the correct nodes,
    across every position a comment can occupy in a tree statement."""

    def test_simple_post_node_comments(self):
        # One comment immediately after each node label, before the colon.
        s = "((A[A]:1,B[B]:1)AB[AB]:1,(C[C]:1,D[D]:1)CD[CD]:1)Root[Root]:1;"
        _LOG.info("Tree = %s" % s)
        tree = dendropy.Tree.get_from_string(
                s,
                "newick",
                suppress_internal_node_taxa=True,
                suppress_leaf_node_taxa=True,
                )
        for nd in tree:
            _LOG.info("%s: %s" % (nd.label, nd.comments))
            self.assertEqual(len(nd.comments), 1)
            # Each comment's text repeats the node's own label.
            self.assertEqual(nd.comments[0], nd.label)
            self.assertEqual(nd.edge.length, 1)

    def test_simple_post_edge_length_comments(self):
        # One comment immediately after each branch length.
        s = "((A:1[A],B:1[B])AB:1[AB],(C:1[C],D:1[D])CD:1[CD])Root:1[Root];"
        _LOG.info("Tree = %s" % s)
        tree = dendropy.Tree.get_from_string(
                s,
                "newick",
                suppress_internal_node_taxa=True,
                suppress_leaf_node_taxa=True,
                )
        for nd in tree:
            _LOG.info("%s: %s" % (nd.label, nd.comments))
            self.assertEqual(len(nd.comments), 1)
            self.assertEqual(nd.comments[0], nd.label)

    def test_post_node_and_edge_comments(self):
        # Comments both after the label and after the branch length: both
        # must attach to the same node.
        s = "((A[A]:1[A],B[B]:1[B])AB[AB]:1[AB],(C[C]:1[C],D[D]:1[D])CD[CD]:1[CD])Root[Root]:1[Root];"
        _LOG.info("Tree = %s" % s)
        tree = dendropy.Tree.get_from_string(
                s,
                "newick",
                suppress_internal_node_taxa=True,
                suppress_leaf_node_taxa=True,
                )
        for nd in tree:
            _LOG.info("%s: %s" % (nd.label, nd.comments))
            self.assertEqual(len(nd.comments), 2)
            self.assertEqual(nd.comments[0], nd.label)
            self.assertEqual(nd.comments[1], nd.label)

    def test_multi_position_comments(self):
        # Comments in every slot: before the label ([xxx]), twice after the
        # label, twice after the colon, and twice after the branch length —
        # seven comments per node in all.
        s = """(([xxx]A[A][A]:[A][A]1[A][A],
                 [xxx]B[B][B]:[B][B]1[B][B])
                 [xxx]AB[AB][AB]:[AB][AB]1[AB][AB],
                ([xxx]C[C][C]:[C][C]1[C][C],
                 [xxx]D[D][D]:[D][D]1[D][D])
                 [xxx]CD[CD][CD]:[CD][CD]1[CD][CD])
                 [xxx]Root[Root][Root]:[Root][Root]1[Root][Root];"""
        _LOG.info("Tree = %s" % s)
        tree = dendropy.Tree.get_from_string(
                s,
                "newick",
                suppress_internal_node_taxa=True,
                suppress_leaf_node_taxa=True,
                )
        for nd in tree:
            _LOG.info("%s: %s" % (nd.label, nd.comments))
            self.assertEqual(len(nd.comments), 7)
            self.assertEqual(nd.comments[0], 'xxx')
            for i in range(1,7):
                self.assertEqual(nd.comments[i], nd.label)

    def test_comment_association(self):
        # Comment text is "<label><slot-number>": all five comments for a
        # node must land on that node regardless of where they appear.
        tree_strings = [
                "([a1][a2]a[a3]:[a4]1[a5],[h1][h2][h3]([b1]b[b2]:[b3][b4]2[b5],[g1][g2]([c1]c[c2]:[c3]3[c4][c5],[f1]([d1]d[d2][d3]:[d4]4[d5],[e1]e[e2]:5[e3][e4][e5])[f2]f[f3]:[f4]6[f5])[g3][g4]g:7[g5])[h4]h[h5]:8)[i1][i2]i[i3]:[i4]9[i5];",
            ]
        for tree_string in tree_strings:
            tree = dendropy.Tree.get_from_string(
                    tree_string,
                    "newick",
                    suppress_leaf_node_taxa=True)
            for nd in tree:
                # Branch length encodes the label's alphabet position.
                exp_brlen = ord(nd.label[0]) - ord('a') + 1
                self.assertEqual(nd.edge.length, exp_brlen)
                self.assertEqual(len(nd.comments), 5)
                for comment in nd.comments:
                    self.assertEqual(comment[0], nd.label)

    def test_anonymous_node_comment_association(self):
        # tree1 is the same topology as tree2 but with all labels removed;
        # comments must attach to corresponding nodes in both.
        tree_string1 = "[x1]([x2],[x3]([x4],[x5]([x6],[x7],[x8],[x9])[x10])[x11])[x12];"
        tree_string2 = "[x1](a[x2],[x3]([x4]b,[x5]([x6]c,[x7]d,[x8]e,[x9]f)g[x10])h[x11])i[x12];"
        tree1 = dendropy.Tree.get_from_string(tree_string1, "newick", suppress_leaf_node_taxa=True)
        tree2 = dendropy.Tree.get_from_string(tree_string2, "newick", suppress_leaf_node_taxa=True)
        expected_comments = {
                "a": ["x2",],
                "b": ["x4",],
                "c": ["x6",],
                "d": ["x7",],
                "e": ["x8",],
                "f": ["x9",],
                "g": ["x5", "x10"],
                "h": ["x3", "x11"],
                "i": ["x12"],
                }
        nodes1 = [nd for nd in tree1]
        nodes2 = [nd for nd in tree2]
        for nd1, nd2 in zip(nodes1, nodes2):
            self.assertEqual(nd2.comments, expected_comments[nd2.label])
            self.assertEqual(nd1.comments, expected_comments[nd2.label])
+
+
class CommentMetaDataTests(dendropytest.ExtendedTestCase):
    """Extraction of key=value metadata from NEWICK comments, in both
    FigTree ("[&k=v,...]") and NHX ("[&&k=v:...]") styles."""

    # FigTree-style annotations: comma-separated, single "&" prefix.
    figtree_metadata_str = """[&Tree1=1,Tree2=2, Tree3={1,2,3}](([xxx]A[&A1=1,A2=2,A3={1,2,3},  ,][A]:[A][A]1[A][A],
                 [xxx]B[&B1=1,B2=2,B3={1,2,3}][B]:[B][B]1[B][B])
                 [xxx]AB[&AB1=1,AB2=2,AB3={1,2,3}][AB]:[AB][AB]1[AB][AB],
                ([xxx]C[&C1=1,C2=2,C3={1,2,3}][C]:[C][C]1[C][C],
                 [xxx]D[&D1=1,D2=2,D3={1,2,3}][D]:[D][D]1[D][D])
                 [xxx]CD[&CD1=1, CD2=2, CD3={1,2,3}][CD]:[CD][CD]1[CD][CD])
                 [xxx]Root[&Root1=1, Root2=2, Root3={1,2,3}][Root]:[Root][Root]1[Root][Root];"""

    # NHX-style annotations: colon-separated, double "&&" prefix.
    nhx_metadata_str = """[&Tree1=1,Tree2=2, Tree3={1,2,3}](([xxx]A[&&A1=1:A2=2:A3={1,2,3}][A]:[A][A]1[A][A],
                 [xxx]B[&&B1=1:B2=2:B3={1,2,3}][B]:[B][B]1[B][B])
                 [xxx]AB[&&AB1=1:AB2=2:AB3={1,2,3}][AB]:[AB][AB]1[AB][AB],
                ([xxx]C[&&C1=1:C2=2:C3={1,2,3}][C]:[C][C]1[C][C],
                 [xxx]D[&&D1=1:D2=2:D3={1,2,3}][D]:[D][D]1[D][D])
                 [xxx]CD[&&CD1=1: CD2=2: CD3={1,2,3}][CD]:[CD][CD]1[CD][CD])
                 [xxx]Root[&&Root1=1: Root2=2: Root3={1,2,3}][Root]:[Root][Root]1[Root][Root];"""

    def check_results(self, tree):
        """Verify tree- and node-level annotations extracted from either style.

        Each node labeled X is expected to carry metadata
        {X1: "1", X2: "2", X3: ["1","2","3"]}.
        """
        metadata = tree.annotations.values_as_dict()
        self.assertEqual(metadata, {'Tree1': '1', 'Tree2': '2', 'Tree3':['1','2','3']})
        for nd in tree.postorder_node_iter():
            metadata = nd.annotations.values_as_dict()
            #print("%s: %s => %s" % (nd.label, nd.comments, metadata))
            self.assertEqual(len(metadata), 3)
            values = ["1", "2", ["1","2","3"]]
            for i in range(3):
                key = "{}{}".format(nd.label, i+1)
                self.assertTrue(key in metadata)
                self.assertEqual(metadata[key], values[i])

    def testFigtreeStyleBasic(self):
        s = self.figtree_metadata_str
        _LOG.info("Tree = %s" % s)
        tree = dendropy.Tree.get_from_string(
                s,
                "newick",
                suppress_internal_node_taxa=True,
                suppress_leaf_node_taxa=True,
                extract_comment_metadata=True)
        self.check_results(tree)

    def testNHXBasic(self):
        s = self.nhx_metadata_str
        _LOG.info("Tree = %s" % s)
        tree = dendropy.Tree.get_from_string(
                s,
                "newick",
                suppress_internal_node_taxa=True,
                suppress_leaf_node_taxa=True,
                extract_comment_metadata=True)
        self.check_results(tree)

    def test_incomplete_metadata(self):
        # Nodes may mix metadata comments with plain comments; nodes with no
        # metadata must simply yield an empty annotation dict.
        s = """[&color=blue](A[&region=Asia,id=00012][cryptic],(B[&region=Africa],C[&region=Madagascar,id=19391][two of three]));"""
        tree = dendropy.Tree.get_from_string(
                s,
                "newick",
                suppress_internal_node_taxa=True,
                suppress_leaf_node_taxa=True,
                extract_comment_metadata=True,
                )
        self.assertEqual(tree.annotations.values_as_dict(), {'color': 'blue'})
        expected = [ {'region': 'Asia', 'id': '00012'},
                {'region': 'Africa'},
                {'region': 'Madagascar', 'id': '19391'},
                {},
                {},]
        for idx, nd in enumerate(tree.postorder_node_iter()):
            self.assertEqual(nd.annotations.values_as_dict(), expected[idx])
+
+# class NewickTreeTaxonNamespaceTest(dendropytest.ExtendedTestCase):
+
+#     def test_namespace_passing(self):
+#         tns1 = dendropy.TaxonNamespace()
+#         s1 = "(a,(b,c));"
+#         tree = dendropy.Tree.get_from_string(
+#                 s1, "newick", taxon_namespace=tns1)
+#         self.assertIs(tree.taxon_namespace, tns1)
+#         self.assertEqual(len(tns1), 3)
+#         s2 = "((e,f),(g,h));"
+#         tree.read_from_string(
+#                 s2, "newick")
+#         self.assertIs(tree.taxon_namespace, tns1)
+#         self.assertEqual(len(tns1), 7)
+#         tns2 = dendropy.TaxonNamespace()
+#         s3 = "((j,k),(l,m));"
+#         with self.assertRaises(TypeError):
+#             tree.read_from_string(
+#                     s3, "newick",
+#                     taxon_namespace=tns2)
+
class NewickTreeLabelParsingTest(dendropytest.ExtendedTestCase):
    """Label parsing: quoting and underscore handling in NEWICK labels."""

    def _check_labels_and_lengths(self, s, expected, **extra_reader_kwargs):
        # Parse ``s`` and verify both the taxon namespace contents and each
        # taxon's branch length against the ``expected`` label->length map.
        tree = dendropy.Tree.get_from_string(s,
                "newick",
                suppress_internal_node_taxa=False,
                **extra_reader_kwargs)
        namespace = tree.taxon_namespace
        self.assertEqual(len(namespace), len(expected))
        self.assertEqual({taxon.label for taxon in namespace}, set(expected.keys()))
        for node in tree:
            self.assertEqual(node.edge.length, expected[node.taxon.label])

    def test_basic_taxa(self):
        s = "(a1:3.14e-2,(b2:1.2,(c3:0.5,d4:0.7)e5:111)f6:222)g7:333;"
        expected = {
                "a1": 3.14e-2,
                "b2": 1.2,
                "c3": 0.5,
                "d4": 0.7,
                "e5": 111,
                "f6": 222,
                "g7": 333,
                }
        self._check_labels_and_lengths(s, expected)

    def test_quoted_underscores(self):
        # Quoted labels keep their underscores verbatim.
        s = "('a_1':3.14e-2,('b_2':1.2,('c_3':0.5,'d_4':0.7)'e_5':111)'f_6':222)'g_7':333;"
        expected = {
                "a_1": 3.14e-2,
                "b_2": 1.2,
                "c_3": 0.5,
                "d_4": 0.7,
                "e_5": 111,
                "f_6": 222,
                "g_7": 333,
                }
        self._check_labels_and_lengths(s, expected)

    def test_unquoted_underscores(self):
        # Unquoted underscores are read as spaces by default.
        s = "(a_1:3.14e-2,(b_2:1.2,(c_3:0.5,d_4:0.7)e_5:111)f_6:222)g_7:333;"
        expected = {
                "a 1": 3.14e-2,
                "b 2": 1.2,
                "c 3": 0.5,
                "d 4": 0.7,
                "e 5": 111,
                "f 6": 222,
                "g 7": 333,
                }
        self._check_labels_and_lengths(s, expected)

    def test_unquoted_underscores_preserved(self):
        # With preserve_underscores=True, unquoted underscores are retained.
        s = "(a_1:3.14e-2,(b_2:1.2,(c_3:0.5,d_4:0.7)e_5:111)f_6:222)g_7:333;"
        expected = {
                "a_1": 3.14e-2,
                "b_2": 1.2,
                "c_3": 0.5,
                "d_4": 0.7,
                "e_5": 111,
                "f_6": 222,
                "g_7": 333,
                }
        self._check_labels_and_lengths(s, expected, preserve_underscores=True)
+
+class NewickTreeReaderOffsetTreeTest(
+        standard_file_test_trees.NewickTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+    """
+    Tests of the ``collection_offset`` and ``tree_offset`` keyword arguments
+    of the single-tree NEWICK ``get_from_*`` factory methods, including
+    negative offsets and out-of-range error behavior.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        # Build the shared reference-tree fixtures once for the whole class.
+        standard_file_test_trees.NewickTestTreesChecker.create_class_fixtures(cls)
+
+    def test_tree_offset_newick_get(self):
+        tree_file_title = "dendropy-test-trees-n33-unrooted-x100a"
+        tree_reference = standard_file_test_trees._TREE_REFERENCES[tree_file_title]
+        expected_number_of_trees = tree_reference["num_trees"]
+        # Fixed boundary offsets plus a random sample of in-range positive
+        # and negative offsets.
+        tree_offsets = set([0, expected_number_of_trees-1, -1, -expected_number_of_trees])
+        while len(tree_offsets) < 8:
+            tree_offsets.add(random.randint(1, expected_number_of_trees-2))
+        while len(tree_offsets) < 12:
+            tree_offsets.add(random.randint(-expected_number_of_trees-2, -2))
+        tree_filepath = self.schema_tree_filepaths[tree_file_title]
+        with open(tree_filepath, "r") as src:
+            tree_string = src.read()
+        for tree_offset in tree_offsets:
+            tree_reference = standard_file_test_trees._TREE_REFERENCES[tree_file_title]
+            expected_number_of_trees = tree_reference["num_trees"]
+            if tree_offset < 0:
+                # Normalize a negative offset to its equivalent non-negative
+                # index; negatives past the start wrap to the first tree.
+                if abs(tree_offset) > expected_number_of_trees:
+                    tree_offset = 0
+                else:
+                    tree_offset = expected_number_of_trees + tree_offset
+            with open(tree_filepath, "r") as tree_stream:
+                # Exercise all three source flavors: path, stream, string.
+                approaches = (
+                        (dendropy.Tree.get_from_path, tree_filepath),
+                        (dendropy.Tree.get_from_stream, tree_stream),
+                        (dendropy.Tree.get_from_string, tree_string),
+                        )
+                for method, src in approaches:
+                    tree = method(
+                            src,
+                            "newick",
+                            collection_offset=0,
+                            tree_offset=tree_offset,
+                            suppress_internal_node_taxa=True,
+                            suppress_leaf_node_taxa=False,
+                            rooting="default-unrooted")
+                    reference_tree_idx = tree_offset
+                    self.compare_to_reference_by_title_and_index(
+                            tree=tree,
+                            tree_file_title=tree_file_title,
+                            reference_tree_idx=tree_offset)
+
+    def test_offset_get_with_redundant_semicolons(self):
+        # Empty statements created by semicolon runs must not shift the
+        # tree offsets: offset N must select the N-th *real* tree.
+        s = """\
+            ;;;;(a,(b,c)d)e;;;;(e,(c,a)d)b;;;;(b,(a,e)c)d;;;;
+            """
+        expected_roots = {
+            0 : 'e',
+            1 : 'b',
+            2 : 'd',
+        }
+        expected_leaves = {
+            0 : ['a', 'b', 'c'],
+            1 : ['e', 'c', 'a'],
+            2 : ['b', 'a', 'e'],
+        }
+        for idx in range(3):
+            tree = dendropy.Tree.get_from_string(
+                    s, "newick",
+                    collection_offset=0,
+                    tree_offset=idx,
+                    suppress_internal_node_taxa=True,
+                    suppress_leaf_node_taxa=True)
+            self.assertEqual(tree.seed_node.label, expected_roots[idx])
+            leaves = [nd.label for nd in tree.leaf_node_iter()]
+            # NOTE(review): assertCountEqual is Python-3 unittest; assumes a
+            # Python-2 shim in ExtendedTestCase -- confirm.
+            self.assertCountEqual(leaves, expected_leaves[idx])
+
+    # NOTE(review): the "read_from_*" counterparts below are disabled
+    # upstream; kept verbatim for reference.
+    # def test_tree_offset_newick_read(self):
+    #     tree_file_title = "dendropy-test-trees-n33-unrooted-x100a"
+    #     tree_reference = standard_file_test_trees._TREE_REFERENCES[tree_file_title]
+    #     expected_number_of_trees = tree_reference["num_trees"]
+    #     tree_offsets = set([0, expected_number_of_trees-1, -1, -expected_number_of_trees])
+    #     while len(tree_offsets) < 8:
+    #         tree_offsets.add(random.randint(1, expected_number_of_trees-2))
+    #     while len(tree_offsets) < 12:
+    #         tree_offsets.add(random.randint(-expected_number_of_trees-2, -2))
+    #     tree_filepath = self.schema_tree_filepaths[tree_file_title]
+    #     with open(tree_filepath, "r") as src:
+    #         tree_string = src.read()
+    #     for tree_offset in tree_offsets:
+    #         tree_reference = standard_file_test_trees._TREE_REFERENCES[tree_file_title]
+    #         expected_number_of_trees = tree_reference["num_trees"]
+    #         if tree_offset < 0:
+    #             if abs(tree_offset) > expected_number_of_trees:
+    #                 tree_offset = 0
+    #             else:
+    #                 tree_offset = expected_number_of_trees + tree_offset
+    #         with open(tree_filepath, "r") as tree_stream:
+    #             approaches = (
+    #                     ("read_from_path", tree_filepath),
+    #                     ("read_from_stream", tree_stream),
+    #                     ("read_from_string", tree_string),
+    #                     )
+    #             for method, src in approaches:
+    #                 tree = dendropy.Tree()
+    #                 tns0 = tree.taxon_namespace
+    #                 f = getattr(tree, method)
+    #                 trees_read = f(src,
+    #                         "newick",
+    #                         collection_offset=0,
+    #                         tree_offset=tree_offset,
+    #                         suppress_internal_node_taxa=True,
+    #                         suppress_leaf_node_taxa=False,
+    #                         rooting="default-unrooted")
+    #                 self.assertIs(tree.taxon_namespace, tns0)
+    #                 reference_tree_idx = tree_offset
+    #                 self.compare_to_reference_by_title_and_index(
+    #                         tree=tree,
+    #                         tree_file_title=tree_file_title,
+    #                         reference_tree_idx=tree_offset)
+
+    def test_tree_offset_without_collection_offset_newick_get(self):
+        # ``tree_offset`` alone (no ``collection_offset``) should still
+        # select the requested tree from the default (first) collection.
+        tree_file_title = 'dendropy-test-trees-n33-unrooted-x10a'
+        tree_filepath = self.schema_tree_filepaths[tree_file_title]
+        tree_reference = standard_file_test_trees._TREE_REFERENCES[tree_file_title]
+        expected_number_of_trees = tree_reference["num_trees"]
+        with open(tree_filepath, "r") as src:
+            tree_string = src.read()
+        with open(tree_filepath, "r") as tree_stream:
+            approaches = (
+                    (dendropy.Tree.get_from_path, tree_filepath),
+                    (dendropy.Tree.get_from_stream, tree_stream),
+                    (dendropy.Tree.get_from_string, tree_string),
+                    )
+            for approach in approaches:
+                tree_offset = 2
+                tree = approach[0](approach[1], "newick", tree_offset=tree_offset)
+                reference_tree_idx = tree_offset
+                self.compare_to_reference_by_title_and_index(
+                        tree=tree,
+                        tree_file_title=tree_file_title,
+                        reference_tree_idx=tree_offset)
+
+
+    # def test_tree_offset_without_collection_offset_newick_read(self):
+    #     tree_file_title = 'dendropy-test-trees-n33-unrooted-x10a'
+    #     tree_filepath = self.schema_tree_filepaths[tree_file_title]
+    #     approaches = (
+    #             "read_from_path",
+    #             "read_from_stream",
+    #             "read_from_string",
+    #             )
+    #     for approach in approaches:
+    #         tree = dendropy.Tree()
+    #         f = getattr(tree, approach)
+    #         with self.assertRaises(TypeError):
+    #             f(tree_filepath, "newick", collection_offset=None, tree_offset=0)
+
+    def test_out_of_range_tree_offset_newick_get(self):
+        # A tree offset equal to the tree count is one past the end and
+        # must raise IndexError for every source flavor.
+        tree_file_title = 'dendropy-test-trees-n33-unrooted-x10a'
+        tree_filepath = self.schema_tree_filepaths[tree_file_title]
+        tree_reference = standard_file_test_trees._TREE_REFERENCES[tree_file_title]
+        expected_number_of_trees = tree_reference["num_trees"]
+        with open(tree_filepath, "r") as src:
+            tree_string = src.read()
+        with open(tree_filepath, "r") as tree_stream:
+            approaches = (
+                    (dendropy.Tree.get_from_path, tree_filepath),
+                    (dendropy.Tree.get_from_stream, tree_stream),
+                    (dendropy.Tree.get_from_string, tree_string),
+                    )
+            for method, src in approaches:
+                with self.assertRaises(IndexError):
+                    method(src, "newick", collection_offset=0, tree_offset=expected_number_of_trees)
+
+    # def test_out_of_range_tree_offset_newick_read(self):
+    #     tree_file_title = 'dendropy-test-trees-n33-unrooted-x10a'
+    #     tree_filepath = self.schema_tree_filepaths[tree_file_title]
+    #     tree_reference = standard_file_test_trees._TREE_REFERENCES[tree_file_title]
+    #     expected_number_of_trees = tree_reference["num_trees"]
+    #     with open(tree_filepath, "r") as src:
+    #         tree_string = src.read()
+    #     with open(tree_filepath, "r") as tree_stream:
+    #         approaches = (
+    #                 ("read_from_path", tree_filepath),
+    #                 ("read_from_stream", tree_stream),
+    #                 ("read_from_string", tree_string),
+    #                 )
+    #         for method, src in approaches:
+    #             tree = dendropy.Tree()
+    #             f = getattr(tree, method)
+    #             with self.assertRaises(IndexError):
+    #                 f(src, "newick", collection_offset=0, tree_offset=expected_number_of_trees)
+
+    def test_out_of_range_collection_offset_newick_get(self):
+        # A NEWICK source holds a single collection, so collection_offset=1
+        # is out of range and must raise IndexError.
+        tree_file_title = 'dendropy-test-trees-n33-unrooted-x10a'
+        tree_filepath = self.schema_tree_filepaths[tree_file_title]
+        with open(tree_filepath, "r") as src:
+            tree_string = src.read()
+        with open(tree_filepath, "r") as tree_stream:
+            approaches = (
+                    (dendropy.Tree.get_from_path, tree_filepath),
+                    (dendropy.Tree.get_from_stream, tree_stream),
+                    (dendropy.Tree.get_from_string, tree_string),
+                    )
+            for method, src in approaches:
+                with self.assertRaises(IndexError):
+                    method(src, "newick", collection_offset=1, tree_offset=0)
+
+    # def test_out_of_range_collection_offset_newick_read(self):
+    #     tree_file_title = 'dendropy-test-trees-n33-unrooted-x10a'
+    #     tree_filepath = self.schema_tree_filepaths[tree_file_title]
+    #     with open(tree_filepath, "r") as src:
+    #         tree_string = src.read()
+    #     with open(tree_filepath, "r") as tree_stream:
+    #         approaches = (
+    #                 ("read_from_path", tree_filepath),
+    #                 ("read_from_stream", tree_stream),
+    #                 ("read_from_string", tree_string),
+    #                 )
+    #         for method, src in approaches:
+    #             tree = dendropy.Tree()
+    #             f = getattr(tree, method)
+    #             with self.assertRaises(IndexError):
+    #                 f(src, "newick", collection_offset=1, tree_offset=0)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_newick_reader_tree_iter.py b/dendropy/test/test_dataio_newick_reader_tree_iter.py
new file mode 100644
index 0000000..8ebe727
--- /dev/null
+++ b/dendropy/test/test_dataio_newick_reader_tree_iter.py
@@ -0,0 +1,39 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for general NEWICK tree iteration reading.
+"""
+
+import sys
+import unittest
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import standard_file_test_trees
+
+if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
+    from dendropy.utility.filesys import pre_py34_open as open
+
+class NewickTreeIteratorReaderDefaultTestCase(
+        standard_file_test_trees.NewickTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+    """
+    Fixture scaffolding for NEWICK tree-iterator reading; behavior is
+    provided by the ``NewickTestTreesChecker`` base.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        # Build the shared reference-tree fixtures once for the whole class.
+        standard_file_test_trees.NewickTestTreesChecker.create_class_fixtures(cls)
+
diff --git a/dendropy/test/test_dataio_newick_reader_tree_list.py b/dendropy/test/test_dataio_newick_reader_tree_list.py
new file mode 100644
index 0000000..07f92b5
--- /dev/null
+++ b/dendropy/test/test_dataio_newick_reader_tree_list.py
@@ -0,0 +1,205 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for general NEWICK tree list reading.
+"""
+
+import sys
+import unittest
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import standard_file_test_trees
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import pathmap
+from dendropy.test import base_newick_test_cases
+from dendropy.test import base_standard_trees_parsing_test_cases
+
+if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
+    from dendropy.utility.filesys import pre_py34_open as open
+
+class NewickTreeListReaderDefaultTestCase(
+        base_standard_trees_parsing_test_cases.StandardTreesParsingTestCase,
+        standard_file_test_trees.NewickTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+    """
+    NEWICK tree-list parsing with default fixture options; the test
+    methods themselves come from ``StandardTreesParsingTestCase``.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NewickTestTreesChecker.create_class_fixtures(cls)
+
+    # ## NOTE: tests are in base_standard_trees_parsing_test_cases.StandardTreesParsingTestCase, !! ##
+
+class NewickTreeListReaderSuppressedInternalSuppressedLeafTaxaTestCase(
+        base_newick_test_cases.NewickTreeListReaderTaxaManagementBaseTestCase,
+        standard_file_test_trees.NewickTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+    """
+    Taxa-management tests with taxa suppressed on both internal and
+    leaf nodes.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NewickTestTreesChecker.create_class_fixtures(cls,
+                suppress_internal_node_taxa=True,
+                suppress_leaf_node_taxa=True,
+                )
+
+class NewickTreeListReaderUnsuppressedInternalSuppressedLeafTaxaTestCase(
+        base_newick_test_cases.NewickTreeListReaderTaxaManagementBaseTestCase,
+        standard_file_test_trees.NewickTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+    """
+    Taxa-management tests with internal-node taxa created but leaf-node
+    taxa suppressed.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NewickTestTreesChecker.create_class_fixtures(cls,
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=True,
+                )
+
+class NewickTreeListReaderSuppressedInternalUnsuppressedLeafTaxaTestCase(
+        base_newick_test_cases.NewickTreeListReaderTaxaManagementBaseTestCase,
+        standard_file_test_trees.NewickTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+    """
+    Taxa-management tests with internal-node taxa suppressed but
+    leaf-node taxa created.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NewickTestTreesChecker.create_class_fixtures(cls,
+                suppress_internal_node_taxa=True,
+                suppress_leaf_node_taxa=False,
+                )
+
+class NewickTreeListReaderUnsuppressedInternalUnsuppressedLeafTaxaTestCase(
+        base_newick_test_cases.NewickTreeListReaderTaxaManagementBaseTestCase,
+        standard_file_test_trees.NewickTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+    """
+    Taxa-management tests with taxa created on both internal and leaf
+    nodes.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NewickTestTreesChecker.create_class_fixtures(cls,
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False,
+                )
+
+
+class NewickTreeListMetadataTest(
+        base_newick_test_cases.NewickTreeListReaderTaxaManagementBaseTestCase,
+        standard_file_test_trees.NewickTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+    """
+    Tests extraction of comment-embedded metadata annotations when
+    reading NEWICK tree lists.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NewickTestTreesChecker.create_class_fixtures(cls,
+                is_metadata_extracted=True,
+                )
+
+    def test_read_metadata(self):
+        tree_file_titles = [
+            "dendropy-test-trees-multifurcating-rooted-annotated",
+            "dendropy-test-trees-n33-unrooted-annotated-x10a",
+        ]
+        for tree_file_title in tree_file_titles:
+            tree_filepath = standard_file_test_trees._TREE_FILEPATHS["newick"][tree_file_title]
+            with open(tree_filepath, "r") as src:
+                tree_string = src.read()
+            with open(tree_filepath, "r") as tree_stream:
+                # Exercise all three source flavors: path, stream, string.
+                approaches = (
+                        (dendropy.TreeList.get_from_path, tree_filepath),
+                        (dendropy.TreeList.get_from_stream, tree_stream),
+                        (dendropy.TreeList.get_from_string, tree_string),
+                        )
+                for method, src in approaches:
+                    tree_list = method(src,
+                            "newick",
+                            extract_comment_metadata=True)
+                    self.verify_standard_trees(
+                            tree_list=tree_list,
+                            tree_file_title=tree_file_title)
+
+class NewickTreeListRootingAndMetadataTest(
+        standard_file_test_trees.StandardTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+    """
+    Checks that rooting tokens ([&R]/[&U]), tree weights ([&W ...]), and
+    metadata comments attach to the correct (following) tree statement,
+    and that comments around empty semicolon statements are discarded.
+    """
+
+    def test_correct_rooting_weighting_and_metadata_association(self):
+        # Eleven semicolon-terminated statements, of which only three are
+        # real trees; the "wrong1" annotations trail empty statements and
+        # must not be picked up by any tree.
+        tree_str = """\
+                ;;;;
+                [&color=red][&W 0.25][&R](a,(b,(c,d)))[&W 0.1][&color=wrong1];
+                [&W 0.1][&color=wrong1][&U];[&W 0.1][&color=wrong1];[&W 0.1][&color=wrong1];
+                [&color=red][&W 0.25][&R](a,(b,(c,d)))[&W 0.1][&color=wrong1];
+                [&W 0.1][&color=wrong1][&U];[&W 0.1][&color=wrong1];[&W 0.1][&color=wrong1];
+                (a,(b,(c,d)));;;
+        """
+        trees = dendropy.TreeList.get_from_string(tree_str,
+                "newick",
+                extract_comment_metadata=True,
+                store_tree_weights=True)
+        self.assertEqual(len(trees.taxon_namespace), 4)
+        tax_labels = [t.label for t in trees.taxon_namespace]
+        # NOTE(review): both arguments are sets; assertEqual would express
+        # the intent more directly than assertSequenceEqual.
+        self.assertSequenceEqual(set(tax_labels), set(["a", "b", "c", "d"]))
+        self.assertEqual(len(trees), 3)
+        for tree_idx, tree in enumerate(trees):
+            if tree_idx < 2:
+                # First two trees carry explicit [&R], weight, and color.
+                self.assertIs(tree.is_rooted, True)
+                self.assertEqual(tree.weight, 0.25)
+                self.assertEqual(tree.annotations.get_value("color", None), "red")
+            else:
+                # Bare third tree: rooting undefined, default weight, no metadata.
+                self.assertIs(tree.is_rooted, None)
+                self.assertEqual(tree.weight, 1.0)
+                self.assertFalse(tree.has_annotations)
+
+class NewickTreeListReaderMultipleRedundantSemiColons(
+        curated_test_tree.CuratedTestTree,
+        dendropytest.ExtendedTestCase):
+    """
+    Checks that runs of redundant semicolons and whitespace between tree
+    statements are tolerated when reading a NEWICK tree list.
+    """
+
+    def test_multiple_redundant_semicolons(self):
+        tree_str = self.get_newick_string()
+        # The "[(a,(b,c)]" run is inside a bracketed NEWICK comment and so
+        # must be ignored by the parser.
+        s = ";;;;;{tree_str};;; ;\n; \n ;       ;;{tree_str};;;  [(a,(b,c)];  ; ;;".format(tree_str=tree_str)
+        trees = dendropy.TreeList.get_from_string(s,
+                "newick",
+                suppress_internal_node_taxa=True,
+                suppress_leaf_node_taxa=False,
+                suppress_edge_lengths=False)
+        # Only the two embedded copies of the curated tree should parse.
+        self.assertEqual(len(trees), 2)
+        for t in trees:
+            self.verify_curated_tree(t,
+                suppress_internal_node_taxa=True,
+                suppress_leaf_node_taxa=False,
+                suppress_edge_lengths=False)
+
+class NewickTreeListReaderTaxonNamespaceTest(dendropytest.ExtendedTestCase):
+
+    def test_shared_taxon_namespace(self):
+        tree_filenames = [
+                ("pythonidae.reference-trees.newick", 33), # ntax = 33
+                ("pythonidae.reference-trees.newick", 33), # ntax = 33
+                ("bird_orders.newick", 56), # ntax = 23
+                ("pythonidae.reference-trees.taxon-numbers-only.newick", 89), # ntax = 33
+                ("pythonidae.reference-trees.newick", 89), # ntax = 33
+                ("bird_orders.newick", 89), # ntax = 23
+        ]
+        common_taxon_namespace = dendropy.TaxonNamespace()
+        prev_expected_ntax = 0
+        for tree_filename, expected_ntax in tree_filenames:
+            self.assertEqual(len(common_taxon_namespace), prev_expected_ntax)
+            tree_filepath = pathmap.tree_source_path(tree_filename)
+            for reps in range(3):
+                tree_list = dendropy.TreeList.get_from_path(
+                        pathmap.tree_source_path(tree_filename),
+                        "newick",
+                        taxon_namespace=common_taxon_namespace)
+                self.assertEqual(len(common_taxon_namespace), expected_ntax)
+            prev_expected_ntax = expected_ntax
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_newick_writer.py b/dendropy/test/test_dataio_newick_writer.py
new file mode 100644
index 0000000..f39e898
--- /dev/null
+++ b/dendropy/test/test_dataio_newick_writer.py
@@ -0,0 +1,444 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for NEWICK writing.
+"""
+
+import collections
+import unittest
+import dendropy
+import re
+from dendropy.test.support import pathmap
+from dendropy.test.support import standard_file_test_trees
+from dendropy.test.support import compare_and_validate
+from dendropy.test.support import dendropytest
+
+def newick_tree_writer_test_tree(
+        has_leaf_node_taxa=True,
+        has_leaf_node_labels=True,
+        has_internal_node_taxa=True,
+        has_internal_node_labels=True,
+        has_edge_lengths=True,
+        label_pool=None,
+        label_separator=' ',
+        ):
+    anodes = set()
+    tree = dendropy.Tree()
+    tns = tree.taxon_namespace
+    a = tree.seed_node.new_child()
+    b = tree.seed_node.new_child()
+    a1 = a.new_child()
+    a2 = a.new_child()
+    b1 = b.new_child()
+    b2 = b.new_child()
+    anodes.add(tree.seed_node)
+    anodes.add(a)
+    anodes.add(b)
+    anodes.add(a1)
+    anodes.add(a2)
+    anodes.add(b1)
+    anodes.add(b2)
+    if label_pool is None:
+        label_pool = [chr(i) for i in range(ord('a'), ord('z')+1)]
+    labeller = iter(label_pool)
+    for nd_idx, nd in enumerate(anodes):
+        expected_label_parts = collections.defaultdict(list)
+        is_leaf = nd.is_leaf()
+        if ( (is_leaf and has_leaf_node_taxa)
+                or ((not is_leaf) and has_internal_node_taxa) ):
+            label = next(labeller)
+            tx = tree.taxon_namespace.require_taxon(label=label)
+            nd.taxon = tx
+            expected_label_parts[ (False, True ) ].append(label)
+            expected_label_parts[ (False, False) ].append(label)
+        if ( (is_leaf and has_leaf_node_labels)
+                or ((not is_leaf) and has_internal_node_labels) ):
+            label = next(labeller)
+            nd.label = label
+            expected_label_parts[ (True,  False) ].append(label)
+            expected_label_parts[ (False, False) ].append(label)
+        nd.expected_label = collections.defaultdict(lambda: None)
+        for k in expected_label_parts:
+            nd.expected_label[k] = label_separator.join(expected_label_parts[k])
+        if has_edge_lengths:
+            nd.edge.length = nd_idx
+    return tree
+
+class NewickTreeWriterDefaultTest(
+        standard_file_test_trees.NewickTestTreesChecker,
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+    """
+    Round-trip test: write an annotated reference tree with explicitly
+    spelled-out writer options, re-read it, and compare against the
+    stored reference tree.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NewickTestTreesChecker.create_class_fixtures(cls,
+                is_metadata_extracted=True,
+                )
+
+    def test_roundtrip_full(self):
+        tree_file_title = 'dendropy-test-trees-n33-unrooted-annotated-x10a'
+        tree_filepath = standard_file_test_trees._TREE_FILEPATHS["newick"][tree_file_title]
+        tree1 = dendropy.Tree.get_from_path(
+                tree_filepath,
+                "newick",
+                extract_comment_metadata=True,
+                store_tree_weights=True,
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False,
+        )
+        # Writer options, each annotated with its documented default;
+        # annotations and item comments are deliberately NOT suppressed so
+        # the round trip preserves metadata.
+        kwargs = {
+            "suppress_leaf_taxon_labels"     :  False , # default: False ,
+            "suppress_leaf_node_labels"      :  True  , # default: True  ,
+            "suppress_internal_taxon_labels" :  False , # default: False ,
+            "suppress_internal_node_labels"  :  True  , # default: False ,
+            "suppress_rooting"               :  False , # default: False ,
+            "suppress_edge_lengths"          :  False , # default: False ,
+            "unquoted_underscores"           :  False , # default: False ,
+            "preserve_spaces"                :  False , # default: False ,
+            "store_tree_weights"             :  False , # default: False ,
+            "suppress_annotations"           :  False , # default: True  ,
+            "annotations_as_nhx"             :  False , # default: False ,
+            "suppress_item_comments"         :  False , # default: True  ,
+            "node_label_element_separator"   :  ' '   , # default: ' '   ,
+            "node_label_compose_fn"        :  None  , # default: None  ,
+            "edge_label_compose_fn"        :  None  , # default: None  ,
+        }
+        s = self.write_out_validate_equal_and_return(
+                tree1, "newick", kwargs)
+        tree2 = dendropy.Tree.get_from_string(
+                s,
+                "newick",
+                extract_comment_metadata=True,
+                store_tree_weights=True,
+                suppress_internal_node_taxa=True,
+                suppress_leaf_node_taxa=False,
+        )
+        self.compare_to_reference_by_title_and_index(
+            tree=tree2,
+            tree_file_title=tree_file_title,
+            reference_tree_idx=0)
+            # suppress_internal_node_taxa=False,
+            # suppress_leaf_node_taxa=False,
+            # is_metadata_extracted=True,
+            # is_coerce_metadata_values_to_string=True,
+            # is_distinct_nodes_and_edges_representation=False)
+
+class NewickTreeWriterGeneralOptionsTests(
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+
+    def test_node_labeling(self):
+        """
+        Crosses fixture label configurations against all writer label
+        suppression flags and checks the labels seen after re-reading.
+        """
+        # Outer loops: which taxa/labels the fixture tree carries.
+        for has_leaf_node_taxa in (True, False):
+            for has_leaf_node_labels in (True, False):
+                for has_internal_node_taxa in (True, False):
+                    for has_internal_node_labels in (True, False):
+                        for label_separator in (' ', '$$$'):
+                            tree = newick_tree_writer_test_tree(
+                                    has_leaf_node_taxa=has_leaf_node_taxa,
+                                    has_leaf_node_labels=has_leaf_node_labels,
+                                    has_internal_node_taxa=has_internal_node_taxa,
+                                    has_internal_node_labels=has_internal_node_labels,
+                                    label_separator=label_separator,
+                                    )
+                            # Inner loops: writer-side suppression options.
+                            for suppress_leaf_taxon_labels in (True, False):
+                                for suppress_leaf_node_labels in (True, False):
+                                    for suppress_internal_taxon_labels in (True, False):
+                                        for suppress_internal_node_labels in (True, False):
+                                            kwargs = {
+                                                    "suppress_leaf_taxon_labels"     : suppress_leaf_taxon_labels,
+                                                    "suppress_leaf_node_labels"      : suppress_leaf_node_labels,
+                                                    "suppress_internal_taxon_labels" : suppress_internal_taxon_labels,
+                                                    "suppress_internal_node_labels"  : suppress_internal_node_labels,
+                                                    "node_label_element_separator"   : label_separator,
+                                                    }
+                                            s = self.write_out_validate_equal_and_return(
+                                                    tree, "newick", kwargs)
+                                            # Re-read with all taxa suppressed so labels
+                                            # land on node.label for direct comparison.
+                                            tree2 = dendropy.Tree.get_from_string(
+                                                    s,
+                                                    "newick",
+                                                    extract_comment_metadata=True,
+                                                    store_tree_weights=True,
+                                                    suppress_internal_node_taxa=True,
+                                                    suppress_leaf_node_taxa=True,
+                                            )
+                                            nodes1 = [nd for nd in tree]
+                                            nodes2 = [nd for nd in tree2]
+                                            self.assertEqual(len(nodes1), len(nodes2))
+                                            for nd1, nd2 in zip(nodes1, nodes2):
+                                                is_leaf = nd1.is_leaf()
+                                                self.assertEqual(nd2.is_leaf(), is_leaf)
+                                                # expected_label is keyed by the
+                                                # (suppress_taxon, suppress_node) flag pair.
+                                                if is_leaf:
+                                                    self.assertEqual(nd2.label,
+                                                            nd1.expected_label[(suppress_leaf_taxon_labels, suppress_leaf_node_labels)])
+                                                else:
+                                                    self.assertEqual(nd2.label,
+                                                            nd1.expected_label[ (suppress_internal_taxon_labels, suppress_internal_node_labels) ])
+
+    def test_rooting_token(self):
+        tree1 = newick_tree_writer_test_tree()
+        for rooted_state in (None, True, False):
+            tree1.is_rooted = rooted_state
+            for suppress_rooting in (True, False):
+                kwargs = {
+                        "suppress_rooting": suppress_rooting,
+                }
+                s = self.write_out_validate_equal_and_return(
+                        tree1, "newick", kwargs)
+                if suppress_rooting:
+                    self.assertFalse(s.startswith("[&R]") or s.startswith("[&U]"))
+                else:
+                    if rooted_state is True:
+                        self.assertTrue(s.startswith("[&R]"))
+                    elif rooted_state is False:
+                        self.assertTrue(s.startswith("[&U]"))
+                    else:
+                        self.assertFalse(s.startswith("[&R]") or s.startswith("[&U]"))
+                tree2 = dendropy.Tree.get_from_string(
+                        s, "newick", rooting=None)
+                if suppress_rooting:
+                    self.assertTrue(tree2.is_rootedness_undefined)
+                else:
+                    if rooted_state is True:
+                        self.assertTrue(tree2.is_rooted)
+                        self.assertFalse(tree2.is_unrooted)
+                    elif rooted_state is False:
+                        self.assertFalse(tree2.is_rooted)
+                        self.assertTrue(tree2.is_unrooted)
+                    else:
+                        self.assertTrue(tree2.is_rootedness_undefined)
+
+    def test_edge_lengths(self):
+        tree1 = newick_tree_writer_test_tree()
+        for suppress_edge_lengths in (True, False):
+            kwargs = {
+                    "suppress_edge_lengths": suppress_edge_lengths,
+            }
+            s = self.write_out_validate_equal_and_return(
+                    tree1, "newick", kwargs)
+            tree2 = dendropy.Tree.get_from_string(
+                    s, "newick", rooting=None)
+            nodes1 = [nd for nd in tree1]
+            nodes2 = [nd for nd in tree2]
+            self.assertEqual(len(nodes1), len(nodes2))
+            for nd1, nd2 in zip(nodes1, nodes2):
+                if suppress_edge_lengths:
+                    self.assertIs(nd2.edge.length, None)
+                else:
+                    self.assertEqual(nd2.edge.length, nd1.edge.length)
+
+    def test_unquoted_underscores(self):
+        tree1 = newick_tree_writer_test_tree(
+                has_leaf_node_labels=False,
+                has_internal_node_labels=False)
+        for taxon in tree1.taxon_namespace:
+            taxon.label = "{label}_{label}".format(label=taxon.label)
+        for unquoted_underscores in (True, False):
+            kwargs = {
+                    "unquoted_underscores": unquoted_underscores,
+            }
+            s = self.write_out_validate_equal_and_return(
+                    tree1, "newick", kwargs)
+            for preserve_underscores in (True, False):
+                tree2 = dendropy.Tree.get_from_string(
+                        s,
+                        "newick",
+                        suppress_internal_node_taxa=False,
+                        preserve_underscores=preserve_underscores)
+                nodes1 = [nd for nd in tree1]
+                nodes2 = [nd for nd in tree2]
+                self.assertEqual(len(nodes1), len(nodes2))
+                for nd1, nd2 in zip(nodes1, nodes2):
+                    original_label = nd1.taxon.label
+                    if unquoted_underscores:
+                        if preserve_underscores:
+                            expected_label = original_label
+                        else:
+                            expected_label = original_label.replace("_", " ")
+                    else:
+                        expected_label = original_label
+                    self.assertEqual(nd2.taxon.label, expected_label)
+
+    def test_preserve_spaces(self):
+        tree1 = newick_tree_writer_test_tree(
+                has_leaf_node_labels=False,
+                has_internal_node_labels=False)
+        for taxon in tree1.taxon_namespace:
+            taxon.label = "{label} {label}".format(label=taxon.label)
+        for preserve_spaces in (True, False):
+            kwargs = {
+                    "preserve_spaces": preserve_spaces,
+            }
+            s = self.write_out_validate_equal_and_return(
+                    tree1, "newick", kwargs)
+            tree2 = dendropy.Tree.get_from_string(
+                    s,
+                    "newick",
+                    suppress_internal_node_taxa=False,
+                    preserve_underscores=True)
+            nodes1 = [nd for nd in tree1]
+            nodes2 = [nd for nd in tree2]
+            self.assertEqual(len(nodes1), len(nodes2))
+            for nd1, nd2 in zip(nodes1, nodes2):
+                original_label = nd1.taxon.label
+                if preserve_spaces:
+                    expected_label = original_label
+                else:
+                    expected_label = original_label.replace(" ", "_")
+                self.assertEqual(nd2.taxon.label, expected_label)
+
+    def test_store_tree_weights(self):
+        tree1 = newick_tree_writer_test_tree(
+                has_leaf_node_labels=False,
+                has_internal_node_labels=False)
+        for store_tree_weights in (True, False):
+            for weight in (None, "23.0", "1/2", 1.0):
+                tree1.weight = weight
+                kwargs = {
+                        "store_tree_weights": store_tree_weights,
+                }
+                s = self.write_out_validate_equal_and_return(
+                        tree1, "newick", kwargs)
+                tree2 = dendropy.Tree.get_from_string(
+                        s,
+                        "newick",
+                        store_tree_weights=True)
+                if store_tree_weights and weight is not None:
+                    self.assertTrue("[&W " in s)
+                    try:
+                        w = float(weight)
+                    except ValueError:
+                        w = eval("/".join(str(float(w)) for w in weight.split("/")))
+                    self.assertEqual(tree2.weight, w)
+                else:
+                    self.assertFalse("[&W " in s)
+                    self.assertEqual(tree2.weight, 1.0) # default weight
+
+    def test_suppress_annotations(self):
+        tree1 = dendropy.Tree()
+        a1 = tree1.seed_node.new_child()
+        a2 = tree1.seed_node.new_child()
+        tree1.annotations.add_new("t", 1)
+        for nd in tree1:
+            nd.annotations.add_new("a", 1)
+            nd.edge.annotations.add_new("b", 2)
+        for suppress_annotations in (True, False):
+            for annotations_as_nhx in (True, False):
+                kwargs = {
+                        "suppress_annotations"   :  suppress_annotations,
+                        "annotations_as_nhx"     :  annotations_as_nhx,
+                }
+                s = self.write_out_validate_equal_and_return(
+                        tree1, "newick", kwargs)
+                tree2 = dendropy.Tree.get_from_string(
+                        s,
+                        "newick",
+                        extract_comment_metadata=True)
+                if suppress_annotations:
+                    self.assertFalse(tree2.has_annotations)
+                    for nd in tree2:
+                        self.assertFalse(nd.has_annotations)
+                else:
+                    if annotations_as_nhx:
+                        self.assertEqual(s.count("[&&NHX"), 7)
+                        # self.assertEqual(len(re.findall(r"\[&&NHX", s)), 7)
+                    else:
+                        self.assertEqual(s.count("[&&NHX"), 0)
+                        self.assertEqual(s.count("[&"), 7)
+                        # self.assertEqual(len(re.findall(r"\[&&NHX", s)), 0)
+                        # self.assertEqual(len(re.findall(r"\[&.*?\]", s)), 7)
+                    self.assertTrue(tree2.has_annotations)
+                    self.assertEqual(tree2.annotations.get_value("t"), '1')
+                    for nd in tree2:
+                        self.assertTrue(nd.has_annotations)
+                        self.assertEqual(nd.annotations.get_value("a"), '1')
+                        self.assertEqual(nd.annotations.get_value("b"), '2')
+
+    def test_suppress_item_comments(self):
+        tree1 = dendropy.Tree()
+        a1 = tree1.seed_node.new_child()
+        a2 = tree1.seed_node.new_child()
+        tree1.comments.append("t1")
+        for nd in tree1:
+            nd.comments.append("n1")
+            nd.edge.comments.append("e1")
+        for suppress_item_comments in (True, False):
+            kwargs = {
+                    "suppress_item_comments"   :  suppress_item_comments,
+            }
+            s = self.write_out_validate_equal_and_return(
+                    tree1, "newick", kwargs)
+            tree2 = dendropy.Tree.get_from_string(
+                    s,
+                    "newick",
+                    extract_comment_metadata=False)
+            if suppress_item_comments:
+                self.assertEqual(tree2.comments, [])
+                for nd in tree2:
+                    self.assertEqual(nd.comments, [])
+                    self.assertEqual(nd.edge.comments, [])
+            else:
+                self.assertEqual(tree2.comments, ["t1"])
+                for nd in tree2:
+                    self.assertEqual(nd.comments, ["n1", "e1"])
+
+    def test_node_label_compose_fn(self):
+        tree1 = dendropy.Tree()
+        a1 = tree1.seed_node.new_child(label="a1")
+        a1.taxon = tree1.taxon_namespace.require_taxon("hula")
+        a2 = tree1.seed_node.new_child(label="a1")
+        a2.taxon = tree1.taxon_namespace.require_taxon("hoop")
+        f = lambda x: "zzz"
+        kwargs = {
+                "suppress_leaf_taxon_labels"     :  False ,
+                "suppress_leaf_node_labels"      :  False ,
+                "suppress_internal_taxon_labels" :  False ,
+                "suppress_internal_node_labels"  :  False ,
+                "node_label_compose_fn"   :  f,
+        }
+        s = self.write_out_validate_equal_and_return(
+                tree1, "newick", kwargs)
+        tree2 = dendropy.Tree.get_from_string(
+                s,
+                "newick",
+                suppress_leaf_node_taxa=True,
+                suppress_internal_node_taxa=True)
+        for nd in tree2:
+            self.assertEqual(nd.label, "zzz")
+
+    def test_edge_label_compose_fn(self):
+        tree1 = dendropy.Tree()
+        tree1.seed_node.edge.length = 1
+        a1 = tree1.seed_node.new_child(label="a1", edge_length=1)
+        a2 = tree1.seed_node.new_child(label="a1", edge_length=1)
+        f = lambda x: 1000
+        kwargs = {
+                "edge_label_compose_fn"   :  f,
+        }
+        s = self.write_out_validate_equal_and_return(
+                tree1, "newick", kwargs)
+        tree2 = dendropy.Tree.get_from_string(
+                s,
+                "newick",
+                suppress_leaf_node_taxa=True,
+                suppress_internal_node_taxa=True)
+        for nd in tree2:
+            self.assertEqual(nd.edge.length, 1000)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_nexml_reader_chars.py b/dendropy/test/test_dataio_nexml_reader_chars.py
new file mode 100644
index 0000000..717a79e
--- /dev/null
+++ b/dendropy/test/test_dataio_nexml_reader_chars.py
@@ -0,0 +1,170 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for general NeXML character matrix reading.
+"""
+
+import unittest
+import dendropy
+from dendropy.utility import error
+from dendropy.test.support import dendropytest
+from dendropy.test.support import pathmap
+from dendropy.test.support import standard_file_test_chars
+from dendropy.test.support import compare_and_validate
+from dendropy.dataio import nexmlreader
+from dendropy.utility import messaging
+_LOG = messaging.get_logger(__name__)
+
+class NexmlCharactersReaderDnaTestCase(
+        standard_file_test_chars.DnaTestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build()
+
+    def test_basic_nexml(self):
+        src_filenames = [
+                "standard-test-chars-dna.as_cells.nexml",
+                "standard-test-chars-dna.as_seqs.nexml",
+                ]
+        for src_idx, src_filename in enumerate(src_filenames):
+            # print(src_idx, src_filename)
+            src_path = pathmap.char_source_path(src_filename)
+            self.verify_get_from(
+                    matrix_type=dendropy.DnaCharacterMatrix,
+                    src_filepath=src_path,
+                    schema="nexml",
+                    factory_kwargs={},
+                    check_taxon_annotations=False,
+                    check_matrix_annotations=False,
+                    check_sequence_annotations=False,
+                    check_column_annotations=False,
+                    check_cell_annotations=False)
+
+class NexmlCharactersReaderRnaTestCase(
+        standard_file_test_chars.RnaTestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build()
+
+    def test_basic_nexml(self):
+        src_filenames = [
+                "standard-test-chars-rna.as_cells.nexml",
+                "standard-test-chars-rna.as_seqs.nexml",
+                ]
+        for src_idx, src_filename in enumerate(src_filenames):
+            # print(src_idx, src_filename)
+            src_path = pathmap.char_source_path(src_filename)
+            self.verify_get_from(
+                    matrix_type=dendropy.RnaCharacterMatrix,
+                    src_filepath=src_path,
+                    schema="nexml",
+                    factory_kwargs={},
+                    check_taxon_annotations=False,
+                    check_matrix_annotations=False,
+                    check_sequence_annotations=False,
+                    check_column_annotations=False,
+                    check_cell_annotations=False)
+
+class NexmlCharactersReaderProteinTestCase(
+        standard_file_test_chars.ProteinTestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build()
+
+    def test_basic_nexml(self):
+        src_filenames = [
+                "standard-test-chars-protein.as_cells.nexml",
+                "standard-test-chars-protein.as_seqs.nexml",
+                ]
+        for src_idx, src_filename in enumerate(src_filenames):
+            # print(src_idx, src_filename)
+            src_path = pathmap.char_source_path(src_filename)
+            self.verify_get_from(
+                    matrix_type=dendropy.ProteinCharacterMatrix,
+                    src_filepath=src_path,
+                    schema="nexml",
+                    factory_kwargs={},
+                    check_taxon_annotations=False,
+                    check_matrix_annotations=False,
+                    check_sequence_annotations=False,
+                    check_column_annotations=False,
+                    check_cell_annotations=False)
+
+class NexmlCharactersContinuousTestCase(
+        standard_file_test_chars.ContinuousTestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build()
+
+    def test_basic_nexml(self):
+        src_filenames = [
+                "standard-test-chars-continuous.as_cells.nexml",
+                "standard-test-chars-continuous.as_seqs.nexml",
+                ]
+        for src_idx, src_filename in enumerate(src_filenames):
+            # print(src_idx, src_filename)
+            src_path = pathmap.char_source_path(src_filename)
+            self.verify_get_from(
+                    matrix_type=dendropy.ContinuousCharacterMatrix,
+                    src_filepath=src_path,
+                    schema="nexml",
+                    factory_kwargs={},
+                    check_taxon_annotations=False,
+                    check_matrix_annotations=False,
+                    check_sequence_annotations=False,
+                    check_column_annotations=False,
+                    check_cell_annotations=False)
+
+class NexmlStandardCharacters01234TestCase(
+        standard_file_test_chars.Standard01234TestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build()
+
+    def test_basic_nexml(self):
+        src_filenames = [
+                "standard-test-chars-generic.as_cells.nexml",
+                "standard-test-chars-generic.as_seqs.nexml",
+                ]
+        for src_idx, src_filename in enumerate(src_filenames):
+            # print(src_idx, src_filename)
+            src_path = pathmap.char_source_path(src_filename)
+            self.verify_get_from(
+                    matrix_type=dendropy.StandardCharacterMatrix,
+                    src_filepath=src_path,
+                    schema="nexml",
+                    factory_kwargs={},
+                    check_taxon_annotations=False,
+                    check_matrix_annotations=False,
+                    check_sequence_annotations=False,
+                    check_column_annotations=False,
+                    check_cell_annotations=False)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_nexml_reader_tree_list.py b/dendropy/test/test_dataio_nexml_reader_tree_list.py
new file mode 100644
index 0000000..3fc58e5
--- /dev/null
+++ b/dendropy/test/test_dataio_nexml_reader_tree_list.py
@@ -0,0 +1,70 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for general NeXML tree list reading.
+"""
+
+import sys
+import unittest
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import standard_file_test_trees
+from dendropy.test import base_standard_trees_parsing_test_cases
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import pathmap
+if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
+    from dendropy.utility.filesys import pre_py34_open as open
+
+class NexmlStandardTreeParsingTestCase(
+        base_standard_trees_parsing_test_cases.StandardTreesParsingTestCase,
+        standard_file_test_trees.NexmlTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NexmlTestTreesChecker.create_class_fixtures(cls)
+
+    ## NOTE: many tests are in standard_file_test_trees.StandardTreeParsingTestCase !! ##
+
+    def test_collection_comments_and_annotations(self):
+        for tree_file_title in (
+                "dendropy-test-trees-multifurcating-rooted-annotated",
+                "dendropy-test-trees-n33-unrooted-annotated-x10a",
+                ):
+            tree_reference = dict(standard_file_test_trees._TREE_REFERENCES[tree_file_title])
+            expected_non_metadata_comments = tree_reference["tree_list_comments"]
+            expected_metadata = tree_reference["tree_list_metadata"]
+            tree_filepath = self.schema_tree_filepaths[tree_file_title]
+            tree_list = dendropy.TreeList.get_from_path(
+                    tree_filepath,
+                    "nexml")
+            expected_comments = expected_non_metadata_comments
+            self.compare_annotations_to_json_metadata_dict(
+                    tree_list,
+                    expected_metadata)
+            if self.__class__.is_check_comments:
+                self.assertEqual(len(tree_list.comments), len(expected_comments))
+                self.assertEqual(set(tree_list.comments), set(expected_comments))
+            self.verify_standard_trees(
+                    tree_list=tree_list,
+                    tree_file_title=tree_file_title,
+                    tree_offset=0)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_nexml_writer_chars.py b/dendropy/test/test_dataio_nexml_writer_chars.py
new file mode 100644
index 0000000..e8eb3f5
--- /dev/null
+++ b/dendropy/test/test_dataio_nexml_writer_chars.py
@@ -0,0 +1,83 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for NeXML character matrix writing.
+"""
+
+import unittest
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import compare_and_validate
+from dendropy.test.support import pathmap
+from dendropy.test.support import standard_file_test_chars
+
+class NexmlWriterCharactersTestCase(
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.check_taxon_annotations = False
+        cls.check_matrix_annotations = False
+        cls.check_sequence_annotations = False
+        cls.check_column_annotations = False
+        cls.check_cell_annotations = False
+        standard_file_test_chars.DnaTestChecker.build()
+        standard_file_test_chars.RnaTestChecker.build()
+        standard_file_test_chars.ProteinTestChecker.build()
+        standard_file_test_chars.Standard01234TestChecker.build()
+        standard_file_test_chars.ContinuousTestChecker.build()
+        cls.srcs = (
+                ("standard-test-chars-dna.as_cells.nexml", dendropy.DnaCharacterMatrix, standard_file_test_chars.DnaTestChecker),
+                ("standard-test-chars-rna.as_cells.nexml", dendropy.RnaCharacterMatrix, standard_file_test_chars.RnaTestChecker),
+                ("standard-test-chars-protein.as_cells.nexml", dendropy.ProteinCharacterMatrix, standard_file_test_chars.ProteinTestChecker),
+                ("standard-test-chars-generic.as_cells.nexml", dendropy.StandardCharacterMatrix, standard_file_test_chars.Standard01234TestChecker),
+                ("standard-test-chars-continuous.as_cells.nexml", dendropy.ContinuousCharacterMatrix, standard_file_test_chars.ContinuousTestChecker),
+                )
+
+    def verify_char_matrix(self, char_matrix, src_matrix_checker_type):
+        self.assertEqual(type(char_matrix), src_matrix_checker_type.matrix_type)
+        if src_matrix_checker_type.matrix_type is dendropy.StandardCharacterMatrix:
+            src_matrix_checker_type.create_class_fixtures_label_sequence_map_based_on_state_alphabet(src_matrix_checker_type,
+                    char_matrix.default_state_alphabet)
+        standard_file_test_chars.general_char_matrix_checker(
+                self,
+                char_matrix,
+                src_matrix_checker_type,
+                check_taxon_annotations=self.check_taxon_annotations,
+                check_matrix_annotations=self.check_matrix_annotations,
+                check_sequence_annotations=self.check_sequence_annotations,
+                check_column_annotations=self.check_column_annotations,
+                check_cell_annotations=self.check_cell_annotations,)
+
+    def test_basic_nexml_chars(self):
+        for src_filename, matrix_type, src_matrix_checker_type in self.__class__.srcs:
+            src_path = pathmap.char_source_path(src_filename)
+            d1 = matrix_type.get_from_path(src_path, "nexml")
+            # for markup_as_sequences in (True, False):
+            for markup_as_sequences in (False,):
+                s = self.write_out_validate_equal_and_return(
+                        d1, "nexml", {"markup_as_sequences": markup_as_sequences})
+                # if not markup_as_sequences:
+                #     print(s)
+                d2 = matrix_type.get_from_string(s, "nexml")
+                self.verify_char_matrix(d2, src_matrix_checker_type)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_nexml_writer_trees.py b/dendropy/test/test_dataio_nexml_writer_trees.py
new file mode 100644
index 0000000..06f4bae
--- /dev/null
+++ b/dendropy/test/test_dataio_nexml_writer_trees.py
@@ -0,0 +1,100 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for NeXML tree writing.
+"""
+
+import collections
+import unittest
+import dendropy
+import re
+from dendropy.test.support import pathmap
+from dendropy.test.support import compare_and_validate
+from dendropy.test.support import dendropytest
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import standard_file_test_trees
+
+class NexmlTreeWriterTests(
+        curated_test_tree.CuratedTestTree,
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+
+    def test_simple(self):
+        tree1, all_nodes, leaf_nodes, internal_nodes = self.get_tree(
+                    suppress_internal_node_taxa=False,
+                    suppress_leaf_node_taxa=False
+                )
+        s = tree1.as_string("nexml")
+        tree2 = dendropy.Tree.get_from_string(
+                s, "nexml",
+                )
+        self.verify_curated_tree(tree2)
+
+class NexmlTreeWriterDefaultTest(
+        standard_file_test_trees.NexmlTestTreesChecker,
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NexmlTestTreesChecker.create_class_fixtures(cls)
+
+    def test_roundtrip_full(self):
+        tree_file_title = 'dendropy-test-trees-n33-unrooted-annotated-x10a'
+        tree_filepath = standard_file_test_trees._TREE_FILEPATHS["nexml"][tree_file_title]
+        tree1 = dendropy.Tree.get_from_path(
+                tree_filepath,
+                "nexml",
+        )
+        s = tree1.as_string("nexml")
+        tree2 = dendropy.Tree.get_from_string(
+                s,
+                "nexml",
+        )
+        self.compare_to_reference_by_title_and_index(
+            tree=tree2,
+            tree_file_title=tree_file_title,
+            reference_tree_idx=0)
+
+class NexmlStandardTreeListWriterTestCase(
+        compare_and_validate.ValidateWriteable,
+        standard_file_test_trees.NexmlTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NexmlTestTreesChecker.create_class_fixtures(cls)
+
+    def test_annotated_tree_list_writing(self):
+        tree_file_title = 'dendropy-test-trees-n33-unrooted-annotated-x10a'
+        tree_filepath = self.schema_tree_filepaths[tree_file_title]
+        tree_list1 = dendropy.TreeList.get_from_path(
+                tree_filepath,
+                "nexml")
+        s = self.write_out_validate_equal_and_return(
+                tree_list1, "nexml", {})
+        tree_list2 = dendropy.TreeList.get_from_string(s,
+                "nexml")
+        self.verify_standard_trees(
+                tree_list=tree_list2,
+                tree_file_title=tree_file_title,
+                tree_offset=0)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_nexus_reader_chars.py b/dendropy/test/test_dataio_nexus_reader_chars.py
new file mode 100644
index 0000000..5495eba
--- /dev/null
+++ b/dendropy/test/test_dataio_nexus_reader_chars.py
@@ -0,0 +1,288 @@
+#!/usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for general NEXUS character matrix reading.
+"""
+
+import unittest
+import dendropy
+from dendropy.utility import error
+from dendropy.test.support import dendropytest
+from dendropy.test.support import pathmap
+from dendropy.test.support import standard_file_test_chars
+from dendropy.test.support import compare_and_validate
+from dendropy.dataio import nexusreader
+from dendropy.utility import messaging
+_LOG = messaging.get_logger(__name__)
+
class NexusCharactersReaderDnaTestCase(
        standard_file_test_chars.DnaTestChecker,
        dendropytest.ExtendedTestCase):
    """Checks NEXUS parsing of DNA character matrices in several file layouts."""

    @classmethod
    def setUpClass(cls):
        # Build the reference DNA character fixtures once for the class.
        cls.build()

    def test_basic_nexus(self):
        # Each file encodes the same reference matrix in a different NEXUS
        # layout: simple, basic, interleaved, matchchar, and multi-block.
        src_filenames = [
                "standard-test-chars-dna.simple.nexus",
                "standard-test-chars-dna.basic.nexus",
                "standard-test-chars-dna.interleaved.nexus",
                "standard-test-chars-dna.matchchar.nexus",
                "standard-test-chars-dna.multi.nexus",
                ]
        # Fix: the original iterated with enumerate() but never used the
        # index (it appeared only in a commented-out print).
        for src_filename in src_filenames:
            src_path = pathmap.char_source_path(src_filename)
            self.verify_get_from(
                    matrix_type=dendropy.DnaCharacterMatrix,
                    src_filepath=src_path,
                    schema="nexus",
                    factory_kwargs={},
                    check_taxon_annotations=False,
                    check_matrix_annotations=False,
                    check_sequence_annotations=False,
                    check_column_annotations=False,
                    check_cell_annotations=False)
+
class NexusCharactersReaderRnaTestCase(
        standard_file_test_chars.RnaTestChecker,
        dendropytest.ExtendedTestCase):
    """Checks NEXUS parsing of RNA character matrices in several file layouts."""

    @classmethod
    def setUpClass(cls):
        # Build the reference RNA character fixtures once for the class.
        cls.build()

    def test_basic_nexus(self):
        # Same reference matrix in five different NEXUS layouts.
        src_filenames = [
                "standard-test-chars-rna.simple.nexus",
                "standard-test-chars-rna.basic.nexus",
                "standard-test-chars-rna.interleaved.nexus",
                "standard-test-chars-rna.matchchar.nexus",
                "standard-test-chars-rna.multi.nexus",
                ]
        # Fix: dropped the unused enumerate() index from the original loop.
        for src_filename in src_filenames:
            src_path = pathmap.char_source_path(src_filename)
            self.verify_get_from(
                    matrix_type=dendropy.RnaCharacterMatrix,
                    src_filepath=src_path,
                    schema="nexus",
                    factory_kwargs={},
                    check_taxon_annotations=False,
                    check_matrix_annotations=False,
                    check_sequence_annotations=False,
                    check_column_annotations=False,
                    check_cell_annotations=False)
+
class NexusCharactersReaderProteinTestCase(
        standard_file_test_chars.ProteinTestChecker,
        dendropytest.ExtendedTestCase):
    """Checks NEXUS parsing of protein character matrices in several layouts."""

    @classmethod
    def setUpClass(cls):
        # Build the reference protein character fixtures once for the class.
        cls.build()

    def test_basic_nexus(self):
        # Same reference matrix in five different NEXUS layouts.
        src_filenames = [
                "standard-test-chars-protein.simple.nexus",
                "standard-test-chars-protein.basic.nexus",
                "standard-test-chars-protein.interleaved.nexus",
                "standard-test-chars-protein.matchchar.nexus",
                "standard-test-chars-protein.multi.nexus",
                ]
        # Fix: dropped the unused enumerate() index from the original loop.
        for src_filename in src_filenames:
            src_path = pathmap.char_source_path(src_filename)
            self.verify_get_from(
                    matrix_type=dendropy.ProteinCharacterMatrix,
                    src_filepath=src_path,
                    schema="nexus",
                    factory_kwargs={},
                    check_taxon_annotations=False,
                    check_matrix_annotations=False,
                    check_sequence_annotations=False,
                    check_column_annotations=False,
                    check_cell_annotations=False)
+
class NexusCharactersContinuousTestCase(
        standard_file_test_chars.ContinuousTestChecker,
        dendropytest.ExtendedTestCase):
    """Checks NEXUS parsing of continuous character matrices (Mesquite exports)."""

    @classmethod
    def setUpClass(cls):
        # Build the reference continuous-character fixtures once for the class.
        cls.build()

    def test_basic_nexus(self):
        # Mesquite-exported continuous data, plain and interleaved.
        src_filenames = [
                "standard-test-chars-continuous.mesquite.nexus",
                "standard-test-chars-continuous.mesquite.interleaved.nexus",
                ]
        # Fix: dropped the unused enumerate() index from the original loop.
        for src_filename in src_filenames:
            src_path = pathmap.char_source_path(src_filename)
            self.verify_get_from(
                    matrix_type=dendropy.ContinuousCharacterMatrix,
                    src_filepath=src_path,
                    schema="nexus",
                    factory_kwargs={},
                    check_taxon_annotations=False,
                    check_matrix_annotations=False,
                    check_sequence_annotations=False,
                    check_column_annotations=False,
                    check_cell_annotations=False)
+
class NexusStandardCharacters01234TestCase(
        standard_file_test_chars.Standard01234TestChecker,
        dendropytest.ExtendedTestCase):
    """Checks NEXUS parsing of generic 'standard' (0-4 state) character matrices."""

    @classmethod
    def setUpClass(cls):
        # Build the reference standard-character fixtures once for the class.
        cls.build()

    def test_basic_nexus(self):
        # Same reference matrix in four different NEXUS layouts.
        src_filenames = [
                "standard-test-chars-generic.simple.nexus",
                "standard-test-chars-generic.basic.nexus",
                "standard-test-chars-generic.dotted.nexus",
                "standard-test-chars-generic.interleaved.nexus",
                ]
        # Fix: dropped the unused enumerate() index from the original loop.
        for src_filename in src_filenames:
            src_path = pathmap.char_source_path(src_filename)
            self.verify_get_from(
                    matrix_type=dendropy.StandardCharacterMatrix,
                    src_filepath=src_path,
                    schema="nexus",
                    factory_kwargs={},
                    check_taxon_annotations=False,
                    check_matrix_annotations=False,
                    check_sequence_annotations=False,
                    check_column_annotations=False,
                    check_cell_annotations=False)
+
+
class NexusTooManyTaxaTest(
        dendropytest.ExtendedTestCase):
    """Checks that a matrix with more rows than declared taxa is rejected."""

    def testTooManyTaxaNonInterleaved(self):
        # NTAX=2 and two TAXLABELS, but the matrix carries three rows:
        # the reader must raise TooManyTaxaError.
        data_str = """\
        #NEXUS
        BEGIN TAXA;
            DIMENSIONS NTAX=2;
            TAXLABELS AAA BBB ;
        END;
        BEGIN CHARACTERS;
            DIMENSIONS  NCHAR=8;
            FORMAT DATATYPE=DNA GAP=- MISSING=? MATCHCHAR=.;
            MATRIX
                AAA ACGTACGT
                BBB ACGTACGT
                CCC ACGTACGT
            ;
        END;
        """
        # Idiom: use the assertRaises context-manager form rather than
        # passing the callable and its arguments separately.
        with self.assertRaises(nexusreader.NexusReader.TooManyTaxaError):
            dendropy.DnaCharacterMatrix.get_from_string(data_str, 'nexus')
+
class NexusCharsSubsetsTest(
        compare_and_validate.Comparator,
        dendropytest.ExtendedTestCase):
    """
    Tests export of NEXUS CHARSET statements as independent character
    matrices, including independence of the exported copy from the source.
    """

    def verify_subsets(self, src_filename, expected_sets):
        """
        Check every charset in ``src_filename`` against its reference file.

        ``src_filename`` -- name of file containing full data and charsets
                          statement
        ``expected_sets`` -- dictionary with keys = label of charset, and values
                           = name of file with the subset of characters
                           corresponding to the charset.
        """

        src_data = dendropy.DnaCharacterMatrix.get_from_path(
                pathmap.char_source_path(src_filename),
                'nexus')

        state_alphabet = src_data.default_state_alphabet
        # Every declared charset (and nothing else) must have been read.
        self.assertEqual(len(src_data.character_subsets), len(expected_sets))
        for label, expected_data_file in expected_sets.items():

            _LOG.debug(label)

            self.assertTrue(label in src_data.character_subsets)
            result_subset = src_data.export_character_subset(label)
            expected_subset = dendropy.DnaCharacterMatrix.get_from_path(
                pathmap.char_source_path(expected_data_file),
                'nexus')

            # confirm subset is correct
            self.compare_distinct_char_matrix(
                    result_subset,
                    expected_subset,
                    taxon_namespace_scoped=False,
                    )

            # mutate new and confirm that old remains unchanged
            # (i.e. the export is a deep copy, not a view of the source)
            e1_symbols = src_data[0].symbols_as_string()
            r1 = result_subset[0]
            dummy_state = state_alphabet["A"]
            for idx in range(len(r1)):
                r1[idx].value = dummy_state
            self.assertEqual(e1_symbols, src_data[0].symbols_as_string())

            # mutate old and confirm that new remains unchanged
            r2_symbols = result_subset[1].symbols_as_string()
            e2 = src_data[1]
            dummy_state = state_alphabet["A"]
            for idx in range(len(e2)):
                e2[idx].value = dummy_state
            self.assertEqual(r2_symbols, result_subset[1].symbols_as_string())

    def testNonInterleaved(self):
        """
        Charsets here go through all forms of position specification.
        """
        expected_sets = {
                "coding" : "primates.chars.subsets-coding.nexus",
                "noncoding" : "primates.chars.subsets-noncoding.nexus",
                "1stpos" : "primates.chars.subsets-1stpos.nexus",
                "2ndpos" : "primates.chars.subsets-2ndpos.nexus",
                "3rdpos" : "primates.chars.subsets-3rdpos.nexus",
                }
        self.verify_subsets('primates.chars.subsets-all.nexus', expected_sets)

    def testInterleaved(self):
        """
        A bug in DendroPy resulted in the block immediately following an
        interleaved character matrix DATA or CHARACTERS block being skipped.
        This tests for it by ensuring that the ASSUMPTIONS block following an
        interleaved CHARACTERS block is parsed. A better test would approach
        the issue more directly, by checking to see if block parsing left the
        stream reader in the correct position.
        """
        expected_sets = {
                "c1" : "interleaved-charsets-c1.nex",
                "c2" : "interleaved-charsets-c2.nex",
                "c3" : "interleaved-charsets-c3.nex",
                }
        self.verify_subsets('interleaved-charsets-all.nex', expected_sets)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_nexus_reader_dataset.py b/dendropy/test/test_dataio_nexus_reader_dataset.py
new file mode 100644
index 0000000..5c9ec15
--- /dev/null
+++ b/dendropy/test/test_dataio_nexus_reader_dataset.py
@@ -0,0 +1,361 @@
+#!/usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+NEXUS data read/write parse/format tests.
+"""
+
+from dendropy.test.support import pathmap
+from dendropy.test.support import dendropytest
+from dendropy.test.support import standard_file_test_chars
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import standard_file_test_datasets
+from dendropy.utility import messaging
+import unittest
+import dendropy
+_LOG = messaging.get_logger(__name__)
+
class DataSetNexusSingleCharsTestCase(dendropytest.ExtendedTestCase):
    """
    Reads NEXUS files that each contain a single character matrix into a
    DataSet, checking taxon-namespace bookkeeping for unattached and
    attached namespaces.
    """

    @classmethod
    def setUpClass(cls):
        # Annotation checking is disabled for all of these comparisons.
        cls.check_taxon_annotations = False
        cls.check_matrix_annotations = False
        cls.check_sequence_annotations = False
        cls.check_column_annotations = False
        cls.check_cell_annotations = False
        # Build the per-datatype reference fixtures once.
        standard_file_test_chars.DnaTestChecker.build()
        standard_file_test_chars.RnaTestChecker.build()
        standard_file_test_chars.ProteinTestChecker.build()
        standard_file_test_chars.Standard01234TestChecker.build()
        standard_file_test_chars.ContinuousTestChecker.build()
        # (source filename, checker class that validates the parsed matrix)
        cls.srcs = (
                ("standard-test-chars-dna.multi.nexus", standard_file_test_chars.DnaTestChecker),
                ("standard-test-chars-rna.multi.nexus", standard_file_test_chars.RnaTestChecker),
                ("standard-test-chars-protein.multi.nexus", standard_file_test_chars.ProteinTestChecker),
                ("standard-test-chars-generic.interleaved.nexus", standard_file_test_chars.Standard01234TestChecker),
                ("standard-test-chars-continuous.mesquite.nexus", standard_file_test_chars.ContinuousTestChecker),
                )

    def verify_char_matrix(self, char_matrix, src_matrix_checker_type):
        # The parsed matrix must be exactly the checker's expected type.
        self.assertEqual(type(char_matrix), src_matrix_checker_type.matrix_type)
        if src_matrix_checker_type.matrix_type is dendropy.StandardCharacterMatrix:
            # Standard matrices need their expected sequences rebuilt against
            # the state alphabet actually created during parsing.
            src_matrix_checker_type.create_class_fixtures_label_sequence_map_based_on_state_alphabet(src_matrix_checker_type,
                    char_matrix.default_state_alphabet)
        standard_file_test_chars.general_char_matrix_checker(
                self,
                char_matrix,
                src_matrix_checker_type,
                check_taxon_annotations=self.check_taxon_annotations,
                check_matrix_annotations=self.check_matrix_annotations,
                check_sequence_annotations=self.check_sequence_annotations,
                check_column_annotations=self.check_column_annotations,
                check_cell_annotations=self.check_cell_annotations,)

    def test_get_single(self):
        # DataSet.get_from_path(): one matrix and one namespace per file.
        for src_filename, src_matrix_checker_type in self.__class__.srcs:
            src_path = pathmap.char_source_path(src_filename)
            ds = dendropy.DataSet.get_from_path(src_path, "nexus")
            self.assertEqual(len(ds.char_matrices), 1)
            self.assertEqual(len(ds.taxon_namespaces), 1)
            self.assertIs(ds.char_matrices[0].taxon_namespace,
                    ds.taxon_namespaces[0])
            self.verify_char_matrix(ds.char_matrices[0], src_matrix_checker_type)

    def test_read_single(self):
        # DataSet.read() returns counts of (namespaces, trees, matrices) added.
        for src_filename, src_matrix_checker_type in self.__class__.srcs:
            src_path = pathmap.char_source_path(src_filename)
            ds = dendropy.DataSet()
            result = ds.read(path=src_path, schema="nexus")
            self.assertEqual(result, (1,0,1))
            self.assertEqual(len(ds.char_matrices), 1)
            self.assertEqual(len(ds.taxon_namespaces), 1)
            self.assertIs(ds.char_matrices[0].taxon_namespace,
                    ds.taxon_namespaces[0])
            self.verify_char_matrix(ds.char_matrices[0], src_matrix_checker_type)

    def test_read_successive_unattached_taxon_namespace(self):
        # Without an attached namespace each read adds a new namespace.
        ds = dendropy.DataSet()
        for src_idx, (src_filename, src_matrix_checker_type) in enumerate(self.__class__.srcs):
            src_path = pathmap.char_source_path(src_filename)
            result = ds.read(path=src_path, schema="nexus")
            self.assertEqual(result, (1,0,1))
            self.assertEqual(len(ds.char_matrices), src_idx+1)
            self.assertEqual(len(ds.taxon_namespaces), src_idx+1)
            self.assertIs(ds.char_matrices[src_idx].taxon_namespace,
                    ds.taxon_namespaces[src_idx])
            self.verify_char_matrix(ds.char_matrices[src_idx], src_matrix_checker_type)

    def test_read_successive_attached_taxon_namespace(self):
        # With an attached namespace every read reuses it (0 namespaces added).
        ds = dendropy.DataSet()
        tns = dendropy.TaxonNamespace()
        ds.attach_taxon_namespace(tns)
        for src_idx, (src_filename, src_matrix_checker_type) in enumerate(self.__class__.srcs):
            src_path = pathmap.char_source_path(src_filename)
            result = ds.read(path=src_path, schema="nexus")
            self.assertEqual(result, (0,0,1))
            self.assertEqual(len(ds.char_matrices), src_idx+1)
            self.assertEqual(len(ds.taxon_namespaces), 1)
            self.assertIs(ds.char_matrices[src_idx].taxon_namespace,
                    ds.taxon_namespaces[0])
            self.assertIs(ds.taxon_namespaces[0], tns)
            self.verify_char_matrix(ds.char_matrices[src_idx], src_matrix_checker_type)
+
class DataSetNexusMultipleCharBlocksTestCase(dendropytest.ExtendedTestCase):
    """
    Reads a NEXUS file containing several CHARACTERS blocks of different
    datatypes into a single DataSet sharing one taxon namespace.
    """

    @classmethod
    def setUpClass(cls):
        # Annotation checking is disabled for all of these comparisons.
        cls.check_taxon_annotations = False
        cls.check_matrix_annotations = False
        cls.check_sequence_annotations = False
        cls.check_column_annotations = False
        cls.check_cell_annotations = False
        # Build the per-datatype reference fixtures once.
        standard_file_test_chars.DnaTestChecker.build()
        standard_file_test_chars.RnaTestChecker.build()
        standard_file_test_chars.ProteinTestChecker.build()
        standard_file_test_chars.Standard01234TestChecker.build()

    def verify_dataset(self, ds):
        """Check the four parsed matrices, in file order, against their checkers."""
        self.assertEqual(len(ds.taxon_namespaces), 1)
        tns = ds.taxon_namespaces[0]
        # Order matches the order of the CHARACTERS blocks in the file.
        checkers = (
                standard_file_test_chars.RnaTestChecker,
                standard_file_test_chars.ProteinTestChecker,
                standard_file_test_chars.Standard01234TestChecker,
                standard_file_test_chars.DnaTestChecker,
                )
        self.assertEqual(len(ds.char_matrices), len(checkers))
        # Fix: the original wrapped this zip() in enumerate() but never
        # used the index.
        for char_matrix, checker in zip(ds.char_matrices, checkers):
            self.assertIs(char_matrix.taxon_namespace, tns)
            if checker.matrix_type is dendropy.StandardCharacterMatrix:
                # Standard matrices need their expected sequences rebuilt
                # against the state alphabet created during parsing.
                checker.create_class_fixtures_label_sequence_map_based_on_state_alphabet(checker,
                        char_matrix.default_state_alphabet)
            standard_file_test_chars.general_char_matrix_checker(
                    self,
                    char_matrix,
                    checker,
                    check_taxon_annotations=self.check_taxon_annotations,
                    check_matrix_annotations=self.check_matrix_annotations,
                    check_sequence_annotations=self.check_sequence_annotations,
                    check_column_annotations=self.check_column_annotations,
                    check_cell_annotations=self.check_cell_annotations,)

    def test_basic_get(self):
        src_filename = "standard-test-chars-multiple-char-blocks.1.basic.nexus"
        src_path = pathmap.char_source_path(src_filename)
        ds = dendropy.DataSet.get_from_path(src_path, "nexus")
        self.verify_dataset(ds)

    def test_basic_read(self):
        src_filename = "standard-test-chars-multiple-char-blocks.1.basic.nexus"
        src_path = pathmap.char_source_path(src_filename)
        ds = dendropy.DataSet()
        result = ds.read(path=src_path, schema="nexus")
        # One namespace, no trees, four character matrices added.
        self.assertEqual(result, (1, 0, 4))
        self.verify_dataset(ds)
+
class DataSetNexusTreesTestCase(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """Checks NEXUS TREES-block parsing into DataSet objects."""

    def test_multiple_trees(self):
        src_filename = "multitreeblocks.nex"
        src_path = pathmap.tree_source_path(src_filename)
        ds = dendropy.DataSet.get_from_path(src_path, "nexus")
        # All three TREES blocks share a single taxon namespace.
        self.assertEqual(len(ds.taxon_namespaces), 1)
        self.assertEqual(len(ds.tree_lists), 3)

    def test_with_translate(self):
        # Each source pairs a file with whether internal-node taxa should
        # be suppressed when reading it.
        srcs = (
            ("curated-with-translate-block-and-internal-taxa.nex", False),
            ("curated-with-translate-block-and-untranslated-internal-taxa.nex", True),
            )
        for src_filename, suppress_internal_taxa in srcs:
            src_path = pathmap.tree_source_path(src_filename)
            ds = dendropy.DataSet.get_from_path(
                    src_path,
                    "nexus",
                    suppress_internal_node_taxa=suppress_internal_taxa)
            self.assertEqual(len(ds.tree_lists), 1)
            tree_list = ds.tree_lists[0]
            tree_labels = ("1", "2", "3")
            self.assertEqual(len(tree_list), len(tree_labels))
            # Fix: dropped the unused enumerate() index from the original loop.
            for tree, label in zip(tree_list, tree_labels):
                self.assertEqual(tree.label, label)
                self.verify_curated_tree(
                        tree=tree,
                        suppress_internal_node_taxa=suppress_internal_taxa,
                        suppress_leaf_node_taxa=False,
                        suppress_edge_lengths=False,
                        node_taxon_label_map=None)
+
class DataSetNexusReaderMixedTestCase(
        standard_file_test_datasets.StandardSingleTaxonNamespaceDataSet,
        dendropytest.ExtendedTestCase):
    """Reads a NEXUS file mixing tree and character blocks into a DataSet."""

    @classmethod
    def setUpClass(cls):
        standard_file_test_datasets.StandardSingleTaxonNamespaceDataSet.build(cls)
        # Annotation checking is disabled across the board for these tests.
        for flag_name in (
                "check_taxon_annotations",
                "check_matrix_annotations",
                "check_sequence_annotations",
                "check_column_annotations",
                "check_cell_annotations",
                ):
            setattr(cls, flag_name, False)

    def test_basic_get(self):
        source_path = pathmap.mixed_source_path("standard-test-mixed.1.basic.nexus")
        # suppress_internal_node_taxa=False so internal labels get translated.
        ds = dendropy.DataSet.get_from_path(
                source_path,
                "nexus",
                suppress_internal_node_taxa=False,
                )
        self.verify_dataset(ds)

    def test_basic_read(self):
        source_path = pathmap.mixed_source_path("standard-test-mixed.1.basic.nexus")
        ds = dendropy.DataSet()
        # suppress_internal_node_taxa=False so internal labels get translated.
        result = ds.read(
                path=source_path,
                schema="nexus",
                suppress_internal_node_taxa=False,
                )
        self.assertEqual(result, (1, 7, 4))
        self.verify_dataset(ds)
+
class DataSetNexusTaxonManagementTestCase(dendropytest.ExtendedTestCase):
    """
    Checks how successive reads into a DataSet create, reuse, attach, and
    detach taxon namespaces.
    """

    def testMultiTaxonNamespace(self):
        # Without an attached namespace, every read creates a new one.
        d = dendropy.DataSet()
        d.read(path=pathmap.mixed_source_path('reference_single_taxonset_dataset.nex'), schema="nexus")
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertEqual(len(d.taxon_namespaces[0]), 33)
        d.read(path=pathmap.tree_source_path('pythonidae.mle.nex'), schema="nexus")
        self.assertEqual(len(d.taxon_namespaces), 2)
        self.assertEqual(len(d.taxon_namespaces[1]), 33)
        d.read(path=pathmap.tree_source_path('pythonidae.reference-trees.newick'), schema="newick")
        self.assertEqual(len(d.taxon_namespaces), 3)
        self.assertEqual(len(d.taxon_namespaces[2]), 33)
        d.read(path=pathmap.char_source_path('caenophidia_mos.chars.fasta'), schema="fasta", data_type="protein")
        self.assertEqual(len(d.taxon_namespaces), 4)
        self.assertEqual(len(d.taxon_namespaces[3]), 114)

    def testBoundTaxonNamespaceDefault(self):
        # With an attached namespace, every read maps into that namespace;
        # the FASTA taxa accumulate into it (33 + 114 = 147).
        d = dendropy.DataSet()
        t = dendropy.TaxonNamespace()
        d.attach_taxon_namespace(t)
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertIs(d.taxon_namespaces[0], d.attached_taxon_namespace)
        d.read(path=pathmap.mixed_source_path('reference_single_taxonset_dataset.nex'), schema="nexus")
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertEqual(len(d.taxon_namespaces[0]), 33)
        d.read(path=pathmap.tree_source_path('pythonidae.mle.nex'), schema="nexus")
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertEqual(len(d.taxon_namespaces[0]), 33)
        d.read(path=pathmap.tree_source_path('pythonidae.reference-trees.newick'), schema="newick")
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertEqual(len(d.taxon_namespaces[0]), 33)
        d.read(path=pathmap.char_source_path('caenophidia_mos.chars.fasta'), schema="fasta", data_type="protein")
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertEqual(len(d.taxon_namespaces[0]), 147)

    def testBindAndUnbind(self):
        # After detaching, a subsequent read creates a fresh namespace again.
        d = dendropy.DataSet()
        t = dendropy.TaxonNamespace()
        d.attach_taxon_namespace(t)
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertIs(d.taxon_namespaces[0], d.attached_taxon_namespace)
        self.assertIs(d.attached_taxon_namespace, t)
        d.read(path=pathmap.mixed_source_path('reference_single_taxonset_dataset.nex'), schema="nexus")
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertEqual(len(d.taxon_namespaces[0]), 33)
        d.read(path=pathmap.tree_source_path('pythonidae.mle.nex'), schema="nexus")
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertEqual(len(d.taxon_namespaces[0]), 33)
        d.read(path=pathmap.tree_source_path('pythonidae.reference-trees.newick'), schema="newick")
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertEqual(len(d.taxon_namespaces[0]), 33)
        d.detach_taxon_namespace()
        d.read(path=pathmap.char_source_path('caenophidia_mos.chars.fasta'), schema="fasta", data_type="protein")
        self.assertEqual(len(d.taxon_namespaces), 2)
        self.assertEqual(len(d.taxon_namespaces[0]), 33)
        self.assertEqual(len(d.taxon_namespaces[1]), 114)

    def testAttachTaxonNamespaceOnGet(self):
        # Passing ``taxon_namespace`` to get_from_path() attaches it.
        t = dendropy.TaxonNamespace()
        d = dendropy.DataSet.get_from_path(pathmap.mixed_source_path('reference_single_taxonset_dataset.nex'),
                "nexus",
                taxon_namespace=t)
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertIsNot(d.attached_taxon_namespace, None)
        self.assertIs(d.taxon_namespaces[0], d.attached_taxon_namespace)
        self.assertIs(d.attached_taxon_namespace, t)
        self.assertEqual(len(d.taxon_namespaces[0]), 33)
        d.read(path=pathmap.tree_source_path('pythonidae.mle.nex'), schema="nexus")
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertEqual(len(d.taxon_namespaces[0]), 33)
        d.read(path=pathmap.tree_source_path('pythonidae.reference-trees.newick'), schema="newick")
        self.assertEqual(len(d.taxon_namespaces), 1)
        self.assertEqual(len(d.taxon_namespaces[0]), 33)
        d.detach_taxon_namespace()
        # Consistency fix: the original used the deprecated ``read_from_path``
        # here while every other call in this class uses ``read(path=...)``.
        d.read(path=pathmap.char_source_path('caenophidia_mos.chars.fasta'), schema="fasta", data_type="protein")
        self.assertEqual(len(d.taxon_namespaces), 2)
        self.assertEqual(len(d.taxon_namespaces[0]), 33)
        self.assertEqual(len(d.taxon_namespaces[1]), 114)
+
class NexusDataSetMultiTreesBlockTestCase(dendropytest.ExtendedTestCase):
    """Reads NEXUS files containing multiple TREES blocks into a DataSet."""

    def test_multiple_trees1(self):
        source_path = pathmap.tree_source_path("multitreeblocks.nex")
        ds = dendropy.DataSet.get_from_path(source_path, "nexus")
        self.assertEqual(len(ds.tree_lists), 3)
        # Each TREES block yields a list of three trees.
        for tree_list in ds.tree_lists:
            self.assertEqual(len(tree_list), 3)

    def test_multiple_trees2(self):
        source_path = pathmap.tree_source_path("multitreeblocks2.nex")
        ds = dendropy.DataSet.get_from_path(source_path, "nexus")
        self.assertEqual(len(ds.taxon_namespaces), 1)
        self.assertEqual(len(ds.tree_lists), 2)
        # Both tree lists share the single taxon namespace.
        for tree_list in ds.tree_lists:
            self.assertEqual(len(tree_list), 2)
            self.assertIs(tree_list.taxon_namespace, ds.taxon_namespaces[0])
+
class DataSetNexusReaderMesquiteMultipleTaxonNamespacesTest(
        standard_file_test_datasets.MultipleTaxonNamespaceDataSet,
        dendropytest.ExtendedTestCase):
    """Reads a Mesquite NEXUS file declaring several TAXA blocks."""

    def _load_dataset(self, **kwargs):
        # Shared loader for the Mesquite multi-taxa test file.
        return dendropy.DataSet.get_from_path(
                pathmap.mixed_source_path('multitaxa_mesquite.nex'),
                "nexus",
                **kwargs)

    def test_attached_taxon_namespace(self):
        tns = dendropy.TaxonNamespace()
        ds = self._load_dataset(taxon_namespace=tns)
        self.verify_attached_taxon_namespace(ds, tns)

    def test_default(self):
        self.verify_unrestricted(self._load_dataset())
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_nexus_reader_tree_list.py b/dendropy/test/test_dataio_nexus_reader_tree_list.py
new file mode 100644
index 0000000..f1b2283
--- /dev/null
+++ b/dendropy/test/test_dataio_nexus_reader_tree_list.py
@@ -0,0 +1,109 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for general NEXUS tree list reading.
+"""
+
+import sys
+import unittest
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import standard_file_test_trees
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import pathmap
+from dendropy.test import base_standard_trees_parsing_test_cases
+if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
+    from dendropy.utility.filesys import pre_py34_open as open
+
+class NexusStandardTreeParsingTestCase(
+        base_standard_trees_parsing_test_cases.StandardTreesParsingTestCase,
+        standard_file_test_trees.NexusTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NexusTestTreesChecker.create_class_fixtures(cls)
+
+    ## NOTE: many tests are in standard_file_test_trees.StandardTreeParsingTestCase !! ##
+
+    def test_collection_comments_and_annotations(self):
+        for tree_file_title in (
+                "dendropy-test-trees-multifurcating-rooted-annotated",
+                "dendropy-test-trees-n33-unrooted-annotated-x10a",
+                ):
+            tree_reference = dict(standard_file_test_trees._TREE_REFERENCES[tree_file_title])
+            expected_non_metadata_comments = tree_reference["tree_list_comments"]
+            expected_metadata = tree_reference["tree_list_metadata"]
+            tree_filepath = self.schema_tree_filepaths[tree_file_title]
+            tree_list = dendropy.TreeList.get_from_path(
+                    tree_filepath,
+                    "nexus")
+            expected_comments = expected_non_metadata_comments
+            self.compare_annotations_to_json_metadata_dict(
+                    tree_list,
+                    expected_metadata)
+            if self.__class__.is_check_comments:
+                self.assertEqual(len(tree_list.comments), len(expected_comments))
+                self.assertEqual(set(tree_list.comments), set(expected_comments))
+            self.verify_standard_trees(
+                    tree_list=tree_list,
+                    tree_file_title=tree_file_title,
+                    tree_offset=0)
+
+class NexusMultiTreeListTestCase(dendropytest.ExtendedTestCase):
+
+    def test_multiple_trees1(self):
+        src_filename = "multitreeblocks.nex"
+        src_path = pathmap.tree_source_path(src_filename)
+        trees = dendropy.TreeList.get_from_path(src_path, "nexus")
+        self.assertEqual(len(trees), 9)
+
+    def test_multiple_trees2(self):
+        src_filename = "multitreeblocks2.nex"
+        src_path = pathmap.tree_source_path(src_filename)
+        trees = dendropy.TreeList.get_from_path(src_path, "nexus")
+        self.assertEqual(len(trees), 4)
+        labels = ["x2.1","x2.2","x2.3","x2.4"]
+        # self.assertEqual(len(trees.taxon_namespace), len(labels))
+        self.assertEqual([t.label for t in trees.taxon_namespace], labels)
+        for tree in trees:
+            self.assertIs(tree.taxon_namespace, trees.taxon_namespace)
+            seen_taxa = 0
+            for nd in tree:
+                if nd.taxon is not None:
+                    seen_taxa += 1
+                    self.assertIn(nd.taxon, tree.taxon_namespace)
+            self.assertEqual(seen_taxa, len(tree.taxon_namespace))
+
+class NexusStandardTreesWithTranslateBlockButNoTaxaBlockTestCase(
+        curated_test_tree.CuratedTestTree,
+        dendropytest.ExtendedTestCase):
+
+    def test_with_translate_but_no_taxa_block(self):
+        src_filename = "curated-with-translate-block-and-no-taxa-block-and-untranslated-internal-taxa.nex"
+        src_path = pathmap.tree_source_path(src_filename)
+        tree_list = dendropy.TreeList.get_from_path(src_path, "nexus")
+        tree_labels = ("1", "2", "3")
+        self.assertEqual(len(tree_list), len(tree_labels))
+        for tree_idx, (tree, label) in enumerate(zip(tree_list, tree_labels)):
+            self.assertEqual(tree.label, label)
+            self.verify_curated_tree(tree=tree)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_nexus_taxon_handling.py b/dendropy/test/test_dataio_nexus_taxon_handling.py
new file mode 100644
index 0000000..467a263
--- /dev/null
+++ b/dendropy/test/test_dataio_nexus_taxon_handling.py
@@ -0,0 +1,279 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for NEWICK taxon handling.
+"""
+
+import sys
+import os
+import unittest
+import dendropy
+from dendropy.utility import error
+from dendropy.test.support import dendropytest
+from dendropy.dataio import nexusreader
+from dendropy.dataio import nexusprocessing
+
+class TaxonSymbolMappingTest(unittest.TestCase):
+
+    def test_standard_lookup_and_create(self):
+        labels = ["t{}".format(i) for i in range(1, 101)]
+        tns = dendropy.TaxonNamespace()
+        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
+        for idx, label in enumerate(labels):
+            self.assertEqual(len(tns), idx)
+            t1 = tsm.require_taxon_for_symbol(label)
+            self.assertEqual(len(tns), idx+1)
+            self.assertEqual(t1.label, label)
+
+            t2 = tsm.require_taxon_for_symbol(label)
+            self.assertEqual(len(tns), idx+1)
+            self.assertIs(t1, t2)
+            self.assertEqual(t2.label, label)
+
+            t3 = tsm.require_taxon_for_symbol(str(idx+1))
+            self.assertEqual(len(tns), idx+1)
+            self.assertIs(t1, t3)
+            self.assertEqual(t3.label, label)
+
+            assert label.upper() != label
+            t4 = tsm.require_taxon_for_symbol(label.upper())
+            self.assertEqual(len(tns), idx+1)
+            self.assertIs(t1, t4)
+            self.assertEqual(t4.label, label)
+
+    def test_no_number_lookup_and_create(self):
+        # looking up a number symbol should result in new taxon creation
+        labels = ["t{}".format(i) for i in range(1, 101)]
+        tns = dendropy.TaxonNamespace()
+        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns,
+                enable_lookup_by_taxon_number=False)
+        for idx, label in enumerate(labels):
+            self.assertEqual(len(tns), idx)
+            t1 = tsm.require_taxon_for_symbol(label)
+            self.assertEqual(len(tns), idx+1)
+            self.assertEqual(t1.label, label)
+
+            t2 = tsm.require_taxon_for_symbol(label)
+            self.assertEqual(len(tns), idx+1)
+            self.assertIs(t1, t2)
+            self.assertEqual(t2.label, label)
+
+            t3 = tsm.lookup_taxon_symbol(str(idx+1), create_taxon_if_not_found=False)
+            self.assertIs(t3, None)
+            self.assertEqual(len(tns), idx+1)
+
+    def test_no_number_lookup_and_create2(self):
+        # looking up a number symbol should result in new taxon creation
+        labels = ["t{}".format(i) for i in range(1, 101)]
+        tns = dendropy.TaxonNamespace()
+        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns,
+                enable_lookup_by_taxon_number=False)
+        taxa = []
+        for label_idx, label in enumerate(labels):
+            t = dendropy.Taxon(label)
+            tsm.add_taxon(t)
+            taxa.append(t)
+        self.assertEqual(len(tns), len(labels))
+        for label_idx, label in enumerate(labels):
+            t1 = tsm.require_taxon_for_symbol(label_idx+1)
+            self.assertNotIn(t1, taxa)
+            self.assertEqual(t1.label, str(label_idx+1))
+            self.assertEqual(len(tns), len(labels)+label_idx+1)
+
+    def test_new_taxon(self):
+        labels = ["t{}".format(i) for i in range(1, 101)]
+        tns = dendropy.TaxonNamespace()
+        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
+        for label_idx, label in enumerate(labels):
+            t = tsm.new_taxon(label)
+            self.assertEqual(len(tns), label_idx+1)
+            self.assertEqual(t.label, label)
+            self.assertIs(tsm.require_taxon_for_symbol(label), t)
+            self.assertEqual(len(tns), label_idx+1)
+            self.assertIs(tsm.require_taxon_for_symbol(str(label_idx+1)), t)
+            self.assertEqual(len(tns), label_idx+1)
+        self.assertEqual(len(tns), len(labels))
+
+    def test_add_taxon(self):
+        labels = ["t{}".format(i) for i in range(1, 101)]
+        tns = dendropy.TaxonNamespace()
+        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
+        for label_idx, label in enumerate(labels):
+            t = dendropy.Taxon(label)
+            tsm.add_taxon(t)
+            self.assertEqual(len(tns), label_idx+1)
+            self.assertEqual(t.label, label)
+            self.assertIs(tsm.require_taxon_for_symbol(label), t)
+            self.assertEqual(len(tns), label_idx+1)
+            self.assertIs(tsm.require_taxon_for_symbol(str(label_idx+1)), t)
+            self.assertEqual(len(tns), label_idx+1)
+        self.assertEqual(len(tns), len(labels))
+
+    def test_simple_token_lookup(self):
+        labels = ["t{}".format(i) for i in range(1, 101)]
+        tns = dendropy.TaxonNamespace()
+        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
+        translate = {}
+        t_labels = {}
+        for label_idx, label in enumerate(labels):
+            t = dendropy.Taxon(label)
+            t_labels[t] = t.label
+            tsm.add_taxon(t)
+            token = label_idx + 1
+            translate[token] = t
+            tsm.add_translate_token(token, t)
+        self.assertEqual(len(tns), len(labels))
+        for token in translate:
+            t1 = translate[token]
+            t2 = tsm.require_taxon_for_symbol(token)
+            self.assertIs(t1, t2)
+            self.assertEqual(t2.label, t_labels[t1])
+        self.assertEqual(len(tns), len(labels))
+
+    def test_tricky_token_lookup(self):
+        labels = ["t{}".format(i) for i in range(1, 101)]
+        tns = dendropy.TaxonNamespace()
+        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
+        translate = {}
+        t_labels = {}
+        for label_idx, label in enumerate(labels):
+            t = dendropy.Taxon(label)
+            t_labels[t] = t.label
+            tsm.add_taxon(t)
+            token = str(len(labels) - label_idx)
+            translate[token] = t
+            tsm.add_translate_token(token, t)
+        self.assertEqual(len(tns), len(labels))
+        for token in translate:
+            t1 = translate[token]
+            t2 = tsm.require_taxon_for_symbol(token)
+            self.assertIs(t1, t2)
+            self.assertEqual(t2.label, t_labels[t1])
+        self.assertEqual(len(tns), len(labels))
+
+    def test_mixed_token_lookup(self):
+        labels = ["t{}".format(i) for i in range(1, 101)]
+        tns = dendropy.TaxonNamespace()
+        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
+        translate = {}
+        t_labels = {}
+        labels_t = {}
+        for label_idx, label in enumerate(labels):
+            t = dendropy.Taxon(label)
+            t_labels[t] = t.label
+            labels_t[t.label] = t
+            tsm.add_taxon(t)
+            if label_idx % 2 == 0:
+                token = str(label_idx+1)
+                translate[token] = t
+                tsm.add_translate_token(token, t)
+        self.assertEqual(len(tns), len(labels))
+        for label_idx, label in enumerate(labels):
+            token = label_idx + 1
+            t1 = tsm.require_taxon_for_symbol(token)
+            self.assertEqual(len(tns), len(labels))
+            self.assertEqual(t1.label, label)
+            self.assertIs(t1, labels_t[label])
+            if token in translate:
+                self.assertIs(t1, translate[token])
+        self.assertEqual(len(tns), len(labels))
+
+    def test_taxon_namespace_locking(self):
+        tns = dendropy.TaxonNamespace()
+        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
+        self.assertFalse(tns.is_mutable)
+        del tsm
+        self.assertTrue(tns.is_mutable)
+
+class NexusTaxaCaseInsensitivityTest(unittest.TestCase):
+
+    def setUp(self):
+        self.data_str = """\
+            #NEXUS
+
+            BEGIN TAXA;
+                DIMENSIONS NTAX=5;
+                TAXLABELS AAA BBB CCC DDD EEE;
+            END;
+
+            BEGIN CHARACTERS;
+                DIMENSIONS  NCHAR=8;
+                FORMAT DATATYPE=DNA GAP=- MISSING=? MATCHCHAR=. INTERLEAVE;
+                MATRIX
+                    AAA ACGT
+                    BBB ACGT
+                    CCC ACGT
+                    DDD ACGT
+                    EEE ACGT
+
+                    aaa ACGT
+                    bbb ACGT
+                    ccc ACGT
+                    ddd ACGT
+                    eee ACGT
+                ;
+            END;
+            """
+
+    def testCaseInsensitiveChars(self):
+        d = dendropy.DnaCharacterMatrix.get_from_string(self.data_str, 'nexus', case_sensitive_taxon_labels=False)
+        expected = ["AAA", "BBB", "CCC", "DDD", "EEE"]
+        observed = [t.label.upper() for t in d.taxon_namespace]
+        for i, x in enumerate(expected):
+            self.assertTrue(x in observed)
+        for i, x in enumerate(observed):
+            self.assertTrue(x in expected)
+        self.assertEqual(len(d.taxon_namespace), 5)
+
+    def testCaseSensitiveChars(self):
+        #d = dendropy.DnaCharacterMatrix.get_from_string(self.data_str, 'nexus', case_sensitive_taxon_labels=False)
+        self.assertRaises(error.DataParseError,
+                dendropy.DnaCharacterMatrix.get_from_string,
+                self.data_str,
+                'nexus',
+                case_sensitive_taxon_labels=True)
+
+    def testDefaultCaseSensitivityChars(self):
+        d = dendropy.DnaCharacterMatrix.get_from_string(self.data_str, 'nexus')
+        expected = ["AAA", "BBB", "CCC", "DDD", "EEE"]
+        observed = [t.label.upper() for t in d.taxon_namespace]
+        for i, x in enumerate(expected):
+            self.assertTrue(x in observed)
+        for i, x in enumerate(observed):
+            self.assertTrue(x in expected)
+        self.assertEqual(len(d.taxon_namespace), 5)
+
+class NexusTooManyTaxaTest(
+        dendropytest.ExtendedTestCase):
+
+    def testTooManyTaxaNonInterleaved(self):
+        data_str = """\
+        #NEXUS
+        BEGIN TAXA;
+            DIMENSIONS NTAX=2;
+            TAXLABELS AAA BBB CCC DDD EEE;
+        END;
+        """
+        self.assertRaises(nexusreader.NexusReader.TooManyTaxaError,
+                dendropy.DnaCharacterMatrix.get_from_string,
+                data_str,
+                'nexus')
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_nexus_tree_yielder.py b/dendropy/test/test_dataio_nexus_tree_yielder.py
new file mode 100644
index 0000000..4a30989
--- /dev/null
+++ b/dendropy/test/test_dataio_nexus_tree_yielder.py
@@ -0,0 +1,89 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for general NEWICK tree iteration reading.
+"""
+
+import sys
+import unittest
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import standard_file_test_trees
+from dendropy.test.support import pathmap
+
+if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
+    from dendropy.utility.filesys import pre_py34_open as open
+
+class NexusTreeYielderDefaultTestCase(
+        standard_file_test_trees.NexusTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NexusTestTreesChecker.create_class_fixtures(cls)
+
+    def test_basic(self):
+        tree_file_titles = [
+            # "dendropy-test-trees-multifurcating-rooted-annotated",
+            # "dendropy-test-trees-multifurcating-rooted",
+            # "dendropy-test-trees-multifurcating-unrooted",
+            # "dendropy-test-trees-n10-rooted-treeshapes",
+            "dendropy-test-trees-n12-x2",
+            "dendropy-test-trees-n33-unrooted-x10a",
+            "dendropy-test-trees-n33-unrooted-x10b",
+            "dendropy-test-trees-n33-unrooted-annotated-x10a",
+            "dendropy-test-trees-n33-unrooted-annotated-x10a",
+        ]
+        expected_file_names = []
+        expected_tree_references = []
+        tree_files = []
+        for file_idx, tree_file_title in enumerate(tree_file_titles):
+            tree_filepath = self.schema_tree_filepaths[tree_file_title]
+            if False and file_idx % 2 == 0:
+                tree_files.append(open(tree_filepath, "r"))
+            else:
+                tree_files.append(tree_filepath)
+            num_trees = self.tree_references[tree_file_title]["num_trees"]
+            for tree_idx in range(num_trees):
+                expected_file_names.append(tree_filepath)
+                expected_tree_references.append(self.tree_references[tree_file_title][str(tree_idx)])
+        collected_trees = []
+        tns = dendropy.TaxonNamespace()
+        # for f in tree_files:
+        #     dendropy.TreeList.get_from_path(f, "nexus")
+        tree_sources = dendropy.Tree.yield_from_files(
+                files=tree_files,
+                schema="nexus",
+                taxon_namespace=tns)
+        for tree_idx, tree in enumerate(tree_sources):
+            self.assertEqual(tree_sources.current_file_name, expected_file_names[tree_idx])
+            tree.current_file_name = tree_sources.current_file_name
+            collected_trees.append(tree)
+        self.assertEqual(len(collected_trees), len(expected_tree_references))
+        for tree, ref_tree in zip(collected_trees, expected_tree_references):
+            self.assertIs(tree.taxon_namespace, tns)
+            self.compare_to_reference_tree(tree, ref_tree)
+
+## TODO:
+# - test multiple trees blocks
+# - mix of newick/nexus
+
+if __name__ == "__main__":
+    unittest.main()
+
diff --git a/dendropy/test/test_dataio_nexus_writer_chars.py b/dendropy/test/test_dataio_nexus_writer_chars.py
new file mode 100644
index 0000000..24a5dcc
--- /dev/null
+++ b/dendropy/test/test_dataio_nexus_writer_chars.py
@@ -0,0 +1,79 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for NEXUS tree list writing.
+"""
+
+import unittest
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import compare_and_validate
+from dendropy.test.support import pathmap
+from dendropy.test.support import standard_file_test_chars
+
+class NexusWriterCharactersTestCase(
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.check_taxon_annotations = False
+        cls.check_matrix_annotations = False
+        cls.check_sequence_annotations = False
+        cls.check_column_annotations = False
+        cls.check_cell_annotations = False
+        standard_file_test_chars.DnaTestChecker.build()
+        standard_file_test_chars.RnaTestChecker.build()
+        standard_file_test_chars.ProteinTestChecker.build()
+        standard_file_test_chars.Standard01234TestChecker.build()
+        standard_file_test_chars.ContinuousTestChecker.build()
+        cls.srcs = (
+                ("standard-test-chars-dna.multi.nexus", dendropy.DnaCharacterMatrix, standard_file_test_chars.DnaTestChecker),
+                ("standard-test-chars-rna.multi.nexus", dendropy.RnaCharacterMatrix, standard_file_test_chars.RnaTestChecker),
+                ("standard-test-chars-protein.multi.nexus", dendropy.ProteinCharacterMatrix, standard_file_test_chars.ProteinTestChecker),
+                ("standard-test-chars-generic.interleaved.nexus", dendropy.StandardCharacterMatrix, standard_file_test_chars.Standard01234TestChecker),
+                ("standard-test-chars-continuous.mesquite.nexus", dendropy.ContinuousCharacterMatrix, standard_file_test_chars.ContinuousTestChecker),
+                )
+
+    def verify_char_matrix(self, char_matrix, src_matrix_checker_type):
+        self.assertEqual(type(char_matrix), src_matrix_checker_type.matrix_type)
+        if src_matrix_checker_type.matrix_type is dendropy.StandardCharacterMatrix:
+            src_matrix_checker_type.create_class_fixtures_label_sequence_map_based_on_state_alphabet(src_matrix_checker_type,
+                    char_matrix.default_state_alphabet)
+        standard_file_test_chars.general_char_matrix_checker(
+                self,
+                char_matrix,
+                src_matrix_checker_type,
+                check_taxon_annotations=self.check_taxon_annotations,
+                check_matrix_annotations=self.check_matrix_annotations,
+                check_sequence_annotations=self.check_sequence_annotations,
+                check_column_annotations=self.check_column_annotations,
+                check_cell_annotations=self.check_cell_annotations,)
+
+    def test_basic_nexus_chars(self):
+        for src_filename, matrix_type, src_matrix_checker_type in self.__class__.srcs:
+            src_path = pathmap.char_source_path(src_filename)
+            d1 = matrix_type.get_from_path(src_path, "nexus")
+            s = self.write_out_validate_equal_and_return(
+                    d1, "nexus", {})
+            d2 = matrix_type.get_from_string(s, "nexus")
+            self.verify_char_matrix(d2, src_matrix_checker_type)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_nexus_writer_dataset.py b/dendropy/test/test_dataio_nexus_writer_dataset.py
new file mode 100644
index 0000000..017bb92
--- /dev/null
+++ b/dendropy/test/test_dataio_nexus_writer_dataset.py
@@ -0,0 +1,91 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for NEXUS dataset writing.
+"""
+
+import unittest
+import dendropy
+from dendropy.test.support import pathmap
+from dendropy.test.support import dendropytest
+from dendropy.test.support import compare_and_validate
+from dendropy.test.support import standard_file_test_datasets
+
+class DataSetNexusWriterMixedTestCase(
+        standard_file_test_datasets.StandardSingleTaxonNamespaceDataSet,
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_datasets.StandardSingleTaxonNamespaceDataSet.build(cls)
+        cls.check_taxon_annotations = False
+        cls.check_matrix_annotations = False
+        cls.check_sequence_annotations = False
+        cls.check_column_annotations = False
+        cls.check_cell_annotations = False
+
+    def get_source_dataset(self, **kwargs):
+        src_ds = dendropy.DataSet.get_from_path(
+                pathmap.mixed_source_path("standard-test-mixed.1.basic.nexus"),
+                "nexus",
+                **kwargs
+                )
+        return src_ds
+
+    def test_basic(self):
+        # `suppress_internal_node_taxa=False` so internal labels get translated
+        d0 = self.get_source_dataset(suppress_internal_node_taxa=False)
+        s = self.write_out_validate_equal_and_return(
+                d0, "nexus", {})
+        ds = dendropy.DataSet.get_from_string(
+                s, "nexus",
+                suppress_internal_node_taxa=False, # so internal labels get translated
+                )
+        self.verify_dataset(ds)
+
+class DataSetNexusWriterMesquiteMultipleTaxonNamespacesTest(
+        standard_file_test_datasets.MultipleTaxonNamespaceDataSet,
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+
+    def test_attached_taxon_namespace(self):
+        d0 = dendropy.DataSet.get_from_path(
+                pathmap.mixed_source_path('multitaxa_mesquite.nex'),
+                "nexus")
+        for tns in d0.taxon_namespaces:
+            d0.attach_taxon_namespace(tns)
+            s = self.write_out_validate_equal_and_return(
+                    d0, "nexus", {})
+            ds = dendropy.DataSet.get_from_string(s, "nexus",)
+            self.verify_attached_taxon_namespace_written(ds, tns)
+
+    def test_default(self):
+        d0 = dendropy.DataSet.get_from_path(
+                pathmap.mixed_source_path('multitaxa_mesquite.nex'),
+                "nexus")
+        s = self.write_out_validate_equal_and_return(
+                d0, "nexus", {})
+        ds = dendropy.DataSet.get_from_string(
+                s, "nexus",
+                )
+        self.verify_unrestricted(ds)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_nexus_writer_tree.py b/dendropy/test/test_dataio_nexus_writer_tree.py
new file mode 100644
index 0000000..bae3ce2
--- /dev/null
+++ b/dendropy/test/test_dataio_nexus_writer_tree.py
@@ -0,0 +1,407 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for NEXUS tree writing.
+"""
+
+import collections
+import unittest
+import dendropy
+import re
+from dendropy.test.support import pathmap
+from dendropy.test.support import compare_and_validate
+from dendropy.test.support import dendropytest
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import standard_file_test_trees
+from dendropy.test.test_dataio_newick_writer import newick_tree_writer_test_tree
+
+class NexusTreeWriterTests(
+        curated_test_tree.CuratedTestTree,
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+
+    def test_simple(self):
+        tree1, all_nodes, leaf_nodes, internal_nodes = self.get_tree(
+                    suppress_internal_node_taxa=False,
+                    suppress_leaf_node_taxa=False
+                )
+        kwargs = {
+                "suppress_leaf_node_labels": True,
+                "suppress_internal_node_labels": True
+                }
+        s = self.write_out_validate_equal_and_return(
+                tree1, "nexus", kwargs)
+        tree2 = dendropy.Tree.get_from_string(
+                s, "nexus",
+                )
+        self.verify_curated_tree(tree2)
+
+class NexusTreeWriterDefaultTest(
+        standard_file_test_trees.NexusTestTreesChecker,
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NexusTestTreesChecker.create_class_fixtures(cls,
+                is_metadata_extracted=True,
+                )
+
+    def test_roundtrip_full(self):
+        tree_file_title = 'dendropy-test-trees-n33-unrooted-annotated-x10a'
+        tree_filepath = standard_file_test_trees._TREE_FILEPATHS["nexus"][tree_file_title]
+        tree1 = dendropy.Tree.get_from_path(
+                tree_filepath,
+                "nexus",
+                extract_comment_metadata=True,
+                store_tree_weights=True,
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False,
+        )
+        kwargs = {
+            "suppress_leaf_taxon_labels"     :  False , # default: False ,
+            "suppress_leaf_node_labels"      :  True  , # default: True  ,
+            "suppress_internal_taxon_labels" :  False , # default: False ,
+            "suppress_internal_node_labels"  :  True  , # default: False ,
+            "suppress_rooting"               :  False , # default: False ,
+            "suppress_edge_lengths"          :  False , # default: False ,
+            "unquoted_underscores"           :  False , # default: False ,
+            "preserve_spaces"                :  False , # default: False ,
+            "store_tree_weights"             :  False , # default: False ,
+            "suppress_annotations"           :  False , # default: True  ,
+            "annotations_as_nhx"             :  False , # default: False ,
+            "suppress_item_comments"         :  False , # default: True  ,
+            "node_label_element_separator"   :  ' '   , # default: ' '   ,
+            "node_label_compose_fn"        :  None  , # default: None  ,
+            "edge_label_compose_fn"        :  None  , # default: None  ,
+        }
+        s = self.write_out_validate_equal_and_return(
+                tree1, "nexus", kwargs)
+        tree2 = dendropy.Tree.get_from_string(
+                s,
+                "nexus",
+                extract_comment_metadata=True,
+                store_tree_weights=True,
+                suppress_internal_node_taxa=True,
+                suppress_leaf_node_taxa=False,
+        )
+        self.compare_to_reference_by_title_and_index(
+            tree=tree2,
+            tree_file_title=tree_file_title,
+            reference_tree_idx=0)
+            # suppress_internal_node_taxa=False,
+            # suppress_leaf_node_taxa=False,
+            # is_metadata_extracted=True,
+            # is_coerce_metadata_values_to_string=True,
+            # is_distinct_nodes_and_edges_representation=False)
+
+class NexusTreeWriterGeneralOptionsTests(
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+    """
+    Exercises individual NEXUS tree-writer keyword options one at a time,
+    round-tripping each written string back through the NEXUS reader and
+    asserting on the re-read tree.
+    """
+
+    def test_node_labeling(self):
+        # Exhaustively sweeps every combination of taxon/label presence
+        # on the source tree against every combination of the writer's
+        # label-suppression options. ``newick_tree_writer_test_tree``
+        # stores, on each node, the label expected for each
+        # (suppress-taxon-labels, suppress-node-labels) pair in
+        # ``nd.expected_label``.
+        for has_leaf_node_taxa in (True, False):
+            for has_leaf_node_labels in (True, False):
+                for has_internal_node_taxa in (True, False):
+                    for has_internal_node_labels in (True, False):
+                        for label_separator in (' ', '$$$'):
+                            tree = newick_tree_writer_test_tree(
+                                    has_leaf_node_taxa=has_leaf_node_taxa,
+                                    has_leaf_node_labels=has_leaf_node_labels,
+                                    has_internal_node_taxa=has_internal_node_taxa,
+                                    has_internal_node_labels=has_internal_node_labels,
+                                    label_separator=label_separator,
+                                    )
+                            for suppress_leaf_taxon_labels in (True, False):
+                                for suppress_leaf_node_labels in (True, False):
+                                    for suppress_internal_taxon_labels in (True, False):
+                                        for suppress_internal_node_labels in (True, False):
+                                            kwargs = {
+                                                    "suppress_leaf_taxon_labels"     : suppress_leaf_taxon_labels,
+                                                    "suppress_leaf_node_labels"      : suppress_leaf_node_labels,
+                                                    "suppress_internal_taxon_labels" : suppress_internal_taxon_labels,
+                                                    "suppress_internal_node_labels"  : suppress_internal_node_labels,
+                                                    "node_label_element_separator"   : label_separator,
+                                                    }
+                                            s = self.write_out_validate_equal_and_return(
+                                                    tree, "nexus", kwargs)
+                                            # Re-read without creating taxa so that
+                                            # all written labels land on nd.label.
+                                            tree2 = dendropy.Tree.get_from_string(
+                                                    s,
+                                                    "nexus",
+                                                    extract_comment_metadata=True,
+                                                    store_tree_weights=True,
+                                                    suppress_internal_node_taxa=True,
+                                                    suppress_leaf_node_taxa=True,
+                                            )
+                                            nodes1 = [nd for nd in tree]
+                                            nodes2 = [nd for nd in tree2]
+                                            self.assertEqual(len(nodes1), len(nodes2))
+                                            for nd1, nd2 in zip(nodes1, nodes2):
+                                                is_leaf = nd1.is_leaf()
+                                                self.assertEqual(nd2.is_leaf(), is_leaf)
+                                                if is_leaf:
+                                                    self.assertEqual(nd2.label,
+                                                            nd1.expected_label[(suppress_leaf_taxon_labels, suppress_leaf_node_labels)])
+                                                else:
+                                                    self.assertEqual(nd2.label,
+                                                            nd1.expected_label[ (suppress_internal_taxon_labels, suppress_internal_node_labels) ])
+
+    def test_rooting_token(self):
+        # When the rooting token is suppressed, rootedness must come back
+        # undefined regardless of the source tree's rooting state;
+        # otherwise the re-read state must mirror the source state.
+        tree1 = newick_tree_writer_test_tree()
+        for rooted_state in (None, True, False):
+            tree1.is_rooted = rooted_state
+            for suppress_rooting in (True, False):
+                kwargs = {
+                        "suppress_rooting": suppress_rooting,
+                }
+                s = self.write_out_validate_equal_and_return(
+                        tree1, "nexus", kwargs)
+                tree2 = dendropy.Tree.get_from_string(
+                        s, "nexus", rooting=None)
+                if suppress_rooting:
+                    self.assertTrue(tree2.is_rootedness_undefined)
+                else:
+                    if rooted_state is True:
+                        self.assertTrue(tree2.is_rooted)
+                        self.assertFalse(tree2.is_unrooted)
+                    elif rooted_state is False:
+                        self.assertFalse(tree2.is_rooted)
+                        self.assertTrue(tree2.is_unrooted)
+                    else:
+                        self.assertTrue(tree2.is_rootedness_undefined)
+
+    def test_edge_lengths(self):
+        # Suppressed edge lengths must come back as None on every node;
+        # otherwise each length must survive the round trip unchanged.
+        tree1 = newick_tree_writer_test_tree()
+        for suppress_edge_lengths in (True, False):
+            kwargs = {
+                    "suppress_edge_lengths": suppress_edge_lengths,
+            }
+            s = self.write_out_validate_equal_and_return(
+                    tree1, "nexus", kwargs)
+            tree2 = dendropy.Tree.get_from_string(
+                    s, "nexus", rooting=None)
+            nodes1 = [nd for nd in tree1]
+            nodes2 = [nd for nd in tree2]
+            self.assertEqual(len(nodes1), len(nodes2))
+            for nd1, nd2 in zip(nodes1, nodes2):
+                if suppress_edge_lengths:
+                    self.assertIs(nd2.edge.length, None)
+                else:
+                    self.assertEqual(nd2.edge.length, nd1.edge.length)
+
+    def test_unquoted_underscores(self):
+        # Labels of the form "x_x": if written with unquoted underscores,
+        # the reader converts "_" to " " unless it is asked to preserve
+        # underscores; quoted labels always round-trip verbatim.
+        tree1 = newick_tree_writer_test_tree(
+                has_leaf_node_labels=False,
+                has_internal_node_labels=False)
+        for taxon in tree1.taxon_namespace:
+            taxon.label = "{label}_{label}".format(label=taxon.label)
+        for unquoted_underscores in (True, False):
+            kwargs = {
+                    "unquoted_underscores": unquoted_underscores,
+            }
+            s = self.write_out_validate_equal_and_return(
+                    tree1, "nexus", kwargs)
+            for preserve_underscores in (True, False):
+                tree2 = dendropy.Tree.get_from_string(
+                        s,
+                        "nexus",
+                        suppress_internal_node_taxa=False,
+                        preserve_underscores=preserve_underscores)
+                nodes1 = [nd for nd in tree1]
+                nodes2 = [nd for nd in tree2]
+                self.assertEqual(len(nodes1), len(nodes2))
+                for nd1, nd2 in zip(nodes1, nodes2):
+                    original_label = nd1.taxon.label
+                    if unquoted_underscores:
+                        if preserve_underscores:
+                            expected_label = original_label
+                        else:
+                            expected_label = original_label.replace("_", " ")
+                    else:
+                        expected_label = original_label
+                    self.assertEqual(nd2.taxon.label, expected_label)
+
+    def test_preserve_spaces(self):
+        # Labels of the form "x x": without preserve_spaces the writer
+        # substitutes underscores, which the reader (asked to preserve
+        # underscores) returns literally.
+        tree1 = newick_tree_writer_test_tree(
+                has_leaf_node_labels=False,
+                has_internal_node_labels=False)
+        for taxon in tree1.taxon_namespace:
+            taxon.label = "{label} {label}".format(label=taxon.label)
+        for preserve_spaces in (True, False):
+            kwargs = {
+                    "preserve_spaces": preserve_spaces,
+            }
+            s = self.write_out_validate_equal_and_return(
+                    tree1, "nexus", kwargs)
+            tree2 = dendropy.Tree.get_from_string(
+                    s,
+                    "nexus",
+                    suppress_internal_node_taxa=False,
+                    preserve_underscores=True)
+            nodes1 = [nd for nd in tree1]
+            nodes2 = [nd for nd in tree2]
+            self.assertEqual(len(nodes1), len(nodes2))
+            for nd1, nd2 in zip(nodes1, nodes2):
+                original_label = nd1.taxon.label
+                if preserve_spaces:
+                    expected_label = original_label
+                else:
+                    expected_label = original_label.replace(" ", "_")
+                self.assertEqual(nd2.taxon.label, expected_label)
+
+    def test_store_tree_weights(self):
+        # The "[&W ...]" token should appear iff weights are stored and a
+        # weight is set; weights may be numeric or "a/b" fraction strings.
+        tree1 = newick_tree_writer_test_tree(
+                has_leaf_node_labels=False,
+                has_internal_node_labels=False)
+        for store_tree_weights in (True, False):
+            for weight in (None, "23.0", "1/2", 1.0):
+                tree1.weight = weight
+                kwargs = {
+                        "store_tree_weights": store_tree_weights,
+                }
+                s = self.write_out_validate_equal_and_return(
+                        tree1, "nexus", kwargs)
+                tree2 = dendropy.Tree.get_from_string(
+                        s,
+                        "nexus",
+                        store_tree_weights=True)
+                if store_tree_weights and weight is not None:
+                    self.assertTrue("[&W " in s)
+                    try:
+                        w = float(weight)
+                    except ValueError:
+                        # NOTE(review): eval here only ever sees a
+                        # test-local fraction literal such as "1/2";
+                        # acceptable in a test, never on external input.
+                        w = eval("/".join(str(float(w)) for w in weight.split("/")))
+                    self.assertEqual(tree2.weight, w)
+                else:
+                    self.assertFalse("[&W " in s)
+                    self.assertEqual(tree2.weight, 1.0) # default weight
+
+    def test_suppress_annotations(self):
+        # Three nodes (seed + two children), each with a node and an edge
+        # annotation, plus one tree-level annotation: 7 annotation
+        # comments total are expected in the output.
+        tree1 = dendropy.Tree()
+        a1 = tree1.seed_node.new_child()
+        a2 = tree1.seed_node.new_child()
+        tree1.annotations.add_new("t", 1)
+        for nd in tree1:
+            nd.annotations.add_new("a", 1)
+            nd.edge.annotations.add_new("b", 2)
+        for suppress_annotations in (True, False):
+            for annotations_as_nhx in (True, False):
+                kwargs = {
+                        "suppress_annotations"   :  suppress_annotations,
+                        "annotations_as_nhx"     :  annotations_as_nhx,
+                }
+                s = self.write_out_validate_equal_and_return(
+                        tree1, "nexus", kwargs)
+                tree2 = dendropy.Tree.get_from_string(
+                        s,
+                        "nexus",
+                        extract_comment_metadata=True)
+                if suppress_annotations:
+                    self.assertFalse(tree2.has_annotations)
+                    for nd in tree2:
+                        self.assertFalse(nd.has_annotations)
+                else:
+                    if annotations_as_nhx:
+                        self.assertEqual(s.count("[&&NHX"), 7)
+                        # self.assertEqual(len(re.findall(r"\[&&NHX", s)), 7)
+                    else:
+                        self.assertEqual(s.count("[&&NHX"), 0)
+                        self.assertEqual(s.count("[&"), 7)
+                        # self.assertEqual(len(re.findall(r"\[&&NHX", s)), 0)
+                        # self.assertEqual(len(re.findall(r"\[&.*?\]", s)), 7)
+                    self.assertTrue(tree2.has_annotations)
+                    # Metadata values come back as strings after parsing.
+                    self.assertEqual(tree2.annotations.get_value("t"), '1')
+                    for nd in tree2:
+                        self.assertTrue(nd.has_annotations)
+                        self.assertEqual(nd.annotations.get_value("a"), '1')
+                        self.assertEqual(nd.annotations.get_value("b"), '2')
+
+    def test_suppress_item_comments(self):
+        # On re-read, each node's own comment and its edge's comment are
+        # both collected onto the node (["n1", "e1"] below).
+        tree1 = dendropy.Tree()
+        a1 = tree1.seed_node.new_child()
+        a2 = tree1.seed_node.new_child()
+        tree1.comments.append("t1")
+        for nd in tree1:
+            nd.comments.append("n1")
+            nd.edge.comments.append("e1")
+        for suppress_item_comments in (True, False):
+            kwargs = {
+                    "suppress_item_comments"   :  suppress_item_comments,
+            }
+            s = self.write_out_validate_equal_and_return(
+                    tree1, "nexus", kwargs)
+            tree2 = dendropy.Tree.get_from_string(
+                    s,
+                    "nexus",
+                    extract_comment_metadata=False)
+            if suppress_item_comments:
+                self.assertEqual(tree2.comments, [])
+                for nd in tree2:
+                    self.assertEqual(nd.comments, [])
+                    self.assertEqual(nd.edge.comments, [])
+            else:
+                self.assertEqual(tree2.comments, ["t1"])
+                for nd in tree2:
+                    self.assertEqual(nd.comments, ["n1", "e1"])
+
+    def test_node_label_compose_fn(self):
+        # A user compose function must fully determine every written
+        # node label ("zzz" here), overriding taxon and node labels.
+        # NOTE(review): both children carry the label "a1" — presumably a
+        # copy-paste slip, but harmless here since the compose function
+        # rewrites every label anyway; confirm intent upstream.
+        tree1 = dendropy.Tree()
+        a1 = tree1.seed_node.new_child(label="a1")
+        a1.taxon = tree1.taxon_namespace.require_taxon("hula")
+        a2 = tree1.seed_node.new_child(label="a1")
+        a2.taxon = tree1.taxon_namespace.require_taxon("hoop")
+        f = lambda x: "zzz"
+        kwargs = {
+                "suppress_leaf_taxon_labels"     :  False ,
+                "suppress_leaf_node_labels"      :  False ,
+                "suppress_internal_taxon_labels" :  False ,
+                "suppress_internal_node_labels"  :  False ,
+                "node_label_compose_fn"   :  f,
+        }
+        s = self.write_out_validate_equal_and_return(
+                tree1, "nexus", kwargs)
+        tree2 = dendropy.Tree.get_from_string(
+                s,
+                "nexus",
+                suppress_leaf_node_taxa=True,
+                suppress_internal_node_taxa=True)
+        for nd in tree2:
+            self.assertEqual(nd.label, "zzz")
+
+    def test_edge_label_compose_fn(self):
+        # A user compose function must fully determine every written
+        # edge length (1000 here), overriding the actual lengths of 1.
+        tree1 = dendropy.Tree()
+        tree1.seed_node.edge.length = 1
+        a1 = tree1.seed_node.new_child(label="a1", edge_length=1)
+        a2 = tree1.seed_node.new_child(label="a1", edge_length=1)
+        f = lambda x: 1000
+        kwargs = {
+                "edge_label_compose_fn"   :  f,
+        }
+        s = self.write_out_validate_equal_and_return(
+                tree1, "nexus", kwargs)
+        tree2 = dendropy.Tree.get_from_string(
+                s,
+                "nexus",
+                suppress_leaf_node_taxa=True,
+                suppress_internal_node_taxa=True)
+        for nd in tree2:
+            self.assertEqual(nd.edge.length, 1000)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_nexus_writer_tree_list.py b/dendropy/test/test_dataio_nexus_writer_tree_list.py
new file mode 100644
index 0000000..8f07945
--- /dev/null
+++ b/dendropy/test/test_dataio_nexus_writer_tree_list.py
@@ -0,0 +1,60 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for NEXUS tree list writing.
+"""
+
+import unittest
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import standard_file_test_trees
+from dendropy.test.support import compare_and_validate
+
+class NexusStandardTreeListWriterTestCase(
+        compare_and_validate.ValidateWriteable,
+        standard_file_test_trees.NexusTestTreesChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_trees.NexusTestTreesChecker.create_class_fixtures(cls)
+
+    def test_annotated_tree_list_writing(self):
+        tree_file_title = 'dendropy-test-trees-n33-unrooted-annotated-x10a'
+        tree_reference = standard_file_test_trees._TREE_REFERENCES[tree_file_title]
+        expected_non_metadata_comments = tree_reference["tree_list_comments"]
+        expected_metadata_comments = tree_reference["tree_list_metadata"]
+        expected_metadata = tree_reference["tree_list_metadata"]
+        tree_filepath = self.schema_tree_filepaths[tree_file_title]
+        tree_list1 = dendropy.TreeList.get_from_path(
+                tree_filepath,
+                "nexus",
+                extract_comment_metadata=True)
+        s = self.write_out_validate_equal_and_return(
+                tree_list1, "nexus", {})
+        tree_list2 = dendropy.TreeList.get_from_string(s,
+                "nexus",
+                extract_comment_metadata=True)
+        self.verify_standard_trees(
+                tree_list=tree_list2,
+                tree_file_title=tree_file_title,
+                tree_offset=0)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_phylip_reader_chars.py b/dendropy/test/test_dataio_phylip_reader_chars.py
new file mode 100644
index 0000000..777106b
--- /dev/null
+++ b/dendropy/test/test_dataio_phylip_reader_chars.py
@@ -0,0 +1,310 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for general PHYLIP character matrix reading.
+"""
+
+import unittest
+import dendropy
+import collections
+from dendropy.utility import error
+from dendropy.test.support import dendropytest
+from dendropy.test.support import pathmap
+from dendropy.test.support import standard_file_test_chars
+from dendropy.test.support import compare_and_validate
+from dendropy.dataio import phylipreader
+from dendropy.utility import messaging
+_LOG = messaging.get_logger(__name__)
+
+class PhylipCharactersReaderDnaTestCase(
+        standard_file_test_chars.DnaTestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build()
+
+    def test_basic_phylip(self):
+        src_filenames = [
+                "standard-test-chars-dna.relaxed.phylip",
+                ]
+        for src_idx, src_filename in enumerate(src_filenames):
+            # print(src_idx, src_filename)
+            src_path = pathmap.char_source_path(src_filename)
+            self.verify_get_from(
+                    matrix_type=dendropy.DnaCharacterMatrix,
+                    src_filepath=src_path,
+                    schema="phylip",
+                    factory_kwargs={},
+                    check_taxon_annotations=False,
+                    check_matrix_annotations=False,
+                    check_sequence_annotations=False,
+                    check_column_annotations=False,
+                    check_cell_annotations=False)
+
+class PhylipCharactersReaderRnaTestCase(
+        standard_file_test_chars.RnaTestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build()
+
+    def test_basic_phylip(self):
+        src_filenames = [
+                "standard-test-chars-rna.relaxed.phylip",
+                ]
+        for src_idx, src_filename in enumerate(src_filenames):
+            # print(src_idx, src_filename)
+            src_path = pathmap.char_source_path(src_filename)
+            self.verify_get_from(
+                    matrix_type=dendropy.RnaCharacterMatrix,
+                    src_filepath=src_path,
+                    schema="phylip",
+                    factory_kwargs={},
+                    check_taxon_annotations=False,
+                    check_matrix_annotations=False,
+                    check_sequence_annotations=False,
+                    check_column_annotations=False,
+                    check_cell_annotations=False)
+
+class PhylipCharactersReaderProteinTestCase(
+        standard_file_test_chars.ProteinTestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build()
+
+    def test_basic_phylip(self):
+        src_filenames = [
+                "standard-test-chars-protein.relaxed.phylip",
+                ]
+        for src_idx, src_filename in enumerate(src_filenames):
+            # print(src_idx, src_filename)
+            src_path = pathmap.char_source_path(src_filename)
+            self.verify_get_from(
+                    matrix_type=dendropy.ProteinCharacterMatrix,
+                    src_filepath=src_path,
+                    schema="phylip",
+                    factory_kwargs={},
+                    check_taxon_annotations=False,
+                    check_matrix_annotations=False,
+                    check_sequence_annotations=False,
+                    check_column_annotations=False,
+                    check_cell_annotations=False)
+
+class PhylipStandardCharacters01234TestCase(
+        standard_file_test_chars.Standard01234TestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build(state_alphabet_fundamental_symbols="0123456789-")
+
+    def test_basic_phylip(self):
+        src_filenames = [
+                "standard-test-chars-generic.relaxed.phylip",
+                ]
+        for src_idx, src_filename in enumerate(src_filenames):
+            # print(src_idx, src_filename)
+            src_path = pathmap.char_source_path(src_filename)
+            self.verify_get_from(
+                    matrix_type=dendropy.StandardCharacterMatrix,
+                    src_filepath=src_path,
+                    schema="phylip",
+                    factory_kwargs={},
+                    check_taxon_annotations=False,
+                    check_matrix_annotations=False,
+                    check_sequence_annotations=False,
+                    check_column_annotations=False,
+                    check_cell_annotations=False)
+
+class PhylipCharactersContinuousTestCase(
+        standard_file_test_chars.ContinuousTestChecker,
+        dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        cls.build()
+
+    def test_basic_nexus(self):
+        src_filenames = [
+                ("standard-test-chars-continuous.relaxed.phylip", {}),
+                ("standard-test-chars-continuous.interleaved.phylip", {"interleaved": True}),
+                ]
+        for src_idx, (src_filename, kwargs) in enumerate(src_filenames):
+            # print(src_idx, src_filename)
+            src_path = pathmap.char_source_path(src_filename)
+            self.verify_get_from(
+                    matrix_type=dendropy.ContinuousCharacterMatrix,
+                    src_filepath=src_path,
+                    schema="phylip",
+                    factory_kwargs=kwargs,
+                    check_taxon_annotations=False,
+                    check_matrix_annotations=False,
+                    check_sequence_annotations=False,
+                    check_column_annotations=False,
+                    check_cell_annotations=False)
+
+class PhylipVariantsTestCases(dendropytest.ExtendedTestCase):
+    """
+    Tests strict-mode PHYLIP DNA parsing: fixed-width taxon label fields
+    (labels may abut the sequence data with no separator), in sequential
+    and interleaved layouts, with and without invalid characters.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        # An OrderedDict preserves insertion order, which the tests rely
+        # on when zipping the parsed matrix against these expectations.
+        cls.expected_seqs = collections.OrderedDict()
+        cls.expected_seqs["Turkey"]     = "AAGCTNGGGCATTTCAGGGTGAGCCCGGGCAATACAGGGTAT"
+        cls.expected_seqs["Salmo gair"] = "AAGCCTTGGCAGTGCAGGGTGAGCCGTGGCCGGGCACGGTAT"
+        cls.expected_seqs["H. Sapiens"] = "ACCGGTTGGCCGTTCAGGGTACAGGTTGGCCGTTCAGGGTAA"
+        cls.expected_seqs["Chimp"]      = "AAACCCTTGCCGTTACGCTTAAACCGAGGCCGGGACACTCAT"
+        cls.expected_seqs["Gorilla"]    = "AAACCCTTGCCGGTACGCTTAAACCATTGCCGGTACGCTTAA"
+
+    def test_strict_sequential(self):
+        # Strict sequential layout: each taxon's label occupies a fixed
+        # 10-character field (note "Salmo gair" abutting its sequence),
+        # and its full sequence follows before the next taxon begins.
+        s = """\
+  5    42
+Turkey    AAGCTNGGGC ATTTCAGGGT
+GAGCCCGGGC AATACAGGGT AT
+Salmo gairAAGCCTTGGC AGTGCAGGGT
+GAGCCGTGGC CGGGCACGGT AT
+H. SapiensACCGGTTGGC CGTTCAGGGT
+ACAGGTTGGC CGTTCAGGGT AA
+Chimp     AAACCCTTGC CGTTACGCTT
+AAACCGAGGC CGGGACACTC AT
+Gorilla   AAACCCTTGC CGGTACGCTT
+AAACCATTGC CGGTACGCTT AA
+        """
+        char_matrix = dendropy.DnaCharacterMatrix.get_from_string(
+                s,
+                "phylip",
+                strict=True)
+        self.assertEqual(len(char_matrix), len(self.expected_seqs))
+        self.assertEqual(len(char_matrix.taxon_namespace), len(self.expected_seqs))
+        for taxon, expected_taxon in zip(char_matrix, self.expected_seqs):
+            self.assertEqual(taxon.label, expected_taxon)
+            self.assertEqual(char_matrix[taxon].symbols_as_string(), self.expected_seqs[expected_taxon])
+
+    def test_strict_interleaved(self):
+        # Interleaved layout: a block of one row per taxon, then further
+        # unlabeled blocks continuing each taxon's sequence in order.
+        s = """\
+5    42
+Turkey    AAGCTNGGGC ATTTCAGGGT
+Salmo gairAAGCCTTGGC AGTGCAGGGT
+H. SapiensACCGGTTGGC CGTTCAGGGT
+Chimp     AAACCCTTGC CGTTACGCTT
+Gorilla   AAACCCTTGC CGGTACGCTT
+
+GAGCCCGGGC AATACAGGGT AT
+GAGCCGTGGC CGGGCACGGT AT
+ACAGGTTGGC CGTTCAGGGT AA
+AAACCGAGGC CGGGACACTC AT
+AAACCATTGC CGGTACGCTT AA
+        """
+        char_matrix = dendropy.DnaCharacterMatrix.get_from_string(
+                s,
+                "phylip",
+                interleaved=True,
+                strict=True)
+        self.assertEqual(len(char_matrix), len(self.expected_seqs))
+        self.assertEqual(len(char_matrix.taxon_namespace), len(self.expected_seqs))
+        for taxon, expected_taxon in zip(char_matrix, self.expected_seqs):
+            self.assertEqual(taxon.label, expected_taxon)
+            self.assertEqual(char_matrix[taxon].symbols_as_string(), self.expected_seqs[expected_taxon])
+
+    def test_strict_interleaved_with_bad_chars(self):
+        # With ignore_invalid_chars=True, the injected "3828" digit runs
+        # must be silently dropped, yielding the same expected sequences.
+        s = """\
+5    42
+Turkey    AAGCTNGGGC ATTTCA3828GGGT
+Salmo gairAAGCCTTGGC AGTGCA3828GGGT
+H. SapiensACCGGTTGGC CGTTCA3828GGGT
+Chimp     AAACCCTTGC CGTTAC3828GCTT
+Gorilla   AAACCCTTGC CGGTAC3828GCTT
+
+GAGCCCGGGC AATACAGGGT AT
+GAGCCGTGGC CGGGCACGGT AT
+ACAGGTTGGC CGTTCAGGGT AA
+AAACCGAGGC CGGGACACTC AT
+AAACCATTGC CGGTACGCTT AA
+        """
+        char_matrix = dendropy.DnaCharacterMatrix.get_from_string(
+                s,
+                "phylip",
+                interleaved=True,
+                strict=True,
+                ignore_invalid_chars=True)
+        self.assertEqual(len(char_matrix), len(self.expected_seqs))
+        self.assertEqual(len(char_matrix.taxon_namespace), len(self.expected_seqs))
+        for taxon, expected_taxon in zip(char_matrix, self.expected_seqs):
+            self.assertEqual(taxon.label, expected_taxon)
+            self.assertEqual(char_matrix[taxon].symbols_as_string(), self.expected_seqs[expected_taxon])
+
+class PhylipContinuousVariantsTestCases(dendropytest.ExtendedTestCase):
+    """
+    Tests strict-mode PHYLIP parsing of continuous (floating-point)
+    character data in sequential and interleaved layouts.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        # An OrderedDict preserves insertion order, which the tests rely
+        # on when zipping the parsed matrix against these expectations.
+        cls.expected_seqs = collections.OrderedDict()
+        cls.expected_seqs["Turkey"]     = [-231.6391 ,  972.4189  ,  626.6717  ,  -328.6811 ,  -213.5738 ,  464.3897  ,  -91.3483  ,  349.8176  ,  333.4800  ,  521.4970  ]
+        cls.expected_seqs["Salmo gair"] = [ 104.4199 ,  669.7402  ,  -68.6082  ,  975.4302  ,  -874.4510 ,  -191.3305 ,  -179.8437 ,  655.5611  ,  -657.4532 ,  -563.7863 ]
+        cls.expected_seqs["H. Sapiens"] = [-613.2947 ,  -600.7053 ,  -700.5140 ,  438.6092  ,  615.5268  ,  640.7933  ,  503.8948  ,  -159.7922 ,  866.8036  ,  274.0275  ]
+        cls.expected_seqs["Chimp"]      = [-654.7695 ,  103.3806  ,  -971.8866 ,  853.9164  ,  653.5797  ,  823.6672  ,  -476.6859 ,  325.9331  ,  456.0902  ,  -399.7095 ]
+        cls.expected_seqs["Gorilla"]    = [-762.4904 ,  808.3665  ,  522.5775  ,  250.6523  ,  -287.9786 ,  -995.4612 ,  571.9263  ,  -793.3975 ,  -42.7027  ,  186.8869  ]
+
+    def test_strict_sequential(self):
+        # Strict sequential layout: fixed-width 10-character label field
+        # ("H. Sapiens" abuts its first value), one taxon per row.
+        s = """\
+  5    10
+Turkey    -231.6391 972.4189  626.6717  -328.6811 -213.5738 464.3897  -91.3483  349.8176  333.4800  521.4970
+Salmo gair 104.4199 669.7402  -68.6082  975.4302  -874.4510 -191.3305 -179.8437 655.5611  -657.4532 -563.7863
+H. Sapiens-613.2947 -600.7053 -700.5140 438.6092  615.5268  640.7933  503.8948  -159.7922 866.8036  274.0275
+Chimp     -654.7695 103.3806  -971.8866 853.9164  653.5797  823.6672  -476.6859 325.9331  456.0902  -399.7095
+Gorilla   -762.4904 808.3665  522.5775  250.6523  -287.9786 -995.4612 571.9263  -793.3975 -42.7027  186.8869
+        """
+        char_matrix = dendropy.ContinuousCharacterMatrix.get_from_string(
+                s,
+                "phylip",
+                strict=True)
+        self.assertEqual(len(char_matrix), len(self.expected_seqs))
+        self.assertEqual(len(char_matrix.taxon_namespace), len(self.expected_seqs))
+        for taxon, expected_taxon in zip(char_matrix, self.expected_seqs):
+            self.assertEqual(taxon.label, expected_taxon)
+            # NOTE(review): assumes the sequence's .values() returns a
+            # plain list of floats equality-comparable to the expected
+            # lists above — confirm against CharacterDataSequence.
+            self.assertEqual(char_matrix[taxon].values(), self.expected_seqs[expected_taxon])
+
+    def test_strict_interleaved(self):
+        # Interleaved layout: a labeled block of the first four values
+        # per taxon, then unlabeled continuation rows in the same order.
+        s = """\
+  5    10
+Turkey    -231.6391 972.4189  626.6717  -328.6811
+Salmo gair 104.4199 669.7402  -68.6082  975.4302
+H. Sapiens-613.2947 -600.7053 -700.5140 438.6092
+Chimp     -654.7695 103.3806  -971.8866 853.9164
+Gorilla   -762.4904 808.3665  522.5775  250.6523
+ -213.5738 464.3897  -91.3483  349.8176  333.4800  521.4970
+ -874.4510 -191.3305 -179.8437 655.5611  -657.4532 -563.7863
+ 615.5268  640.7933  503.8948  -159.7922 866.8036  274.0275
+ 653.5797  823.6672  -476.6859 325.9331  456.0902  -399.7095
+ -287.9786 -995.4612 571.9263  -793.3975 -42.7027  186.8869
+        """
+        char_matrix = dendropy.ContinuousCharacterMatrix.get_from_string(
+                s,
+                "phylip",
+                interleaved=True,
+                strict=True)
+        self.assertEqual(len(char_matrix), len(self.expected_seqs))
+        self.assertEqual(len(char_matrix.taxon_namespace), len(self.expected_seqs))
+        for taxon, expected_taxon in zip(char_matrix, self.expected_seqs):
+            self.assertEqual(taxon.label, expected_taxon)
+            self.assertEqual(char_matrix[taxon].values(), self.expected_seqs[expected_taxon])
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_phylip_writer_chars.py b/dendropy/test/test_dataio_phylip_writer_chars.py
new file mode 100644
index 0000000..48c9ed8
--- /dev/null
+++ b/dendropy/test/test_dataio_phylip_writer_chars.py
@@ -0,0 +1,116 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for PHYLIP tree list writing.
+"""
+
+import unittest
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import compare_and_validate
+from dendropy.test.support import pathmap
+from dendropy.test.support import standard_file_test_chars
+
+class PhylipWriterCharactersTestCase(
+        compare_and_validate.ValidateWriteable,
+        dendropytest.ExtendedTestCase):
+    """
+    Round-trip tests for the PHYLIP character-matrix writer: write each
+    fixture matrix under every writer-option combination, re-read it, and
+    verify the result against the original fixture data.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        """Build per-datatype fixtures and (filename, matrix type, checker) triples."""
+        # PHYLIP carries no annotation metadata, so all annotation
+        # comparisons are disabled for the round-tripped matrices.
+        cls.check_taxon_annotations = False
+        cls.check_matrix_annotations = False
+        cls.check_sequence_annotations = False
+        cls.check_column_annotations = False
+        cls.check_cell_annotations = False
+        standard_file_test_chars.DnaTestChecker.build()
+        standard_file_test_chars.RnaTestChecker.build()
+        standard_file_test_chars.ProteinTestChecker.build()
+        standard_file_test_chars.Standard01234TestChecker.build()
+        standard_file_test_chars.ContinuousTestChecker.build()
+        cls.srcs = (
+                ("standard-test-chars-dna.relaxed.phylip", dendropy.DnaCharacterMatrix, standard_file_test_chars.DnaTestChecker),
+                ("standard-test-chars-rna.relaxed.phylip", dendropy.RnaCharacterMatrix, standard_file_test_chars.RnaTestChecker),
+                ("standard-test-chars-protein.relaxed.phylip", dendropy.ProteinCharacterMatrix, standard_file_test_chars.ProteinTestChecker),
+                ("standard-test-chars-generic.relaxed.phylip", dendropy.StandardCharacterMatrix, standard_file_test_chars.Standard01234TestChecker),
+                ("standard-test-chars-continuous.relaxed.phylip", dendropy.ContinuousCharacterMatrix, standard_file_test_chars.ContinuousTestChecker),
+                )
+
+    def verify_char_matrix(self, char_matrix, src_matrix_checker_type):
+        """Check ``char_matrix`` against the checker class's expected fixture data."""
+        self.assertEqual(type(char_matrix), src_matrix_checker_type.matrix_type)
+        # StandardCharacterMatrix state alphabets are created at parse time,
+        # so the checker's expected label->sequence map must be rebuilt
+        # against the alphabet of the matrix just parsed.
+        if src_matrix_checker_type.matrix_type is dendropy.StandardCharacterMatrix:
+            src_matrix_checker_type.create_class_fixtures_label_sequence_map_based_on_state_alphabet(src_matrix_checker_type,
+                    char_matrix.default_state_alphabet)
+        standard_file_test_chars.general_char_matrix_checker(
+                self,
+                char_matrix,
+                src_matrix_checker_type,
+                check_taxon_annotations=self.check_taxon_annotations,
+                check_matrix_annotations=self.check_matrix_annotations,
+                check_sequence_annotations=self.check_sequence_annotations,
+                check_column_annotations=self.check_column_annotations,
+                check_cell_annotations=self.check_cell_annotations,)
+
+    def test_basic_phylip_chars(self):
+        """Write and re-read every fixture under all writer-option combinations."""
+        for src_filename, matrix_type, src_matrix_checker_type in self.__class__.srcs:
+            src_path = pathmap.char_source_path(src_filename)
+            d1 = matrix_type.get_from_path(src_path, "phylip")
+            # Exercise the full cross-product of writer keyword options.
+            for strict in (True, False):
+                for spaces_to_underscores in (True, False):
+                    for force_unique_taxon_labels in (True, False):
+                        s = self.write_out_validate_equal_and_return(
+                                d1, "phylip", {
+                                    "strict": strict,
+                                    "spaces_to_underscores" : spaces_to_underscores,
+                                    "force_unique_taxon_labels" : force_unique_taxon_labels,
+                                    })
+                        d2 = matrix_type.get_from_string(s, "phylip")
+                        self.verify_char_matrix(d2, src_matrix_checker_type)
+
+class PhylipWriterCharactersVariantsTestCase(dendropytest.ExtendedTestCase):
+    """Writer behavior under strict (fixed-width, 10-character label) PHYLIP output."""
+
+    @classmethod
+    def setUpClass(cls):
+        """Create a DNA matrix whose taxon labels collide within their first 10 characters."""
+        d = dendropy.DnaCharacterMatrix()
+        # All "a0_..." labels (and, separately, all "b0_..." labels) share
+        # the same first 10 characters, forcing the strict writer to
+        # generate disambiguated fixed-width labels.
+        cls.original_labels = [
+                "a0_123456789_1",
+                "a0_123456789_2",
+                "a0_123456789_3",
+                "a0_123456789_4",
+                "a0_123456789_5",
+                "b0_123456789_1",
+                "b0_123456789_2",
+                "b0_123456789_3",
+                "b0_123456789_4",
+                ]
+        for label in cls.original_labels:
+            t = d.taxon_namespace.require_taxon(label=label)
+            d[t] = d.default_state_alphabet.get_states_for_symbols("AACGT")
+        cls.data = d
+
+    def test_strict_write(self):
+        """Strict output must yield unique 10-character labels that survive a round trip."""
+        s0 = self.data.as_string("phylip", strict=True)
+        d2 = dendropy.DnaCharacterMatrix.get_from_string(s0, "phylip", strict=True)
+        # No two taxa may have collapsed onto the same truncated label.
+        obs_labels = set([t.label for t in d2])
+        self.assertEqual(len(obs_labels), len(self.original_labels))
+        for t in d2:
+            self.assertEqual(len(t.label), 10)
+            self.assertEqual(d2[t].symbols_as_string(), "AACGT")
+
+# Support running this test module directly as a script.
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_dataio_tokenizer.py b/dendropy/test/test_dataio_tokenizer.py
new file mode 100644
index 0000000..7430294
--- /dev/null
+++ b/dendropy/test/test_dataio_tokenizer.py
@@ -0,0 +1,163 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for tokenizers classes.
+"""
+
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+import unittest
+from dendropy.dataio import nexusprocessing
+
+class NexusTokenizerTestCase(unittest.TestCase):
+    """
+    Unit tests for NexusTokenizer.
+    """
+
+    def check_tokenization(self,
+            input_str,
+            expected_tokens):
+        src = StringIO(input_str)
+        observed = []
+        for token in nexusprocessing.NexusTokenizer(src=src):
+            observed.append(token)
+        self.assertEqual(observed, expected_tokens)
+
+    def test_simple_string(self):
+        input_str = "the    quick    brown\t\tfox \n  jumps over\t\t\n the    lazy dog"
+        expected = [
+                "the", "quick", "brown", "fox", "jumps", "over", "the", "lazy", "dog"
+                ]
+        self.check_tokenization(input_str, expected)
+
+    def test_simple_quoted_string(self):
+        input_str = "the quick 'brown fox' jumps over the 'lazy dog'"
+        expected = [
+                "the", "quick", "brown fox", "jumps", "over", "the", "lazy dog"
+                ]
+        self.check_tokenization(input_str, expected)
+
+    def test_padded_quoted_string(self):
+        input_str = "the quick 'brown fox''s friend' jumps over the 'lazy dog''s colleague'"
+        expected = [
+                "the", "quick", "brown fox's friend", "jumps", "over", "the", "lazy dog's colleague"
+                ]
+        self.check_tokenization(input_str, expected)
+
+    def test_runon_quoted_string(self):
+        input_str = "'a','b','c','d','e'"
+        expected = [
+                "a", ",", "b", ",", "c", ",", "d", ",", "e",
+                ]
+        self.check_tokenization(input_str, expected)
+
+    def test_comments(self):
+        input_str = "[&R] (foo:1 [a foo object], [start of subgroup](bar:2, c:2)[end of group][][][";
+        expected = [
+                "(", "foo", ":","1", ",", "(", "bar", ":", "2",  ",", "c", ":", "2", ")"
+                ]
+        self.check_tokenization(input_str, expected)
+
+    def test_empty(self):
+        input_str = "";
+        expected = []
+        self.check_tokenization(input_str, expected)
+
+    def test_captured_delimiters(self):
+        input_str = "(aaa:1.00,     (b:2.18e-1,      (ccc:11, d:1e-1)   k:  3)  u:   7)    rrr:0.0;";
+        expected = [
+            "(",
+            "aaa",
+            ":",
+            "1.00",
+            ",",
+            "(",
+            "b",
+            ":",
+            "2.18e-1",
+            ",",
+            "(",
+            "ccc",
+            ":",
+            "11",
+            ",",
+            "d",
+            ":",
+            "1e-1",
+            ")",
+            "k",
+            ":",
+            "3",
+            ")",
+            "u",
+            ":",
+            "7",
+            ")",
+            "rrr",
+            ":",
+            "0.0",
+            ";"
+                ]
+        self.check_tokenization(input_str, expected)
+
+    def test_comments(self):
+        input_str = "([the quick]apple[brown],([fox]banjo,([jumps]cucumber[over the],[really]dogwood)[lazy]eggplant)) rhubarb[dog];";
+        expected_comments = {
+            "apple"    : ["the quick", "brown"],
+            "banjo"    : ["fox"               ],
+            "cucumber" : ["jumps", "over the" ],
+            "dogwood"  : ["really"            ],
+            "eggplant" : ["lazy"              ],
+            "rhubarb"  : ["dog"               ],
+                }
+        expected_tokens = [
+                "(",
+                "apple",
+                ",",
+                "(",
+                "banjo",
+                ",",
+                "(",
+                "cucumber",
+                ",",
+                "dogwood",
+                ")",
+                "eggplant",
+                ")",
+                ")",
+                "rhubarb",
+                ";"
+                ]
+        src = StringIO(input_str)
+        observed_tokens = []
+        tk = nexusprocessing.NexusTokenizer(src=src)
+        for token in tk:
+            if token in expected_comments:
+                expected_comment = expected_comments[token]
+                observed_comment = tk.pull_captured_comments()
+                self.assertEqual(expected_comment, observed_comment)
+                del expected_comments[token]
+            observed_tokens.append(token)
+        self.assertEqual(expected_comments, {})
+        self.assertEqual(observed_tokens, expected_tokens)
+
+# Support running this test module directly as a script.
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_datamodel_annotations.py b/dendropy/test/test_datamodel_annotations.py
new file mode 100644
index 0000000..90d60f2
--- /dev/null
+++ b/dendropy/test/test_datamodel_annotations.py
@@ -0,0 +1,58 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Annotation tests.
+"""
+
+import collections
+import unittest
+import copy
+from dendropy.datamodel import basemodel
+from dendropy.test.support import compare_and_validate
+
+class TestObject(basemodel.Annotable, basemodel.DataObject):
+    """Minimal concrete Annotable + DataObject used as a base for annotation tests."""
+    pass
+
+class DummyX(TestObject):
+    """Annotable test object carrying an arbitrary ``data`` payload."""
+    def __init__(self, data=None):
+        # NOTE(review): does not call super().__init__(); assumes the base
+        # initializers are optional here -- confirm against basemodel.
+        self.data = data
+
+class AnnotableDeepCopyTester(compare_and_validate.Comparator, unittest.TestCase):
+    """Deep-copy semantics of Annotable objects and their annotation trees."""
+
+    def test_deep_copy(self):
+        """Flat (single-level) annotations survive deepcopy as distinct but equal copies."""
+        x1 = DummyX()
+        a1 = x1.annotations.add_new(name="a1", value="1")
+        a2 = x1.annotations.add_new(name="a2", value="2")
+        a3 = x1.annotations.add_new(name="a3", value="3")
+        x2 = copy.deepcopy(x1)
+        self.compare_distinct_annotables(x1, x2)
+
+    def test_nested_deep_copy(self):
+        """Annotations on annotations (nested) are also deep-copied correctly."""
+        x1 = DummyX()
+        a1 = x1.annotations.add_new(name="a1", value="1")
+        a2 = x1.annotations.add_new(name="a2", value="2")
+        a3 = x1.annotations.add_new(name="a3", value="3")
+        # Second- and third-level annotations hanging off a3/a5.
+        a4 = a3.annotations.add_new(name="a4", value="4")
+        a5 = a3.annotations.add_new(name="a5", value="5")
+        a6 = a5.annotations.add_new(name="a6", value="6")
+        x2 = copy.deepcopy(x1)
+        self.compare_distinct_annotables(x1, x2)
+
+# Support running this test module directly as a script.
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_datamodel_bipartitions.py b/dendropy/test/test_datamodel_bipartitions.py
new file mode 100644
index 0000000..354af12
--- /dev/null
+++ b/dendropy/test/test_datamodel_bipartitions.py
@@ -0,0 +1,105 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Testing of calculation of and operations with bipartitions.
+"""
+
+import warnings
+import unittest
+import re
+import sys
+import json
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+from dendropy.test.support import pathmap
+from dendropy.test.support import paupsplitsreference
+from dendropy.test.support.dendropytest import ExtendedTestCase
+from dendropy.utility import messaging
+from dendropy.interop import paup
+from dendropy.calculate import treecompare
+import dendropy
+
+# @unittest.skip('BipartitionEncodingTestCase skipped. Test in development')
+class BipartitionEncodingTestCase(ExtendedTestCase):
+    """
+    Validates ``Tree.encode_bipartitions()`` against a precomputed JSON
+    reference fixture of expected leafset/split bitmasks.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        # Reference fixture is a nested mapping:
+        #   source file -> rooting -> collapse-option key -> suppress-option key
+        #     -> tree index (as str) -> taxon label -> expected bitmask values.
+        ref_path = pathmap.tree_source_path("bipartition_encoding_fixture.json")
+        with open(ref_path, "r") as src:
+            cls.reference = json.load(src)
+
+    def test_encoding(self):
+        """Encode bipartitions under every fixture option combination and compare bitmasks."""
+        for source_name in self.reference:
+            # if "multifurcating" in source_name:
+            #     continue
+            tree_filepath = pathmap.tree_source_path(source_name)
+            for rooting in self.reference[source_name]:
+                # The option keys embed their boolean value as text; decode
+                # them back into actual booleans, failing loudly on anything
+                # unrecognized.
+                for collapse_unrooted_basal_bifurcation_desc in self.reference[source_name][rooting]:
+                    if "collapse_unrooted_basal_bifurcation=True" in collapse_unrooted_basal_bifurcation_desc:
+                        collapse_unrooted_basal_bifurcation = True
+                    elif "collapse_unrooted_basal_bifurcation=False" in collapse_unrooted_basal_bifurcation_desc:
+                        collapse_unrooted_basal_bifurcation = False
+                    else:
+                        raise ValueError(collapse_unrooted_basal_bifurcation_desc)
+                    for suppress_unifurcations_desc in self.reference[source_name][rooting][collapse_unrooted_basal_bifurcation_desc]:
+                        if "suppress_unifurcations=True" in suppress_unifurcations_desc:
+                            suppress_unifurcations = True
+                        elif "suppress_unifurcations=False" in suppress_unifurcations_desc:
+                            suppress_unifurcations = False
+                        else:
+                            raise ValueError(suppress_unifurcations_desc)
+                        trees_bipartitions_ref = self.reference[source_name][rooting][collapse_unrooted_basal_bifurcation_desc][suppress_unifurcations_desc]
+                        source_path = pathmap.tree_source_path(source_name)
+                        # Taxa are assigned to *all* nodes (leaf and internal)
+                        # so every edge's bipartition can be looked up by the
+                        # head node's taxon label below.
+                        trees = dendropy.TreeList.get_from_path(
+                                source_path,
+                                "nexus",
+                                rooting=rooting,
+                                suppress_leaf_node_taxa=False,
+                                suppress_internal_node_taxa=False,
+                                )
+                        for tree_idx, tree in enumerate(trees):
+                            # JSON object keys are strings, hence str(tree_idx).
+                            tree_bipartitions_ref = trees_bipartitions_ref[str(tree_idx)]
+                            bipartition_encoding = tree.encode_bipartitions(
+                                    suppress_unifurcations=suppress_unifurcations,
+                                    collapse_unrooted_basal_bifurcation=collapse_unrooted_basal_bifurcation,
+                                    )
+                            seen = set()
+                            for edge in tree.postorder_edge_iter():
+                                bipartition = edge.bipartition
+                                assert edge.head_node.taxon is not None
+                                assert edge.head_node.taxon.label is not None
+                                label = edge.head_node.taxon.label
+                                # print("{}: {}: {}: {}".format(source_name, tree_idx, rooting, label, ))
+                                # print("    {}".format(tree_bipartitions_ref[label]))
+                                # print("    {} ({}), {}({})".format(
+                                #     bipartition.split_bitmask,
+                                #     bipartition.as_bitstring(),
+                                #     bipartition.leafset_bitmask,
+                                #     bipartition.leafset_as_bitstring(),
+                                #     ))
+                                expected_leafset_bitmask = int(tree_bipartitions_ref[label]["leafset_bitmask"])
+                                self.assertEqual(bipartition.leafset_bitmask, expected_leafset_bitmask)
+                                expected_split_bitmask = int(tree_bipartitions_ref[label]["split_bitmask"])
+                                self.assertEqual(bipartition.split_bitmask, expected_split_bitmask)
+
+# Support running this test module directly as a script.
+if __name__ == "__main__":
+    unittest.main()
+
diff --git a/dendropy/test/test_datamodel_charmatrix.py b/dendropy/test/test_datamodel_charmatrix.py
new file mode 100644
index 0000000..9d6f5a8
--- /dev/null
+++ b/dendropy/test/test_datamodel_charmatrix.py
@@ -0,0 +1,1126 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests character sequence map.
+"""
+
+import copy
+import collections
+import random
+import unittest
+import dendropy
+import itertools
+from dendropy.utility import error
+from dendropy.datamodel import charmatrixmodel
+from dendropy.test.support import dendropytest
+from dendropy.test.support import compare_and_validate
+
+def get_taxon_namespace(ntax):
+    taxon_namespace = dendropy.TaxonNamespace()
+    for i in range(ntax):
+        label = "T{}".format(i)
+        t = taxon_namespace.require_taxon(label=label)
+    return taxon_namespace
+
+class CharacterMatrixBasicCRUDTests(dendropytest.ExtendedTestCase):
+
+    def test_setitem_by_taxon(self):
+        tns = get_taxon_namespace(3)
+        char_matrix = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
+        self.assertEqual(len(char_matrix), len(char_matrix._taxon_sequence_map))
+        self.assertEqual(len(char_matrix), 0)
+        seqs = [
+                "abcd",
+                [1,2,3,4,],
+                ["a", "b", "c", "d",]
+                ]
+        assert len(seqs) == len(tns)
+        for idx, taxon in enumerate(tns):
+            self.assertFalse(taxon in char_matrix)
+            self.assertNotIn(taxon, char_matrix)
+            char_matrix[taxon] = seqs[idx]
+        self.assertEqual(len(char_matrix._taxon_sequence_map), len(tns))
+        self.assertEqual(len(char_matrix), len(char_matrix._taxon_sequence_map))
+        for idx, taxon in enumerate(tns):
+            self.assertTrue(taxon in char_matrix)
+            self.assertIn(taxon, char_matrix)
+            self.assertTrue(isinstance(char_matrix[taxon], charmatrixmodel.CharacterDataSequence))
+            self.assertEqual(len(char_matrix[taxon]), len(seqs[idx]))
+            for c1, c2 in zip(char_matrix[taxon], seqs[idx]):
+                self.assertEqual(c1, c2)
+
+    def test_setitem_by_taxon_idx(self):
+        tns = get_taxon_namespace(3)
+        char_matrix = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
+        self.assertEqual(len(char_matrix), len(char_matrix._taxon_sequence_map))
+        self.assertEqual(len(char_matrix), 0)
+        seqs = [
+                "abcd",
+                [1,2,3,4,],
+                ["a", "b", "c", "d",]
+                ]
+        assert len(seqs) == len(tns)
+        for idx, taxon in enumerate(tns):
+            self.assertFalse(taxon in char_matrix)
+            self.assertNotIn(taxon, char_matrix)
+            char_matrix[idx] = seqs[idx]
+        self.assertEqual(len(char_matrix._taxon_sequence_map), len(tns))
+        self.assertEqual(len(char_matrix), len(char_matrix._taxon_sequence_map))
+        for idx, taxon in enumerate(tns):
+            self.assertTrue(taxon in char_matrix)
+            self.assertIn(taxon, char_matrix)
+            self.assertTrue(isinstance(char_matrix[taxon], charmatrixmodel.CharacterDataSequence))
+            self.assertEqual(len(char_matrix[taxon]), len(seqs[idx]))
+            for c1, c2 in zip(char_matrix[taxon], seqs[idx]):
+                self.assertEqual(c1, c2)
+
+    def test_setitem_by_taxon_label(self):
+        tns = get_taxon_namespace(3)
+        char_matrix = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
+        self.assertEqual(len(char_matrix), len(char_matrix._taxon_sequence_map))
+        self.assertEqual(len(char_matrix), 0)
+        seqs = [
+                "abcd",
+                [1,2,3,4,],
+                ["a", "b", "c", "d",]
+                ]
+        assert len(seqs) == len(tns)
+        for idx, taxon in enumerate(tns):
+            self.assertFalse(taxon in char_matrix)
+            self.assertNotIn(taxon, char_matrix)
+            char_matrix[taxon.label] = seqs[idx]
+        self.assertEqual(len(char_matrix._taxon_sequence_map), len(tns))
+        self.assertEqual(len(char_matrix), len(char_matrix._taxon_sequence_map))
+        for idx, taxon in enumerate(tns):
+            self.assertTrue(taxon in char_matrix)
+            self.assertIn(taxon, char_matrix)
+            self.assertTrue(isinstance(char_matrix[taxon], charmatrixmodel.CharacterDataSequence))
+            self.assertEqual(len(char_matrix[taxon]), len(seqs[idx]))
+            for c1, c2 in zip(char_matrix[taxon], seqs[idx]):
+                self.assertEqual(c1, c2)
+
+    def test_setitem_by_taxon_not_in_namespace(self):
+        tns = get_taxon_namespace(3)
+        char_matrix = charmatrixmodel.CharacterMatrix()
+        t = tns[0]
+        seq = ["a", "b"]
+        with self.assertRaises(ValueError):
+            char_matrix[t] = seq
+        char_matrix.taxon_namespace.add_taxon(t)
+        char_matrix[t] = seq
+        self.assertEqual(len(char_matrix), 1)
+        self.assertIn(t, char_matrix)
+        self.assertEqual(len(char_matrix[t]), len(seq))
+        self.assertTrue(isinstance(char_matrix[t], charmatrixmodel.CharacterDataSequence))
+        for c1, c2 in zip(char_matrix[t], seq):
+            self.assertEqual(c1, c2)
+
+    def test_setitem_by_idx_not_in_namespace(self):
+        tns = get_taxon_namespace(3)
+        char_matrix = charmatrixmodel.CharacterMatrix()
+        with self.assertRaises(IndexError):
+            char_matrix[len(tns)] = []
+
+    def test_setitem_by_idx_not_in_namespace(self):
+        tns = get_taxon_namespace(3)
+        char_matrix = charmatrixmodel.CharacterMatrix()
+        with self.assertRaises(KeyError):
+            char_matrix[tns[0].label] = []
+
+    def test_multi_setitem(self):
+        tns = get_taxon_namespace(3)
+        char_matrix = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
+        self.assertEqual(len(char_matrix), len(char_matrix._taxon_sequence_map))
+        self.assertEqual(len(char_matrix), 0)
+        seqs = [
+                "abcd",
+                [1,2,3,4,],
+                ["a", "b", "c", "d",]
+                ]
+        t = tns[0]
+        for seq in seqs:
+            char_matrix[t] = seq
+        for taxon in tns:
+            if taxon is t:
+                self.assertIn(taxon, char_matrix)
+            else:
+                self.assertNotIn(taxon, char_matrix)
+        seq = seqs[-1]
+        self.assertEqual(len(char_matrix), 1)
+        self.assertEqual(len(char_matrix), len(char_matrix._taxon_sequence_map))
+        self.assertEqual(len(char_matrix[0]), len(seq))
+        self.assertTrue(isinstance(char_matrix[0], charmatrixmodel.CharacterDataSequence))
+        for c1, c2 in zip(char_matrix[0], seq):
+            self.assertEqual(c1, c2)
+        for c1, c2 in zip(char_matrix[0], seqs[1]):
+            self.assertNotEqual(c1, c2)
+
+    def test_delitem(self):
+        tns = get_taxon_namespace(3)
+        char_matrix = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
+        self.assertEqual(len(char_matrix), len(char_matrix._taxon_sequence_map))
+        self.assertEqual(len(char_matrix), 0)
+        seqs = [
+                "abcd",
+                [1,2,3,4,],
+                ["a", "b", "c", "d",]
+                ]
+        assert len(seqs) == len(tns)
+        for idx, taxon in enumerate(tns):
+            self.assertFalse(taxon in char_matrix)
+            self.assertNotIn(taxon, char_matrix)
+            char_matrix[taxon] = seqs[idx]
+        self.assertEqual(len(char_matrix._taxon_sequence_map), len(tns))
+        self.assertEqual(len(char_matrix), len(char_matrix._taxon_sequence_map))
+        for idx, taxon in enumerate(tns):
+            self.assertTrue(taxon in char_matrix)
+            self.assertIn(taxon, char_matrix)
+            del char_matrix[taxon]
+            self.assertFalse(taxon in char_matrix)
+            self.assertNotIn(taxon, char_matrix)
+        self.assertEqual(len(char_matrix._taxon_sequence_map), 0)
+        self.assertEqual(len(char_matrix), 0)
+
+    def test_clear(self):
+        tns = get_taxon_namespace(3)
+        char_matrix = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
+        self.assertEqual(len(char_matrix), len(char_matrix._taxon_sequence_map))
+        self.assertEqual(len(char_matrix), 0)
+        seqs = [
+                "abcd",
+                [1,2,3,4,],
+                ["a", "b", "c", "d",]
+                ]
+        assert len(seqs) == len(tns)
+        for idx, taxon in enumerate(tns):
+            self.assertFalse(taxon in char_matrix)
+            self.assertNotIn(taxon, char_matrix)
+            char_matrix[taxon] = seqs[idx]
+        self.assertEqual(len(char_matrix._taxon_sequence_map), len(tns))
+        self.assertEqual(len(char_matrix), len(char_matrix._taxon_sequence_map))
+        char_matrix.clear()
+        self.assertEqual(len(char_matrix._taxon_sequence_map), 0)
+        self.assertEqual(len(char_matrix), 0)
+        for idx, taxon in enumerate(tns):
+            self.assertFalse(taxon in char_matrix)
+            self.assertNotIn(taxon, char_matrix)
+
+class CharacterMatrixMetricsTest(dendropytest.ExtendedTestCase):
+    """Tests for matrix size metrics: ``sequence_size`` and ``max_sequence_size``."""
+
+    def test_sequence_sizes(self):
+        """Metrics are 0 when empty; afterwards reflect the stored sequence lengths."""
+        seq_sizes = [2, 10, 20, 0, 1]
+        tns = get_taxon_namespace(len(seq_sizes))
+        char_matrix = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
+        self.assertEqual(len(char_matrix), 0)
+        self.assertEqual(char_matrix.sequence_size, 0)
+        self.assertEqual(char_matrix.max_sequence_size, 0)
+        for taxon, seq_size in zip(tns, seq_sizes):
+            char_matrix[taxon] = ["x"] * seq_size
+        self.assertEqual(len(char_matrix), len(seq_sizes))
+        # ``sequence_size`` is expected to report the first sequence's
+        # length, ``max_sequence_size`` the longest.
+        self.assertEqual(char_matrix.sequence_size, seq_sizes[0])
+        self.assertEqual(char_matrix.max_sequence_size, max(seq_sizes))
+
+class CharacterMatrixFillAndPackTestCase(dendropytest.ExtendedTestCase):
+
+    def test_fill(self):
+        seq_sizes = [2, 10, 20, 0, 1]
+        tns = get_taxon_namespace(len(seq_sizes))
+        original_sequences = []
+        for seq_size in seq_sizes:
+            original_sequences.append( ["1"] * seq_size )
+        for size in (None, 50, 1, 0, 8):
+            for append in (False, True, None):
+                kwargs = {}
+                if size is None:
+                    expected_sizes = [max(seq_sizes)] * len(seq_sizes)
+                else:
+                    kwargs["size"] = size
+                    expected_sizes = [max(size, s) for s in seq_sizes]
+                assert len(expected_sizes) == len(original_sequences)
+                if append is None:
+                    append = True
+                else:
+                    kwargs["append"] = append
+                expected_sequences = []
+                for idx, seq in enumerate(original_sequences):
+                    if expected_sizes[idx] <= len(seq):
+                        expected_sequences.append(list(seq))
+                    else:
+                        s1 = list(seq)
+                        diff = expected_sizes[idx] - len(s1)
+                        s2 = ["0"] * diff
+                        if append:
+                            s = s1 + s2
+                        else:
+                            s = s2 + s1
+                        expected_sequences.append(s)
+                    assert len(expected_sequences[idx]) == expected_sizes[idx], \
+                            "{}: {}/{}: {}: {} ({})".format(idx, size, append, expected_sequences[idx], len(expected_sequences[idx]), expected_sizes[idx])
+                char_matrix = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
+                for taxon, seq in zip(tns, original_sequences):
+                    char_matrix[taxon] = seq
+                assert len(char_matrix) == len(seq_sizes)
+                char_matrix.fill("0", **kwargs)
+                for taxon, expected_size, expected_seq in zip(char_matrix, expected_sizes, expected_sequences):
+                    obs_seq = char_matrix[taxon]
+                    self.assertEqual(len(obs_seq), expected_size)
+                    for c1, c2 in zip(obs_seq, expected_seq):
+                        self.assertEqual(c1, c2)
+
+    def test_fill_taxa(self):
+        tns = get_taxon_namespace(5)
+        char_matrix = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
+        for taxon in tns[:3]:
+            char_matrix[taxon] = "z"
+        for taxon in tns[:3]:
+            self.assertIn(taxon, char_matrix)
+        for taxon in tns[3:]:
+            self.assertNotIn(taxon, char_matrix)
+        char_matrix.fill_taxa()
+        for taxon in tns:
+            self.assertIn(taxon, char_matrix)
+
+    def test_fill_taxa(self):
+        tns = get_taxon_namespace(5)
+        char_matrix = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
+        s = ["z"] * 10
+        char_matrix[tns[0]] = s
+        for taxon in tns[1:3]:
+            char_matrix[taxon] = ["x"]
+        char_matrix.pack()
+        self.assertEqual(len(char_matrix), len(tns))
+        for taxon in tns:
+            self.assertIn(taxon, char_matrix)
+            self.assertEqual(len(char_matrix[taxon]), 10)
+
class CharacterMatrixBinaryOps(dendropytest.ExtendedTestCase):
    """Binary operations (add/replace/update/extend) between two matrices."""

    def get_char_matrices(self):
        """Return two matrices over one shared 3-taxon namespace.

        The first covers taxa 0 and 1, the second taxa 1 and 2, so
        exactly one taxon (index 1) is present in both.
        """
        tns = get_taxon_namespace(3)
        m1 = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
        m2 = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
        m1[tns[0]] = [1, 1, 1]
        m1[tns[1]] = [2, 2, 2]
        m2[tns[1]] = [3, 3, 3]
        m2[tns[2]] = [4, 4, 4]

        # Sanity-check the fixture before handing it to a test.
        assert len(m1) == 2
        assert len(m2) == 2
        for taxon, in_m1, in_m2 in zip(tns, (True, True, False), (False, True, True)):
            assert (taxon in m1) == in_m1
            assert (taxon in m2) == in_m2

        return m1, m2, tns

    def verify_sequence_equal(self, s1, s2, expected_length=None):
        """Assert s1 and s2 are distinct objects with identical contents."""
        if expected_length is not None:
            self.assertEqual(len(s1), expected_length)
            self.assertEqual(len(s2), expected_length)
        self.assertEqual(len(s1), len(s2))
        self.assertIsNot(s1, s2)
        for left, right in zip(s1, s2):
            self.assertEqual(left, right)

    def verify_independent_matrices(self, c1, c2):
        """Assert the matrices share a namespace but no sequence objects."""
        assert c1.taxon_namespace is c2.taxon_namespace
        for taxon in c1.taxon_namespace:
            if taxon not in c1 or taxon not in c2:
                continue
            self.assertIsNot(c1[taxon], c2[taxon])

    def _assert_namespace_identity_error(self, op_name):
        # Two matrices with distinct (default) namespaces must be rejected
        # by every binary operation.
        m1 = charmatrixmodel.CharacterMatrix()
        m2 = charmatrixmodel.CharacterMatrix()
        with self.assertRaises(error.TaxonNamespaceIdentityError):
            getattr(m1, op_name)(m2)

    def test_add_sequences_fail(self):
        self._assert_namespace_identity_error("add_sequences")

    def test_add_sequences(self):
        """add_sequences(): only taxa missing from self are copied over."""
        m1, m2, tns = self.get_char_matrices()
        m1.add_sequences(m2)
        self.verify_independent_matrices(m1, m2)
        self.assertEqual(len(m1), 3)
        expected = {
            tns[0]: [1, 1, 1],  # untouched
            tns[1]: [2, 2, 2],  # already present: kept, not overwritten
            tns[2]: [4, 4, 4],  # newly added from the other matrix
        }
        for taxon, seq in expected.items():
            self.assertIn(taxon, m1)
            self.verify_sequence_equal(m1[taxon], seq)

    def test_replace_sequences_fail(self):
        self._assert_namespace_identity_error("replace_sequences")

    def test_replace_sequences(self):
        """replace_sequences(): only taxa already in self are overwritten."""
        m1, m2, tns = self.get_char_matrices()
        m1.replace_sequences(m2)
        self.verify_independent_matrices(m1, m2)
        self.assertEqual(len(m1), 2)
        self.assertNotIn(tns[2], m1)
        expected = {
            tns[0]: [1, 1, 1],  # not in the other matrix: untouched
            tns[1]: [3, 3, 3],  # shared taxon: overwritten
        }
        for taxon, seq in expected.items():
            self.assertIn(taxon, m1)
            self.verify_sequence_equal(m1[taxon], seq)

    def test_update_sequences_fail(self):
        self._assert_namespace_identity_error("update_sequences")

    def test_update_sequences(self):
        """update_sequences(): adds missing taxa and overwrites shared ones."""
        m1, m2, tns = self.get_char_matrices()
        m1.update_sequences(m2)
        self.verify_independent_matrices(m1, m2)
        self.assertEqual(len(m1), 3)
        expected = {
            tns[0]: [1, 1, 1],  # untouched
            tns[1]: [3, 3, 3],  # shared taxon: overwritten
            tns[2]: [4, 4, 4],  # newly added
        }
        for taxon, seq in expected.items():
            self.assertIn(taxon, m1)
            self.verify_sequence_equal(m1[taxon], seq)

    def test_extend_sequences_fail(self):
        self._assert_namespace_identity_error("extend_sequences")

    def test_extend_sequences(self):
        """extend_sequences(): shared taxa get the other sequence appended."""
        m1, m2, tns = self.get_char_matrices()
        m1.extend_sequences(m2)
        self.verify_independent_matrices(m1, m2)
        self.assertEqual(len(m1), 2)
        self.assertNotIn(tns[2], m1)
        expected = {
            tns[0]: [1, 1, 1],           # not shared: untouched
            tns[1]: [2, 2, 2, 3, 3, 3],  # shared: concatenated
        }
        for taxon, seq in expected.items():
            self.assertIn(taxon, m1)
            self.verify_sequence_equal(m1[taxon], seq)

    def test_extend_matrix_fail(self):
        self._assert_namespace_identity_error("extend_matrix")

    def test_extend_matrix(self):
        """extend_matrix(): concatenates shared rows and adds missing taxa."""
        m1, m2, tns = self.get_char_matrices()
        m1.extend_matrix(m2)
        self.verify_independent_matrices(m1, m2)
        self.assertEqual(len(m1), 3)
        expected = {
            tns[0]: [1, 1, 1],           # untouched
            tns[1]: [2, 2, 2, 3, 3, 3],  # shared: concatenated
            tns[2]: [4, 4, 4],           # newly added
        }
        for taxon, seq in expected.items():
            self.assertIn(taxon, m1)
            self.verify_sequence_equal(m1[taxon], seq)
+
class CharacterMatrixTaxonManagement(dendropytest.ExtendedTestCase):
    """Tests for taxon-namespace handling at construction time."""

    def test_assign_taxon_namespace(self):
        # A namespace passed to the constructor must be adopted as-is,
        # not copied.
        namespace = get_taxon_namespace(5)
        matrix = charmatrixmodel.CharacterMatrix(taxon_namespace=namespace)
        self.assertIs(matrix.taxon_namespace, namespace)
+
class CharacterMatrixIteratorTests(dendropytest.ExtendedTestCase):
    """Iteration over a matrix yields its taxa in namespace order."""

    def setUp(self):
        self.rng = random.Random()

    def test_standard_iterator(self):
        """Only taxa with sequences are yielded, ordered as in the namespace."""
        tns = get_taxon_namespace(100)
        matrix = charmatrixmodel.CharacterMatrix(taxon_namespace=tns)
        shuffled = list(tns)
        self.rng.shuffle(shuffled)
        included = set()
        excluded = set()
        # Assign sequences to (roughly) half the taxa, in shuffled order so
        # insertion order differs from namespace order.
        for taxon in shuffled:
            if self.rng.uniform(0, 1) < 0.5:
                matrix[taxon] = [0]
                included.add(taxon)
            else:
                excluded.add(taxon)
        expected = [taxon for taxon in tns if taxon in included]
        self.assertEqual(len(matrix), len(expected))
        self.assertEqual(list(matrix), expected)
+
class CharacterMatrixIdentity(unittest.TestCase):
    """Character matrices hash and compare by object identity, not label."""

    def setUp(self):
        self.tns = dendropy.TaxonNamespace()
        self.t1 = charmatrixmodel.CharacterMatrix(label="a", taxon_namespace=self.tns)
        self.t2 = charmatrixmodel.CharacterMatrix(label="a", taxon_namespace=self.tns)
        self.t3 = charmatrixmodel.CharacterMatrix(label="a")

    def test_equal(self):
        # Same label and same namespace, but distinct objects: not equal.
        self.assertNotEqual(self.t1, self.t2)

    def test_hash_dict_membership(self):
        k = {}
        k[self.t1] = 1
        k[self.t2] = 2
        self.assertEqual(len(k), 2)
        self.assertEqual(k[self.t1], 1)
        self.assertEqual(k[self.t2], 2)
        self.assertIn(self.t1, k)
        self.assertIn(self.t2, k)
        del k[self.t1]
        self.assertNotIn(self.t1, k)
        self.assertIn(self.t2, k)
        self.assertEqual(len(k), 1)
        k1 = {self.t1: 1}
        k2 = {self.t2: 1}
        self.assertIn(self.t1, k1)
        self.assertIn(self.t2, k2)
        self.assertNotIn(self.t2, k1)
        self.assertNotIn(self.t1, k2)

    def test_hash_set_membership(self):
        k = set()
        k.add(self.t1)
        k.add(self.t2)
        self.assertEqual(len(k), 2)
        self.assertIn(self.t1, k)
        self.assertIn(self.t2, k)
        k.discard(self.t1)
        self.assertNotIn(self.t1, k)
        self.assertIn(self.t2, k)
        self.assertEqual(len(k), 1)
        # Fixed: this tail was copy-pasted from the dict test above and
        # built single-entry dicts; a *set*-membership test should use sets.
        k1 = {self.t1}
        k2 = {self.t2}
        self.assertIn(self.t1, k1)
        self.assertIn(self.t2, k2)
        self.assertNotIn(self.t2, k1)
        self.assertNotIn(self.t1, k2)
+
class TestCharacterMatrixUpdateTaxonNamespace(
        dendropytest.ExtendedTestCase):
    """Tests that ``update_taxon_namespace()`` repopulates an emptied
    namespace with the taxa referenced by the matrix's sequences."""

    def get_char_matrix(self):
        # Build a matrix whose rows reference taxa that are *not* in its
        # (freshly replaced) namespace.
        labels = [
            "z01" , "<NONE>" , "z03" , "z04" , "z05" ,
            "z06" , None     , None  , "z09" , "z10" ,
            "z11" , "<NONE>" , None  , "z14" , "z15" ,
                ]
        char_matrix = charmatrixmodel.CharacterMatrix()
        char_matrix.expected_labels = []
        char_matrix.expected_taxa = set()
        random.shuffle(labels)
        for label in labels:
            # NOTE(review): ``label`` from the list above is never used --
            # every taxon is created with label=None, so ``labels`` only
            # determines the number of taxa. Presumably ``label=label`` was
            # intended; confirm against the analogous taxon/tree tests
            # before changing.
            t = dendropy.Taxon(label=None)
            char_matrix.taxon_namespace.add_taxon(t)
            char_matrix[t] = [1,1,1]
            char_matrix.expected_taxa.add(t)
            char_matrix.expected_labels.append(t.label)
        # Detach: hand the matrix a fresh, empty namespace while its rows
        # still reference the original taxa.
        char_matrix.taxon_namespace = dendropy.TaxonNamespace()
        assert len(char_matrix) == len(labels)
        assert len(char_matrix) == len(char_matrix._taxon_sequence_map)
        # Remember the row count so tests can verify nothing was dropped.
        char_matrix.nseqs = len(char_matrix)
        return char_matrix

    def test_update(self):
        char_matrix = self.get_char_matrix()
        char_matrix.taxon_namespace = dendropy.TaxonNamespace()
        original_tns = char_matrix.taxon_namespace
        self.assertEqual(len(original_tns), 0)
        # Repeated calls must be idempotent.
        char_matrix.update_taxon_namespace()
        char_matrix.update_taxon_namespace()
        char_matrix.update_taxon_namespace()
        # The namespace object itself is kept; only its membership changes.
        self.assertIs(char_matrix.taxon_namespace, original_tns)
        self.assertEqual(len(char_matrix.taxon_namespace), len(char_matrix.expected_taxa))
        for taxon in char_matrix:
            self.assertIn(taxon, char_matrix.taxon_namespace)
        new_taxa = [t for t in original_tns]
        new_labels = [t.label for t in original_tns]
        self.assertCountEqual(new_taxa, char_matrix.expected_taxa)
        self.assertCountEqual(new_labels, char_matrix.expected_labels)
        self.assertEqual(len(char_matrix), char_matrix.nseqs)
        assert len(char_matrix) == len(char_matrix._taxon_sequence_map)
+
class TestCharacterMatrixReconstructAndMigrateTaxonNamespace(
        dendropytest.ExtendedTestCase):
    """Tests for ``reconstruct_taxon_namespace()`` and
    ``migrate_taxon_namespace()``: remapping a matrix's rows onto a new
    namespace, with and without unifying taxa that share a label, under
    case-sensitive and case-insensitive label matching.
    """

    def get_char_matrix(self, labels=None):
        # Builds a matrix whose rows remember their original taxon and
        # sequence objects (via ad-hoc ``original_*`` attributes) so the
        # verification helper can check how remapping treated each row.
        char_matrix = charmatrixmodel.CharacterMatrix()
        if labels is None:
            labels = [str(i) for i in range(1000)]
        char_matrix.expected_labels = []
        char_matrix.original_taxa = []
        char_matrix.original_seqs = []
        self.rng.shuffle(labels)
        for label in labels:
            t = dendropy.Taxon(label=label)
            char_matrix.taxon_namespace.add_taxon(t)
            char_matrix.original_taxa.append(t)
            # NOTE(review): this tags a sequence that the assignment a few
            # lines below replaces, and the attribute is set again after
            # the replacement -- this line looks redundant; confirm.
            char_matrix[t].original_taxon = t
            char_matrix.expected_labels.append(label)
            seq = [self.rng.randint(0, 100) for _ in range(4)]
            char_matrix[t] = seq
            # Remember the coerced sequence object and its taxon/label so
            # later checks can detect copying or re-mapping.
            char_matrix[t].original_seq = char_matrix[t]
            char_matrix.original_seqs.append(char_matrix[t])
            char_matrix[t].original_taxon = t
            char_matrix[t].label = label
        assert len(char_matrix.taxon_namespace) == len(char_matrix.original_taxa)
        assert len(char_matrix) == len(char_matrix.original_taxa)
        assert len(char_matrix) == len(labels)
        char_matrix.nseqs = len(char_matrix)
        return char_matrix

    def get_char_matrix_with_case_insensitive_label_collisions(self):
        # Labels collide only when compared case-insensitively.
        labels = [
                "a", "A", "b", "B", "c", "C",
                ]
        char_matrix = self.get_char_matrix(labels=labels)
        return char_matrix

    def get_char_matrix_with_case_insensitive_and_case_sensitive_label_collisions(self):
        # Exact duplicates (including None) as well as case-only collisions.
        labels = [
                "a", "a", "2", "2", "b", "B",
                "B", "h", "H", "h", None, None,
                "H", "J", "j",
                ]
        char_matrix = self.get_char_matrix(labels=labels)
        return char_matrix

    def setUp(self):
        self.rng = random.Random()

    def verify_taxon_namespace_reconstruction(self,
            char_matrix,
            unify_taxa_by_label=False,
            case_sensitive_label_mapping=True,
            original_tns=None):
        """Check that every row maps to a *new* taxon with the right label,
        that no original taxon leaked into the new namespace, and that the
        multiset of resulting labels matches expectations.

        Consumes ``char_matrix.original_seqs`` (removes entries as they are
        matched) and asserts it ends empty.
        """
        self.assertEqual(len(char_matrix), char_matrix.nseqs)
        self.assertEqual(len(char_matrix), len(char_matrix.original_seqs))
        assert len(char_matrix) == len(char_matrix._taxon_sequence_map)
        if unify_taxa_by_label:
            # Unification collapses duplicate labels (case-folded when the
            # mapping is case-insensitive).
            if not case_sensitive_label_mapping:
                expected_labels = list(set((label.upper() if label is not None else None) for label in char_matrix.expected_labels))
            else:
                expected_labels = list(set(label for label in char_matrix.expected_labels))
        else:
            expected_labels = [label for label in char_matrix.expected_labels]
        seen_taxa = []
        for taxon in char_matrix:
            seq = char_matrix[taxon]
            # Sequence objects themselves must be preserved (not copied)...
            self.assertIs(char_matrix[taxon], char_matrix[taxon].original_seq)
            self.assertIn(char_matrix[taxon], char_matrix.original_seqs)
            char_matrix.original_seqs.remove(char_matrix[taxon])
            # ...but each row's taxon must be a new object with a matching
            # (possibly case-folded) label.
            self.assertIsNot(taxon, seq.original_taxon)
            if not case_sensitive_label_mapping and taxon.label is not None:
                self.assertEqual(taxon.label.upper(), seq.original_taxon.label.upper())
                self.assertEqual(seq.label.upper(), taxon.label.upper())
            else:
                self.assertEqual(taxon.label, seq.original_taxon.label)
                self.assertEqual(seq.label, taxon.label)
            self.assertNotIn(seq.original_taxon, char_matrix.taxon_namespace)
            self.assertIn(seq.original_taxon, char_matrix.original_taxa)
            self.assertIn(taxon, char_matrix.taxon_namespace)
            self.assertNotIn(taxon, char_matrix.original_taxa)
            if original_tns is not None:
                self.assertNotIn(taxon, original_tns)
            if taxon not in seen_taxa:
                seen_taxa.append(taxon)
            else:
                # Seeing the same taxon on multiple rows is only legal when
                # unifying by label.
                self.assertTrue(unify_taxa_by_label)
                if not case_sensitive_label_mapping:
                    self.assertIn(taxon.label, [t.label for t in seen_taxa])
                else:
                    if taxon.label is None:
                        self.assertIs(seq.original_taxon.label, None)
                        self.assertEqual([t.label for t in seen_taxa].count(None), 1)
                    else:
                        x1 = [t.label.upper() for t in seen_taxa if t.label is not None]
                        self.assertIn(taxon.label.upper(), x1)
        self.assertEqual(len(seen_taxa), len(char_matrix.taxon_namespace))
        if not case_sensitive_label_mapping:
            seen_labels = [(t.label.upper() if t.label is not None else None) for t in seen_taxa]
        else:
            seen_labels = [t.label for t in seen_taxa]
        # Compare label multisets in both directions for a clearer failure.
        c1 = collections.Counter(expected_labels)
        c2 = collections.Counter(seen_labels)
        self.assertEqual(c2-c1, collections.Counter())
        self.assertEqual(c1-c2, collections.Counter())
        self.assertEqual(c1, c2)
        self.assertEqual(len(char_matrix.taxon_namespace), len(expected_labels))
        if not unify_taxa_by_label:
            self.assertEqual(len(char_matrix.taxon_namespace), len(char_matrix.original_taxa))
        self.assertEqual(char_matrix.original_seqs, [])

    def test_basic_reconstruction(self):
        char_matrix = self.get_char_matrix()
        tns = char_matrix.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        char_matrix.taxon_namespace = new_tns
        char_matrix.reconstruct_taxon_namespace(unify_taxa_by_label=False)
        # print("\n--\n")
        # for t in self.char_matrix:
            # print("{}: {}".format(repr(t), self.char_matrix[t]))
            # assert t in self.char_matrix.taxon_namespace
        self.assertIsNot(char_matrix.taxon_namespace, tns)
        self.assertIs(char_matrix.taxon_namespace, new_tns)
        self.assertEqual(len(char_matrix), char_matrix.nseqs)
        self.assertEqual(len(char_matrix), len(char_matrix.original_seqs))
        assert len(char_matrix) == len(char_matrix._taxon_sequence_map)
        if len(char_matrix.taxon_namespace) != len(tns):
            # Diagnostic dump of the label-count discrepancy before the
            # assertion below fails.
            x1 = [t.label for t in char_matrix.taxon_namespace]
            x2 = [t.label for t in tns]
            c1 = collections.Counter(x1)
            c2 = collections.Counter(x2)
            c3 = c2 - c1
            print(c3)
        self.assertEqual(len(char_matrix.taxon_namespace), len(tns))
        original_labels = [t.label for t in tns]
        new_labels = [t.label for t in new_tns]
        self.assertCountEqual(new_labels, original_labels)
        for taxon in char_matrix:
            self.assertIn(taxon, char_matrix.taxon_namespace)
            self.assertNotIn(taxon, tns)
            self.assertIs(char_matrix[taxon], char_matrix[taxon].original_seq)
            self.assertIn(char_matrix[taxon], char_matrix.original_seqs)
            char_matrix.original_seqs.remove(char_matrix[taxon])
        self.assertEqual(char_matrix.original_seqs, [])

    def test_reconstruct_taxon_namespace_non_unifying(self):
        # Without unification, even colliding labels each keep their own
        # (new) taxon.
        char_matrix = self.get_char_matrix_with_case_insensitive_and_case_sensitive_label_collisions()
        original_tns = char_matrix.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        char_matrix._taxon_namespace = new_tns
        self.assertEqual(len(char_matrix.taxon_namespace), 0)
        char_matrix.reconstruct_taxon_namespace(unify_taxa_by_label=False)
        self.assertIsNot(char_matrix.taxon_namespace, original_tns)
        self.assertIs(char_matrix.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                char_matrix=char_matrix,
                unify_taxa_by_label=False,
                case_sensitive_label_mapping=True)

    def test_reconstruct_taxon_namespace_unifying_case_sensitive(self):
        char_matrix = self.get_char_matrix_with_case_insensitive_label_collisions()
        original_tns = char_matrix.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        char_matrix._taxon_namespace = new_tns
        self.assertEqual(len(char_matrix.taxon_namespace), 0)
        char_matrix.reconstruct_taxon_namespace(unify_taxa_by_label=True)
        self.assertIsNot(char_matrix.taxon_namespace, original_tns)
        self.assertIs(char_matrix.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                char_matrix=char_matrix,
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=True,
                original_tns=original_tns)

    def test_reconstruct_taxon_namespace_unifying_case_sensitive_fail(self):
        # Exact duplicate labels cannot be unified: multiple distinct rows
        # would collapse onto one taxon.
        char_matrix = self.get_char_matrix_with_case_insensitive_and_case_sensitive_label_collisions()
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        char_matrix._taxon_namespace = new_tns
        with self.assertRaises(error.TaxonNamespaceReconstructionError):
            char_matrix.reconstruct_taxon_namespace(unify_taxa_by_label=True)

    def test_reconstruct_taxon_namespace_unifying_case_insensitive(self):
        char_matrix = self.get_char_matrix()
        original_tns = char_matrix.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = False
        char_matrix._taxon_namespace = new_tns
        self.assertEqual(len(char_matrix.taxon_namespace), 0)
        char_matrix.reconstruct_taxon_namespace(unify_taxa_by_label=True)
        self.assertIsNot(char_matrix.taxon_namespace, original_tns)
        self.assertIs(char_matrix.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                char_matrix=char_matrix,
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=False,
                original_tns=original_tns)

    def test_reconstruct_taxon_namespace_unifying_case_insensitive_fail(self):
        # Case-insensitive unification must reject both collision fixtures.
        for char_matrix in (
                self.get_char_matrix_with_case_insensitive_label_collisions(),
                self.get_char_matrix_with_case_insensitive_and_case_sensitive_label_collisions(),
                ):
            new_tns = dendropy.TaxonNamespace()
            new_tns.is_case_sensitive = False
            char_matrix._taxon_namespace = new_tns
            with self.assertRaises(error.TaxonNamespaceReconstructionError):
                char_matrix.reconstruct_taxon_namespace(unify_taxa_by_label=True)

    def test_basic_migration(self):
        # migrate_taxon_namespace() = assign new namespace + reconstruct,
        # in one call.
        char_matrix = self.get_char_matrix()
        tns = char_matrix.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        char_matrix.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=False)
        self.assertIsNot(char_matrix.taxon_namespace, tns)
        self.assertIs(char_matrix.taxon_namespace, new_tns)
        self.assertEqual(len(char_matrix), char_matrix.nseqs)
        self.assertEqual(len(char_matrix), len(char_matrix.original_seqs))
        assert len(char_matrix) == len(char_matrix._taxon_sequence_map)
        if len(char_matrix.taxon_namespace) != len(tns):
            # Diagnostic dump of the label-count discrepancy before the
            # assertion below fails.
            x1 = [t.label for t in char_matrix.taxon_namespace]
            x2 = [t.label for t in tns]
            c1 = collections.Counter(x1)
            c2 = collections.Counter(x2)
            c3 = c2 - c1
            print(c3)
        self.assertEqual(len(char_matrix.taxon_namespace), len(tns))
        original_labels = [t.label for t in tns]
        new_labels = [t.label for t in new_tns]
        self.assertCountEqual(new_labels, original_labels)
        for taxon in char_matrix:
            self.assertIn(taxon, char_matrix.taxon_namespace)
            self.assertNotIn(taxon, tns)
            self.assertIs(char_matrix[taxon], char_matrix[taxon].original_seq)
            self.assertIn(char_matrix[taxon], char_matrix.original_seqs)
            char_matrix.original_seqs.remove(char_matrix[taxon])
        self.assertEqual(char_matrix.original_seqs, [])

    def test_migrate_taxon_namespace_non_unifying(self):
        char_matrix = self.get_char_matrix_with_case_insensitive_and_case_sensitive_label_collisions()
        original_tns = char_matrix.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        char_matrix.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=False)
        self.assertIsNot(char_matrix.taxon_namespace, original_tns)
        self.assertIs(char_matrix.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                char_matrix=char_matrix,
                unify_taxa_by_label=False,
                case_sensitive_label_mapping=True)

    def test_migrate_taxon_namespace_unifying_case_sensitive(self):
        char_matrix = self.get_char_matrix_with_case_insensitive_label_collisions()
        original_tns = char_matrix.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        char_matrix.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=True)
        self.assertIsNot(char_matrix.taxon_namespace, original_tns)
        self.assertIs(char_matrix.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                char_matrix=char_matrix,
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=True,
                original_tns=original_tns)

    def test_migrate_taxon_namespace_unifying_case_sensitive_fail(self):
        char_matrix = self.get_char_matrix_with_case_insensitive_and_case_sensitive_label_collisions()
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        with self.assertRaises(error.TaxonNamespaceReconstructionError):
            char_matrix.migrate_taxon_namespace(
                    new_tns,
                    unify_taxa_by_label=True)

    def test_migrate_taxon_namespace_unifying_case_insensitive(self):
        char_matrix = self.get_char_matrix()
        original_tns = char_matrix.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = False
        char_matrix.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=True)
        self.assertIsNot(char_matrix.taxon_namespace, original_tns)
        self.assertIs(char_matrix.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                char_matrix=char_matrix,
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=False,
                original_tns=original_tns)

    def test_migrate_taxon_namespace_unifying_case_insensitive_fail(self):
        for char_matrix in (
                self.get_char_matrix_with_case_insensitive_label_collisions(),
                self.get_char_matrix_with_case_insensitive_and_case_sensitive_label_collisions(),
                ):
            new_tns = dendropy.TaxonNamespace()
            new_tns.is_case_sensitive = False
            # NOTE(review): pre-assigning ``_taxon_namespace`` before
            # migrating to the *same* namespace looks copy-pasted from the
            # reconstruct tests; migrate_taxon_namespace() normally performs
            # the assignment itself -- confirm intent.
            char_matrix._taxon_namespace = new_tns
            with self.assertRaises(error.TaxonNamespaceReconstructionError):
                char_matrix.migrate_taxon_namespace(
                        new_tns,
                        unify_taxa_by_label=True)
+
class MatrixCreatingAndCloningTester(
        compare_and_validate.Comparator):
    """Mixin exercising matrix construction and the clone(0/1/2) depths.

    Concrete subclasses must set ``matrix_type``, ``sequence_type``, and
    ``sequence_source`` class attributes, then call ``build()``.
    """

    @classmethod
    def build(cls):
        # Shared RNG plus a default matrix size for subclasses that do not
        # set their own.
        cls.rng = random.Random()
        if not hasattr(cls, "nseqs"):
            cls.nseqs = 1000

    def add_annotations(self, char_matrix):
        """Decorate taxa, the matrix, and each sequence with annotations.

        Used to exercise annotation copying during cloning.
        """
        tns = char_matrix.taxon_namespace
        for idx, taxon in enumerate(tns):
            a = taxon.annotations.add_new("!color", str(idx))
            a.annotations.add_new("setbytest", "a")
        char_matrix.annotations.add_new("a", 0)
        char_matrix.label = "hello"
        b = char_matrix.annotations.add_bound_attribute("label")
        b.annotations.add_new("c", 3)
        for idx, taxon in enumerate(char_matrix):
            seq = char_matrix[taxon]
            an1 = seq.annotations.add_new("a{}".format(idx),
                    "{}{}{}".format(seq.label, seq.taxon, idx))
            # Fixed: these calls were "add_bouseq_attribute" -- an apparent
            # botched bulk rename of "add_bound_attribute" (the correct name
            # is used for the matrix-level annotation above); no such method
            # exists. Also removed a stray duplicate line that referenced
            # the undefined name "ae1" and would have raised NameError.
            an2 = seq.annotations.add_bound_attribute("label")
            an3 = an1.annotations.add_bound_attribute("name")

    def get_char_matrix(self, taxon_namespace=None):
        """Build a matrix of ``nseqs`` rows cycling over ``sequence_source``."""
        char_matrix = self.__class__.matrix_type(taxon_namespace=taxon_namespace)
        labels = [str(i) for i in range(self.__class__.nseqs)]
        self.__class__.rng.shuffle(labels)
        seq_iter = itertools.cycle(self.__class__.sequence_source)
        nchar = len(self.__class__.sequence_source) * 2
        for label in labels:
            t = dendropy.Taxon(label=label)
            char_matrix.taxon_namespace.add_taxon(t)
            seq = [next(seq_iter) for _ in range(nchar)]
            char_matrix[t] = seq
            # Assignment must coerce the raw list into the matrix's proper
            # sequence type (exactly, not a subclass).
            self.assertTrue(isinstance(char_matrix[t], self.__class__.sequence_type))
            self.assertIs(type(char_matrix[t]), self.__class__.sequence_type)
        return char_matrix

    def test_shallow_copy_with_initializer_list(self):
        # Constructing from a mapping shares taxa and sequence objects,
        # and does not grow either namespace.
        tns1 = dendropy.TaxonNamespace()
        char_matrix1 = self.get_char_matrix(taxon_namespace=tns1)
        original_tns_length = len(tns1)
        self.assertIs(char_matrix1.taxon_namespace, tns1)
        d = collections.OrderedDict()
        for taxon in char_matrix1:
            d[taxon] = char_matrix1[taxon]
        tns2 = dendropy.TaxonNamespace()
        char_matrix2 = self.__class__.matrix_type(d, taxon_namespace=tns2)
        self.assertIs(char_matrix2.taxon_namespace, tns2)
        self.assertEqual(len(tns1), original_tns_length)
        self.assertEqual(len(tns2), original_tns_length)
        self.assertEqual(len(char_matrix2), len(char_matrix1))
        for tcopy, toriginal in zip(char_matrix2, char_matrix1):
            self.assertIs(tcopy, toriginal)
            seq_copy = char_matrix2[tcopy]
            seq_original = char_matrix1[toriginal]
            self.assertIs(seq_copy, seq_original)

    def test_clone0(self):
        # clone(0): shallowest clone -- shares namespace, taxa, and
        # sequence objects.
        char_matrix1 = self.get_char_matrix()
        for char_matrix2 in (
                char_matrix1.clone(0),
                ):
            self.assertIs(char_matrix2.taxon_namespace, char_matrix1.taxon_namespace)
            self.assertEqual(len(char_matrix2), len(char_matrix1))
            for tcopy, toriginal in zip(char_matrix2, char_matrix1):
                self.assertIs(tcopy, toriginal)
                seq_copy = char_matrix2[tcopy]
                seq_original = char_matrix1[toriginal]
                self.assertIs(seq_copy, seq_original)

    def test_taxon_namespace_scoped_copy(self):
        # clone(1), copy-construction, and taxon_namespace_scoped_copy()
        # must all be equivalent namespace-scoped copies.
        char_matrix1 = self.get_char_matrix()
        for char_matrix2 in (
                char_matrix1.clone(1),
                self.__class__.matrix_type(char_matrix1),
                char_matrix1.taxon_namespace_scoped_copy(),):
            self.compare_distinct_char_matrix(char_matrix2, char_matrix1,
                    taxon_namespace_scoped=True,
                    compare_matrix_annotations=True,
                    compare_sequence_annotations=True,
                    compare_taxon_annotations=True)

    def test_deepcopy_including_namespace(self):
        # clone(2) and copy.deepcopy() must be equivalent full deep copies.
        char_matrix1 = self.get_char_matrix()
        for idx, char_matrix2 in enumerate((
                char_matrix1.clone(2),
                copy.deepcopy(char_matrix1),
                )):
            self.compare_distinct_char_matrix(char_matrix2, char_matrix1,
                    taxon_namespace_scoped=False,
                    compare_matrix_annotations=True,
                    compare_sequence_annotations=True,
                    compare_taxon_annotations=True)

    def test_deepcopy_excluding_namespace(self):
        # Passing a fresh namespace forces taxon re-mapping; taxon
        # annotations are therefore not expected to match.
        char_matrix1 = self.get_char_matrix()
        char_matrix2 = self.__class__.matrix_type(char_matrix1,
                taxon_namespace=dendropy.TaxonNamespace())
        self.compare_distinct_char_matrix(char_matrix2, char_matrix1,
                taxon_namespace_scoped=False,
                compare_matrix_annotations=True,
                compare_sequence_annotations=True,
                compare_taxon_annotations=False)
+
class CharacterMatrixCreatingAndCloningTestCase(
        MatrixCreatingAndCloningTester,
        dendropytest.ExtendedTestCase):
    """Creation/cloning tests for the base CharacterMatrix type."""

    @classmethod
    def setUpClass(cls):
        # Configure the shared tester harness: generic matrix/sequence
        # types with plain integer character data, 1000 rows per matrix.
        cls.nseqs = 1000
        cls.matrix_type = dendropy.CharacterMatrix
        cls.sequence_type = dendropy.CharacterDataSequence
        cls.sequence_source = [1, 2, 3, 4]
        cls.build()
+
class ContinuousCharacterMatrixCreatingAndCloningTestCase(
        MatrixCreatingAndCloningTester,
        dendropytest.ExtendedTestCase):
    """Creation/cloning tests specialized for continuous-valued matrices."""

    @classmethod
    def setUpClass(cls):
        # Configure the shared tester harness: continuous matrix/sequence
        # types with assorted float/int values, 1000 rows per matrix.
        cls.nseqs = 1000
        cls.matrix_type = dendropy.ContinuousCharacterMatrix
        cls.sequence_type = dendropy.ContinuousCharacterMatrix.ContinuousCharacterDataSequence
        cls.sequence_source = [-1.0e-1, 42, 2.5e-6, 3.14e5, -1]
        cls.build()
+
+class DnaCharacterMatrixCreatingAndCloningTestCase(
+        MatrixCreatingAndCloningTester,
+        dendropytest.ExtendedTestCase):
+
+    # DNA matrix: sequence states are drawn from the type's own alphabet.
+    @classmethod
+    def setUpClass(cls):
+        cls.matrix_type = dendropy.DnaCharacterMatrix
+        cls.sequence_type = dendropy.DnaCharacterMatrix.DnaCharacterDataSequence
+        cls.sequence_source = list(cls.matrix_type.datatype_alphabet)
+        cls.nseqs = 100
+        cls.build()
+
+class RnaCharacterMatrixCreatingAndCloningTestCase(
+        MatrixCreatingAndCloningTester,
+        dendropytest.ExtendedTestCase):
+
+    # RNA matrix: sequence states are drawn from the type's own alphabet.
+    @classmethod
+    def setUpClass(cls):
+        cls.matrix_type = dendropy.RnaCharacterMatrix
+        cls.sequence_type = dendropy.RnaCharacterMatrix.RnaCharacterDataSequence
+        cls.sequence_source = list(cls.matrix_type.datatype_alphabet)
+        cls.nseqs = 100
+        cls.build()
+
+class NucleotideCharacterMatrixCreatingAndCloningTestCase(
+        MatrixCreatingAndCloningTester,
+        dendropytest.ExtendedTestCase):
+
+    # Generic nucleotide matrix (DNA + RNA states combined).
+    @classmethod
+    def setUpClass(cls):
+        cls.matrix_type = dendropy.NucleotideCharacterMatrix
+        cls.sequence_type = dendropy.NucleotideCharacterMatrix.NucleotideCharacterDataSequence
+        cls.sequence_source = list(cls.matrix_type.datatype_alphabet)
+        cls.nseqs = 100
+        cls.build()
+
+class ProteinCharacterMatrixCreatingAndCloningTestCase(
+        MatrixCreatingAndCloningTester,
+        dendropytest.ExtendedTestCase):
+
+    # Amino-acid matrix: states drawn from the protein alphabet.
+    @classmethod
+    def setUpClass(cls):
+        cls.matrix_type = dendropy.ProteinCharacterMatrix
+        cls.sequence_type = dendropy.ProteinCharacterMatrix.ProteinCharacterDataSequence
+        cls.sequence_source = list(cls.matrix_type.datatype_alphabet)
+        cls.nseqs = 100
+        cls.build()
+
+class RestrictionSitesCharacterMatrixCreatingAndCloningTestCase(
+        MatrixCreatingAndCloningTester,
+        dendropytest.ExtendedTestCase):
+
+    # Binary restriction-sites matrix.
+    @classmethod
+    def setUpClass(cls):
+        cls.matrix_type = dendropy.RestrictionSitesCharacterMatrix
+        cls.sequence_type = dendropy.RestrictionSitesCharacterMatrix.RestrictionSitesCharacterDataSequence
+        cls.sequence_source = list(cls.matrix_type.datatype_alphabet)
+        cls.nseqs = 100
+        cls.build()
+
+class InfiniteSitesCharacterMatrixCreatingAndCloningTestCase(
+        MatrixCreatingAndCloningTester,
+        dendropytest.ExtendedTestCase):
+
+    # Infinite-sites model matrix.
+    @classmethod
+    def setUpClass(cls):
+        cls.matrix_type = dendropy.InfiniteSitesCharacterMatrix
+        cls.sequence_type = dendropy.InfiniteSitesCharacterMatrix.InfiniteSitesCharacterDataSequence
+        cls.sequence_source = list(cls.matrix_type.datatype_alphabet)
+        cls.nseqs = 100
+        cls.build()
+
+class TestCharacterMatrixTaxa(dendropytest.ExtendedTestCase):
+    # NOTE(review): a later class in this module is declared with this
+    # exact same name; since Python class definitions simply rebind the
+    # module-level name, that later definition shadows this one and the
+    # test below is never collected or run. One of the two classes should
+    # be renamed.
+
+    def setUp(self):
+        # Populate a matrix with one dummy sequence per taxon so that
+        # poll_taxa() has rows to inspect.
+        self.char_matrix = charmatrixmodel.CharacterMatrix()
+        labels = [
+                "a", "b", "c", "d", "e", "f",
+                ]
+        self.expected_taxa = set()
+        for label in labels:
+            t = dendropy.Taxon(label=label)
+            self.char_matrix.taxon_namespace.add_taxon(t)
+            self.expected_taxa.add(t)
+            seq = [_ for _ in range(4)]
+            self.char_matrix[t] = seq
+
+    def test_basic_taxa(self):
+        # poll_taxa() should report exactly the taxa with rows in the matrix.
+        self.assertEqual(self.char_matrix.poll_taxa(), self.expected_taxa)
+
+class TestCharacterMatrixTaxa(dendropytest.ExtendedTestCase):
+
+    def setUp(self):
+        self.char_matrix = charmatrixmodel.CharacterMatrix()
+        labels = [
+                "a", "b", "c", "d", "e", "f",
+                ]
+        self.expected_taxa = set()
+        for label in labels:
+            t = dendropy.Taxon(label=label)
+            self.char_matrix.taxon_namespace.add_taxon(t)
+            self.expected_taxa.add(t)
+            seq = [_ for _ in range(4)]
+            self.char_matrix[t] = seq
+
+    def test_noop_purge(self):
+        self.assertEqual(set(self.char_matrix.taxon_namespace), self.expected_taxa)
+        self.char_matrix.purge_taxon_namespace()
+        self.assertEqual(set(self.char_matrix.taxon_namespace), self.expected_taxa)
+
+    def test_basic_purge(self):
+        self.assertEqual(set(self.char_matrix.taxon_namespace), self.expected_taxa)
+        added_taxa = set(self.expected_taxa)
+        for label in ("z1", "z2", "z3", "z4"):
+            t = self.char_matrix.taxon_namespace.new_taxon(label=label)
+            added_taxa.add(t)
+        self.assertEqual(set(self.char_matrix.taxon_namespace), added_taxa)
+        self.char_matrix.purge_taxon_namespace()
+        self.assertEqual(set(self.char_matrix.taxon_namespace), self.expected_taxa)
+
+# Support running this test module standalone.
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_datamodel_dataset.py b/dendropy/test/test_datamodel_dataset.py
new file mode 100644
index 0000000..9aeb2eb
--- /dev/null
+++ b/dendropy/test/test_datamodel_dataset.py
@@ -0,0 +1,237 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests basic DataSet curation
+"""
+
+import collections
+import unittest
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import curated_test_tree_list
+from dendropy.test.support import standard_file_test_chars
+
+class EmptyDataSetWithLabelCreationTestCase(dendropytest.ExtendedTestCase):
+
+    def test_basic_create_with_label(self):
+        ds = dendropy.DataSet(label="d1")
+        self.assertEqual(ds.label, "d1")
+
+class DataSetAddTestCase(dendropytest.ExtendedTestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        standard_file_test_chars.DnaTestChecker.build()
+        standard_file_test_chars.ProteinTestChecker.build()
+
+    def setUp(self):
+        self.expected_taxon_namespaces = []
+        self.standalone_taxon_namespaces = []
+        self.standalone_taxon_namespaces.append(dendropy.TaxonNamespace(["t1", "t2", "t3"]))
+        self.standalone_taxon_namespaces.append(dendropy.TaxonNamespace(["s1", "s2", "s3"]))
+        self.expected_taxon_namespaces.extend(self.standalone_taxon_namespaces)
+        self.expected_tree_lists = collections.OrderedDict()
+        for i in range(2):
+            pdo1 = curated_test_tree_list.get_tree_list(4)
+            self.expected_tree_lists[pdo1] = pdo1.taxon_namespace
+            self.expected_taxon_namespaces.append(pdo1.taxon_namespace)
+            for j in range(2):
+                pdo2 = curated_test_tree_list.get_tree_list(4,
+                        taxon_namespace=pdo1.taxon_namespace)
+                self.expected_tree_lists[pdo2] = pdo2.taxon_namespace
+
+        self.expected_char_matrices = collections.OrderedDict()
+        for i in range(2):
+            pdo1 = standard_file_test_chars.DnaTestChecker.get_char_matrix_from_class_data()
+            self.expected_char_matrices[pdo1] = pdo1.taxon_namespace
+            self.expected_taxon_namespaces.append(pdo1.taxon_namespace)
+            for j in range(2):
+                pdo2 = standard_file_test_chars.ProteinTestChecker.get_char_matrix_from_class_data(taxon_namespace=pdo1.taxon_namespace)
+                self.expected_char_matrices[pdo2] = pdo2.taxon_namespace
+
+    def test_basic_add_taxon_namespace(self):
+        expected_fundamental_states = set()
+        ds = dendropy.DataSet()
+        for tns in self.expected_taxon_namespaces:
+            ds.add_taxon_namespace(tns)
+        self.assertEqual(len(ds.taxon_namespaces), len(self.expected_taxon_namespaces))
+        for x1, x2 in zip(ds.taxon_namespaces, self.expected_taxon_namespaces):
+            self.assertIs(x1, x2)
+
+    def test_basic_add_tree_list(self):
+        ds = dendropy.DataSet()
+        expected_taxon_namespaces = collections.OrderedDict()
+        for tree_list in self.expected_tree_lists:
+            ds.add_tree_list(tree_list)
+            expected_taxon_namespaces[self.expected_tree_lists[tree_list]] = True
+        self.assertEqual(len(ds.taxon_namespaces), len(expected_taxon_namespaces))
+        for x1, x2 in zip(ds.taxon_namespaces, expected_taxon_namespaces):
+            self.assertIs(x1, x2)
+        self.assertEqual(len(ds.tree_lists), len(self.expected_tree_lists))
+        for x1, x2 in zip(ds.tree_lists, self.expected_tree_lists):
+            self.assertIs(x1, x2)
+            self.assertIs(x1.taxon_namespace, self.expected_tree_lists[x1])
+            for t in x1:
+                self.assertIs(t.taxon_namespace, x1.taxon_namespace)
+
+    def test_basic_add_char_matrix(self):
+        ds = dendropy.DataSet()
+        expected_taxon_namespaces = collections.OrderedDict()
+        for char_matrix in self.expected_char_matrices:
+            ds.add_char_matrix(char_matrix)
+            expected_taxon_namespaces[self.expected_char_matrices[char_matrix]] = True
+        self.assertEqual(len(ds.taxon_namespaces), len(expected_taxon_namespaces))
+        for x1, x2 in zip(ds.taxon_namespaces, expected_taxon_namespaces):
+            self.assertIs(x1, x2)
+        self.assertEqual(len(ds.char_matrices), len(self.expected_char_matrices))
+        for x1, x2 in zip(ds.char_matrices, self.expected_char_matrices):
+            self.assertIs(x1, x2)
+            self.assertIs(x1.taxon_namespace, self.expected_char_matrices[x1])
+
+    def test_basic_add(self):
+        ds = dendropy.DataSet()
+        for tns in self.standalone_taxon_namespaces:
+            ds.add(tns)
+        for tree_list in self.expected_tree_lists:
+            ds.add(tree_list)
+        for char_matrix in self.expected_char_matrices:
+            ds.add(char_matrix)
+        self.assertEqual(len(ds.taxon_namespaces), len(self.expected_taxon_namespaces))
+        for x1, x2 in zip(ds.taxon_namespaces, self.expected_taxon_namespaces):
+            self.assertIs(x1, x2)
+        for x1, x2 in zip(ds.tree_lists, self.expected_tree_lists):
+            self.assertIs(x1, x2)
+        for x1, x2 in zip(ds.char_matrices, self.expected_char_matrices):
+            self.assertIs(x1, x2)
+
+    def test_construction(self):
+        item_list = []
+        item_list.extend(self.standalone_taxon_namespaces)
+        item_list.extend(self.expected_tree_lists)
+        item_list.extend(self.expected_char_matrices)
+        ds = dendropy.DataSet(item_list)
+        self.assertEqual(len(ds.taxon_namespaces), len(self.expected_taxon_namespaces))
+        for x1, x2 in zip(ds.taxon_namespaces, self.expected_taxon_namespaces):
+            self.assertIs(x1, x2)
+        for x1, x2 in zip(ds.tree_lists, self.expected_tree_lists):
+            self.assertIs(x1, x2)
+        for x1, x2 in zip(ds.char_matrices, self.expected_char_matrices):
+            self.assertIs(x1, x2)
+
+class DataSetNewTestCase(dendropytest.ExtendedTestCase):
+
+    def test_basic_new_taxon_namespace(self):
+        ds = dendropy.DataSet()
+        tax_labels = ["a", "b", "c", "d", "e"]
+        tns_labels = ["t1", "t2", "t3"]
+        tns_list = []
+        for tns_label in tns_labels:
+             tns = ds.new_taxon_namespace(tax_labels, label=tns_label)
+             self.assertTrue(isinstance(tns, dendropy.TaxonNamespace))
+             tns_list.append(tns)
+        self.assertEqual(len(tns_list), len(tns_labels))
+        for tns, tns_label in zip(tns_list, tns_labels):
+            self.assertEqual(tns.label, tns_label)
+            self.assertEqual(len(tns), len(tax_labels))
+            for taxon, taxon_label in zip(tns, tax_labels):
+                self.assertEqual(taxon.label, taxon_label)
+
+    def test_basic_new_tree_list(self):
+        ds = dendropy.DataSet()
+        item_labels = ["a", "b", "c", "d", "e"]
+        item_list = []
+        for item_idx, item_label in enumerate(item_labels):
+            item = ds.new_tree_list(label=item_label)
+            item_list.append(item)
+        self.assertEqual(len(ds.tree_lists), len(item_labels))
+        self.assertEqual(len(ds.tree_lists), len(item_list))
+        for t1, t2, label in zip(ds.tree_lists, item_list, item_labels):
+            self.assertTrue(isinstance(t1, dendropy.TreeList))
+            self.assertIs(t1, t2)
+            self.assertEqual(t1.label, label)
+
+    def test_basic_new_char_matrix(self):
+        ds = dendropy.DataSet()
+        item_labels = ["a", "b", "c", "d", "e", "f"]
+        cm_type = [
+                "dna",
+                "protein",
+                "standard",
+                dendropy.DnaCharacterMatrix,
+                dendropy.ProteinCharacterMatrix,
+                dendropy.StandardCharacterMatrix,
+                ]
+        expected_cm_types = [
+                dendropy.DnaCharacterMatrix,
+                dendropy.ProteinCharacterMatrix,
+                dendropy.StandardCharacterMatrix,
+                dendropy.DnaCharacterMatrix,
+                dendropy.ProteinCharacterMatrix,
+                dendropy.StandardCharacterMatrix,
+                ]
+        item_list = []
+        for item_label, cm_type in zip(item_labels, cm_type):
+            item = ds.new_char_matrix(label=item_label,
+                    char_matrix_type=cm_type)
+            item_list.append(item)
+        self.assertEqual(len(ds.char_matrices), len(item_labels))
+        self.assertEqual(len(ds.char_matrices), len(item_list))
+        for t1, t2, label, expected_cm_types in zip(ds.char_matrices, item_list, item_labels, expected_cm_types):
+            self.assertTrue(isinstance(t1, expected_cm_types))
+            self.assertIs(t1, t2)
+            self.assertEqual(t1.label, label)
+
+class DataSetAttachedTaxonNamespaceModeTestCase(dendropytest.ExtendedTestCase):
+    """Tests attach/detach semantics of a DataSet's attached taxon namespace."""
+
+    def test_attached_taxon_namespace_default(self):
+        # Attaching registers the namespace and records it as attached;
+        # detaching returns the same object and clears the attachment.
+        ds = dendropy.DataSet()
+        tns = dendropy.TaxonNamespace()
+        ds.attach_taxon_namespace(tns)
+        self.assertTrue(isinstance(tns, dendropy.TaxonNamespace))
+        self.assertEqual(len(ds.taxon_namespaces), 1)
+        self.assertIn(tns, ds.taxon_namespaces)
+        self.assertIs(ds.taxon_namespaces[0], tns)
+        self.assertIs(ds.attached_taxon_namespace, tns)
+        tns2 = ds.detach_taxon_namespace()
+        self.assertIs(ds.attached_taxon_namespace, None)
+        self.assertIs(tns2, tns)
+
+    def test_attached_taxon_namespace_new_tree_list(self):
+        # New tree lists created in attached mode must bind to the
+        # attached namespace rather than creating a fresh one.
+        ds = dendropy.DataSet()
+        tns = dendropy.TaxonNamespace()
+        ds.attach_taxon_namespace(tns)
+        tree_list = ds.new_tree_list(label="q")
+        self.assertEqual(tree_list.label, "q")
+        self.assertIn(tree_list, ds.tree_lists)
+        self.assertIs(tree_list.taxon_namespace, ds.attached_taxon_namespace)
+        self.assertEqual(len(ds.taxon_namespaces), 1)
+
+    def test_attached_taxon_namespace_new_char_matrix(self):
+        # Same contract for new character matrices.
+        ds = dendropy.DataSet()
+        tns = dendropy.TaxonNamespace()
+        ds.attach_taxon_namespace(tns)
+        char_matrix = ds.new_char_matrix(label="q", char_matrix_type="dna")
+        self.assertEqual(char_matrix.label, "q")
+        self.assertIn(char_matrix, ds.char_matrices)
+        self.assertIs(char_matrix.taxon_namespace, ds.attached_taxon_namespace)
+        self.assertEqual(len(ds.taxon_namespaces), 1)
+
+if __name__ == "__main__":
+    unittest.main()
+
diff --git a/dendropy/test/test_datamodel_split_bitmasks.py b/dendropy/test/test_datamodel_split_bitmasks.py
new file mode 100644
index 0000000..5131838
--- /dev/null
+++ b/dendropy/test/test_datamodel_split_bitmasks.py
@@ -0,0 +1,380 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Testing of calculation of and operations with split bitmask hashes.
+"""
+
+import warnings
+import unittest
+import re
+import sys
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+
+from dendropy.test.support import pathmap
+from dendropy.test.support import paupsplitsreference
+from dendropy.test.support.dendropytest import ExtendedTestCase
+from dendropy.utility import messaging
+from dendropy.utility import bitprocessing
+from dendropy.interop import paup
+from dendropy.calculate import treecompare
+import dendropy
+
+_LOG = messaging.get_logger(__name__)
+
+class SplitDistributionTestCases(ExtendedTestCase):
+    """Validates SplitDistribution counts/frequencies against PAUP* reference files."""
+
+    def check_splits_distribution(self,
+            tree_filename,
+            splits_filename,
+            use_tree_weights,
+            is_rooted,
+            expected_num_trees,
+            ):
+        """Count splits over the trees in ``tree_filename`` and compare
+        counts and frequencies against the pre-computed PAUP* reference in
+        ``splits_filename``. ``is_rooted`` may be None to exercise the
+        default (unrooted) interpretation.
+        """
+        if is_rooted is None:
+            key_column_index = 2 # default to unrooted: normalized split bitmask
+        elif is_rooted:
+            key_column_index = 1 # leafset_bitmask / unnormalized split bitmask
+        else:
+            key_column_index = 2 # normalized split bitmask
+        splits_ref = paupsplitsreference.get_splits_reference(
+                splits_filename=splits_filename,
+                key_column_index=key_column_index,
+                )
+        # print("* {} ({})".format(tree_filename, splits_filename))
+        tree_filepath = pathmap.tree_source_path(tree_filename)
+        trees = dendropy.TreeList.get_from_path(
+                tree_filepath,
+                "nexus",
+                store_tree_weights=use_tree_weights)
+        sd = dendropy.SplitDistribution(
+                taxon_namespace=trees.taxon_namespace,
+                use_tree_weights=use_tree_weights)
+        for tree in trees:
+            sd.count_splits_on_tree(tree)
+
+        # trees counted ...
+        self.assertEqual(sd.total_trees_counted, len(trees))
+        # frequencies have not yet been calculated
+        self.assertEqual(sd._trees_counted_for_freqs, 0)
+        self.assertFalse(sd.is_mixed_rootings_counted())
+        if is_rooted:
+            self.assertTrue(sd.is_all_counted_trees_rooted())
+        else:
+            self.assertFalse(sd.is_all_counted_trees_rooted())
+            self.assertTrue(sd.is_all_counted_trees_treated_as_unrooted() or sd.is_all_counted_trees_strictly_unrooted())
+
+        # splits_distribution also counts trivial splits, so this will not work
+        # self.assertEqual(len(splits_ref), len(sd))
+
+        expected_nontrivial_splits = list(splits_ref.keys())
+        observed_splits = set(sd.split_counts.keys())
+        visited_splits = []
+        # for k in sorted(observed_splits):
+        #     print("{}: {}, {}".format(k, sd.split_counts[k], sd[k]))
+        all_taxa_bitmask = sd.taxon_namespace.all_taxa_bitmask()
+        for split in expected_nontrivial_splits:
+            # Counts/frequencies compared to 2 decimal places against PAUP.
+            self.assertAlmostEqual(sd.split_counts[split], splits_ref[split]["count"], 2,
+                    "{} (using '{}'): {}".format(tree_filename, splits_filename, split))
+            self.assertAlmostEqual(sd[split], splits_ref[split]["frequency"], 2,
+                    "{} (using '{}'): {}".format(tree_filename, splits_filename, split))
+            self.assertAlmostEqual(sd.split_frequencies[split], splits_ref[split]["frequency"], 2,
+                    "{} (using '{}'): {}".format(tree_filename, splits_filename, split))
+            if split in observed_splits:
+                observed_splits.remove(split)
+            visited_splits.append(split)
+        self.assertEqual(len(visited_splits), len(expected_nontrivial_splits))
+
+        # ensure remaining splits (not given in PAUP splits file) are trivial ones (which are not tracked by PAUP)
+        for split in observed_splits:
+            self.assertTrue(dendropy.Bipartition.is_trivial_bitmask(split, all_taxa_bitmask))
+
+    def test_group1(self):
+        # Each entry: (tree file, expected tree count, rooted?, weighted?).
+        sources = [
+                ("cetaceans.mb.no-clock.mcmc.trees"    , 251, False, False), # Trees explicitly unrooted
+                ("cetaceans.mb.no-clock.mcmc.weighted-01.trees" , 251, False , True), # Weighted
+                ("cetaceans.mb.no-clock.mcmc.weighted-02.trees" , 251, False , True), # Weighted
+                ("cetaceans.mb.no-clock.mcmc.weighted-03.trees" , 251, False , True), # Weighted
+                ("cetaceans.mb.strict-clock.mcmc.trees", 251, True , False), # Trees explicitly rooted
+                ("cetaceans.mb.strict-clock.mcmc.weighted-01.trees" , 251, True , True), # Weighted
+                ("cetaceans.mb.strict-clock.mcmc.weighted-02.trees" , 251, True , True), # Weighted
+                ("cetaceans.mb.strict-clock.mcmc.weighted-03.trees" , 251, True , True), # Weighted
+                ("issue_mth_2009-02-03.rooted.nexus"   , 100, True , False), # 100 trees (frequency column not reported by PAUP)
+                ("issue_mth_2009-02-03.unrooted.nexus" , 100, False , False), # 100 trees (frequency column not reported by PAUP)
+                ("cetaceans.raxml.bootstraps.trees"    , 250, None , False), # No tree rooting statement; PAUP defaults to rooted, DendroPy defaults to unrooted
+                ("cetaceans.raxml.bootstraps.weighted-01.trees"    , 250, None , False), # No tree rooting statement; PAUP defaults to rooted, DendroPy defaults to unrooted
+                ("cetaceans.raxml.bootstraps.weighted-02.trees"    , 250, None , False), # No tree rooting statement; PAUP defaults to rooted, DendroPy defaults to unrooted
+                ("cetaceans.raxml.bootstraps.weighted-03.trees"    , 250, None , False), # No tree rooting statement; PAUP defaults to rooted, DendroPy defaults to unrooted
+        ]
+        splits_filename_template = "{stemname}.is-rooted-{is_rooted}.use-tree-weights-{use_weights}.burnin-{burnin}.splits.txt"
+        for tree_filename, num_trees, treefile_is_rooted, treefile_is_weighted in sources:
+            stemname = tree_filename
+            for use_weights in (False, True, None):
+                expected_is_rooted = treefile_is_rooted
+                splits_filename = splits_filename_template.format(
+                        stemname=stemname,
+                        is_rooted=expected_is_rooted,
+                        use_weights=use_weights,
+                        burnin=0)
+                self.check_splits_distribution(
+                        tree_filename=tree_filename,
+                        splits_filename=splits_filename,
+                        is_rooted=treefile_is_rooted,
+                        use_tree_weights=use_weights,
+                        expected_num_trees=num_trees)
+
+class SplitCountTest(ExtendedTestCase):
+    """Compares DendroPy split counting against live PAUP* runs."""
+
+    @classmethod
+    def setUpClass(cls):
+        # Python 2 compatibility: assertRaisesRegex was named
+        # assertRaisesRegexp before Python 3.2.
+        if sys.version_info.major < 3:
+            cls.assertRaisesRegex = cls.assertRaisesRegexp
+
+    def check_split_counting(self,
+            tree_filename,
+            test_as_rooted,
+            parser_rooting_interpretation,
+            test_ignore_tree_weights=False,
+            dp_ignore_tree_weights=False,
+            ):
+        """Count splits with both PAUP* and DendroPy on the same tree file
+        and assert the two split-count maps agree exactly (modulo the
+        trivial splits that PAUP does not track).
+        """
+        tree_filepath = pathmap.tree_source_path(tree_filename)
+        ps = paup.PaupService()
+        paup_sd = ps.get_split_distribution_from_files(
+                tree_filepaths=[tree_filepath],
+                is_rooted=test_as_rooted,
+                use_tree_weights=not test_ignore_tree_weights,
+                burnin=0,
+                taxa_definition_filepath=tree_filepath
+                )
+        taxon_namespace = paup_sd.taxon_namespace
+        dp_sd = dendropy.SplitDistribution(taxon_namespace=taxon_namespace)
+        dp_sd.ignore_edge_lengths = True
+        dp_sd.ignore_node_ages = True
+        dp_sd.ignore_tree_weights = dp_ignore_tree_weights
+        taxa_mask = taxon_namespace.all_taxa_bitmask()
+        # Lock the namespace so parsing cannot introduce new taxa.
+        taxon_namespace.is_mutable = False
+        trees = dendropy.TreeList.get_from_path(tree_filepath,
+                "nexus",
+                rooting=parser_rooting_interpretation,
+                taxon_namespace=taxon_namespace)
+        for tree in trees:
+            self.assertIs(tree.taxon_namespace, taxon_namespace)
+            self.assertIs(tree.taxon_namespace, dp_sd.taxon_namespace)
+            dp_sd.count_splits_on_tree(
+                    tree,
+                    is_bipartitions_updated=False)
+        self.assertEqual(dp_sd.total_trees_counted, paup_sd.total_trees_counted)
+        taxa_mask = taxon_namespace.all_taxa_bitmask()
+        for split in dp_sd.split_counts:
+            if not dendropy.Bipartition.is_trivial_bitmask(split, taxa_mask):
+                # if split not in paup_sd.split_counts:
+                #     print("{}: {}".format(split, split in paup_sd.split_counts))
+                #     s2 = taxon_namespace.normalize_bitmask(split)
+                #     print("{}: {}".format(s2, s2 in paup_sd.split_counts))
+                #     s3 = ~split & taxon_namespace.all_taxa_bitmask()
+                #     print("{}: {}".format(s3, s3 in paup_sd.split_counts))
+                self.assertIn(split, paup_sd.split_counts, "split not found")
+                self.assertEqual(dp_sd.split_counts[split], paup_sd.split_counts[split], "incorrect split frequency")
+                del paup_sd.split_counts[split]
+        # Whatever PAUP counted that DendroPy did not must be trivial.
+        remaining_splits = list(paup_sd.split_counts.keys())
+        for split in remaining_splits:
+            if dendropy.Bipartition.is_trivial_bitmask(split, taxa_mask):
+                del paup_sd.split_counts[split]
+        self.assertEqual(len(paup_sd.split_counts), 0)
+
+    def test_basic_split_count_with_incorrect_rootings_raises_error(self):
+        # Deliberately mismatched rooting must make the comparison fail
+        # with one of the two assertion messages used above.
+        assertion_error_regexp1 = re.compile("(incorrect split frequency|split not found)")
+        test_cases = (
+            ('pythonidae.reference-trees.nexus', True, "force-unrooted", assertion_error_regexp1),
+            ('feb032009.trees.nexus', False, "force-rooted", assertion_error_regexp1),
+            )
+        for test_case, test_as_rooted, parser_rooting_interpretation, assertion_error_regexp in test_cases:
+            with self.assertRaisesRegex(AssertionError, assertion_error_regexp):
+                self.check_split_counting(
+                        test_case,
+                        test_as_rooted=test_as_rooted,
+                        parser_rooting_interpretation=parser_rooting_interpretation)
+
+    def test_basic_split_count_with_incorrect_weight_treatment_raises_error(self):
+        # Weighted tree files counted with mismatched weight handling
+        # should produce diverging counts.
+        assertion_error_regexp1 = re.compile("incorrect split frequency")
+        test_cases = (
+                ("cetaceans.mb.no-clock.mcmc.weighted-01.trees", False),
+                ("cetaceans.mb.strict-clock.mcmc.weighted-01.trees", True),
+            )
+        for test_case, test_as_rooted in test_cases:
+            with self.assertRaisesRegex(AssertionError, assertion_error_regexp1):
+                self.check_split_counting(
+                        test_case,
+                        test_as_rooted=test_as_rooted,
+                        parser_rooting_interpretation="default-rooted",
+                        test_ignore_tree_weights=False,
+                        dp_ignore_tree_weights=False,
+                        )
+
+    def test_basic_split_counting_under_different_rootings(self):
+        # Matched rootings across several files should agree with PAUP.
+        test_cases = (
+            'pythonidae.reference-trees.nexus',
+            'feb032009.trees.nexus',
+            'maj-rule-bug1.trees.nexus',
+            'maj-rule-bug2.trees.nexus',
+            )
+        for is_rooted in (True, False):
+            if is_rooted:
+                rooting = "force-rooted"
+            else:
+                rooting = "force-unrooted"
+            for test_case in test_cases:
+                self.check_split_counting(
+                        test_case,
+                        test_as_rooted=is_rooted,
+                        parser_rooting_interpretation=rooting)
+
+class CladeMaskTest(unittest.TestCase):
+    """Checks split bitmasks and bitprocessing helpers on a small rooted tree."""
+
+    def runTest(self):
+        # rooted tree: so clade bitmasks
+        tree_list = dendropy.TreeList.get_from_stream(
+            StringIO("""[&R]((t5:0.161175,t6:0.161175):0.392293,((t4:0.104381,(t2:0.075411,t1:0.075411):1):0.065840,t3:0.170221):0.383247);"""),
+            "newick")
+        for i in tree_list:
+            _LOG.debug(i._get_indented_form())
+            i.encode_bipartitions()
+            _LOG.debug(i._get_indented_form(splits=True))
+            i._debug_check_tree(splits=True, logger_obj=_LOG)
+        # The seed node's split covers all six leaves (bits 0..5).
+        root1 = tree_list[0].seed_node
+        root1e = root1.edge
+        self.assertEqual(bitprocessing.indexes_of_set_bits(root1e.split_bitmask), list(range(6)))
+        self.assertEqual(bitprocessing.indexes_of_set_bits(root1e.split_bitmask, one_based=True), list(range(1,7)))
+        self.assertEqual(bitprocessing.indexes_of_set_bits(root1e.split_bitmask, fill_bitmask=21, one_based=True), [1, 3, 5])
+        self.assertEqual(bitprocessing.indexes_of_set_bits(root1e.split_bitmask, fill_bitmask=21), [0, 2, 4])
+        self.assertEqual(bitprocessing.num_set_bits(root1e.split_bitmask), 6)
+
+        # The first child clade covers leaves 0 and 1 only.
+        fc1 = root1.child_nodes()[0]
+        fc1e = fc1.edge
+        self.assertEqual(bitprocessing.indexes_of_set_bits(fc1e.split_bitmask), [0, 1])
+        self.assertEqual(bitprocessing.indexes_of_set_bits(fc1e.split_bitmask, one_based=True), [1, 2])
+        self.assertEqual(bitprocessing.indexes_of_set_bits(fc1e.split_bitmask, fill_bitmask=0x15, one_based=True), [1])
+        self.assertEqual(bitprocessing.indexes_of_set_bits(fc1e.split_bitmask, fill_bitmask=0x15), [0])
+        self.assertEqual(bitprocessing.num_set_bits(fc1e.split_bitmask), 2)
+
+class CountBitsTest(unittest.TestCase):
+
+    def runTest(self):
+        self.assertEqual(bitprocessing.num_set_bits(21), 3)
+
+class LowestBitTest(unittest.TestCase):
+
+    def runTest(self):
+        for n, expected in enumerate([0, 1, 2, 1, 4, 1, 2, 1, 8, 1, 2, 1, 4, 1, 2, 1, 16]):
+            self.assertEqual(bitprocessing.least_significant_set_bit(n), expected)
+
+class IsTrivialTest(unittest.TestCase):
+    """Truth tables for Bipartition.is_trivial_bitmask() over small masks.
+
+    A split is "trivial" if it separates zero or one taxon from the rest;
+    each list below gives the expected result for bitmasks 0..31 under
+    the given taxa mask.
+    """
+
+    def runTest(self):
+        y = True
+        n = False
+        for i, r in enumerate([y, y, y, n, y, n, n, y, y, n, n, y, n, y, y, y, y, y, y, n, y, n, n, y, y, n, n, y, n, y, y, y, ]):
+            self.assertEqual(r, dendropy.Bipartition.is_trivial_bitmask(i, 0xF))
+        for i, r in enumerate([y, y, y, n, y, n, n, n, y, n, n, n, n, n, n, y, y, n, n, n, n, n, n, y, n, n, n, y, n, y, y, y, ]):
+            self.assertEqual(r, dendropy.Bipartition.is_trivial_bitmask(i, 0x1F))
+                              #0  1  2  3  4  5  6  7  8  9  0  1  2  3  4  5  6  7  8  9  0  1  2  3  4  5  6  7  8  9  0  1
+        for i, r in enumerate([y, y, y, n, y, n, n, y, y, y, y, n, y, n, n, y, y, n, n, y, n, y, y, y, y, n, n, y, n, y, y, y, ]):
+            self.assertEqual(r, dendropy.Bipartition.is_trivial_bitmask(i, 0x17))
+
+class IncompleteLeafSetSplitTest(unittest.TestCase):
+    """Tests split handling on trees pruned to incomplete leaf sets."""
+
+    def check(self, title, src_prefix):
+        # Trees pruned by DendroPy must be topologically identical
+        # (symmetric difference 0) to the same trees pruned by PAUP.
+        tns = dendropy.TaxonNamespace()
+        input_ds = dendropy.DataSet.get_from_path(
+                src=pathmap.tree_source_path(src_prefix + ".dendropy-pruned.nex"),
+                schema='nexus',
+                attached_taxon_namespace=tns)
+        input_taxa = input_ds.taxon_namespaces[0]
+        output_ds = dendropy.DataSet.get_from_path(
+                src=pathmap.tree_source_path(src_prefix + ".paup-pruned.nex"),
+                schema='nexus',
+                taxon_namespace=input_taxa)
+        for set_idx, src_trees in enumerate(input_ds.tree_lists):
+            src_trees = input_ds.tree_lists[set_idx]
+            ref_trees = output_ds.tree_lists[set_idx]
+            for tree_idx, src_tree in enumerate(src_trees):
+                _LOG.debug("%s Set %d/%d, Tree %d/%d" % (title, set_idx+1, len(input_ds.tree_lists), tree_idx+1, len(src_trees)))
+                ref_tree = ref_trees[tree_idx]
+                # tree_dist = paup.symmetric_difference(src_tree, ref_tree)
+                # d = src_tree.symmetric_difference(ref_tree)
+                # if d > 0:
+                #     print d
+                self.assertEqual(treecompare.symmetric_difference(src_tree, ref_tree), 0)
+
+    def testUnrooted(self):
+        self.check("Unrooted", "incomplete_leaves_unrooted")
+
+    def testRooted(self):
+        self.check("Rooted", "incomplete_leaves_rooted")
+
+    def testPrunedThenEncoding(self):
+        # Pruning a tree down to another tree's leaf set, then comparing,
+        # should give the expected symmetric difference.
+        inp = StringIO('''(a,b,c,(d,e));
+        (b,d,(c,e));''')
+        first, second = dendropy.TreeList.get_from_stream(inp, schema='newick')
+        # prune tree 1 to have the same leaf set as tree 2.
+        #   this removes the first taxon in the taxon list "A"
+        retain_list = set([node.taxon for node in second.leaf_nodes()])
+        exclude_list = [node for node in first.leaf_nodes() if node.taxon not in retain_list]
+        for nd in exclude_list:
+            first.prune_subtree(nd)
+        # the trees are now (b,c,(d,e)) and (b,d,(c,e)) so the symmetric diff is 2
+        self.assertEqual(2, treecompare.symmetric_difference(first, second))
+
+class TestTreeSplitSupportCredibilityScoring(unittest.TestCase):
+    """Tests split-support credibility scores against precomputed values."""
+
+    def setUp(self):
+        # Build a split distribution over the full reference tree set.
+        self.trees = dendropy.TreeList.get_from_path(
+                pathmap.tree_source_path("issue_mth_2009-02-03.rooted.nexus"),
+                "nexus")
+        self.split_distribution = dendropy.SplitDistribution(taxon_namespace=self.trees.taxon_namespace)
+        for tree in self.trees:
+            self.split_distribution.count_splits_on_tree(
+                    tree,
+                    is_bipartitions_updated=False)
+
+    def test_product_of_split_support_on_tree(self):
+        # Expected value precomputed for tree #70 of this fixture.
+        t1 = self.trees[70]
+        self.assertAlmostEqual(
+                self.split_distribution.log_product_of_split_support_on_tree(t1),
+                -33.888380488585284)
+
+    def test_sum_of_split_support_on_tree(self):
+        # Expected value precomputed for tree #73 of this fixture.
+        t1 = self.trees[73]
+        self.assertAlmostEqual(
+                self.split_distribution.sum_of_split_support_on_tree(t1),
+                30.89000000000001)
+
+    def test_sum_of_split_support_on_tree2(self):
+        # Including external (trivial) splits adds 1.0 support per taxon.
+        t1 = self.trees[73]
+        self.assertAlmostEqual(
+                self.split_distribution.sum_of_split_support_on_tree(t1, include_external_splits=True),
+                30.89000000000001 + len(self.trees.taxon_namespace))
+
+if __name__ == "__main__":
+    if paup.DENDROPY_PAUP_INTEROPERABILITY:
+        unittest.main()
+    else:
+        _LOG.warn("PAUP interoperability not available: skipping split counting tests")
diff --git a/dendropy/test/test_datamodel_statealphabet.py b/dendropy/test/test_datamodel_statealphabet.py
new file mode 100644
index 0000000..6ffc95d
--- /dev/null
+++ b/dendropy/test/test_datamodel_statealphabet.py
@@ -0,0 +1,581 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests state alphabet definition and management.
+"""
+
+import sys
+import random
+import itertools
+import unittest
+import collections
+import dendropy
+from dendropy.utility import container
+from dendropy.test.support import dendropytest
+
+class StateAlphabetTester(object):  # mix-in: concrete subclasses define self.sa, self.rng, and the expected_* attributes
+
+    def validate_state_identities(self,
+            state_container,
+            state_iter,
+            symbol_iter,
+            expected_symbols,
+            expected_denomination,
+            member_state_map,
+            additional_synonyms_map,
+            case_sensitive=True):
+        self.assertEqual(len(state_container), len(expected_symbols))  # one state per expected canonical symbol
+        states = list(state_iter())
+        self.assertEqual(len(states), len(expected_symbols))
+        canonical_symbols = list(symbol_iter(include_synonyms=False))
+        self.assertEqual(canonical_symbols, expected_symbols)
+        for state, symbol in zip(states, expected_symbols):
+            all_synonyms = []
+            self.assertEqual(state.symbol, symbol)
+            self.assertEqual(state.state_denomination, expected_denomination)
+            if member_state_map:  # multistate case: check expansion to member fundamental symbols
+                expected_member_state_symbols = tuple(member_state_map[symbol])
+                self.assertEqual(state.fundamental_symbols, expected_member_state_symbols)
+                fundamental_states = self.sa.get_fundamental_states_for_symbols(state.symbol)
+                fss = [fs.symbol for fs in fundamental_states]
+                self.assertEqual(tuple(fss), expected_member_state_symbols)
+            else:  # fundamental case: a state is its own (only) fundamental state
+                self.assertEqual(state.fundamental_states, (state,))
+                self.assertEqual(state.fundamental_symbols, (state.symbol,))
+            if case_sensitive:
+                self.assertNotIn(symbol.upper(), state.symbol_synonyms)
+                self.assertNotIn(symbol.lower(), state.symbol_synonyms)
+            else:  # case-insensitive alphabets register the other case as a synonym
+                if symbol.upper() != symbol:
+                    self.assertIn(symbol.upper(), state.symbol_synonyms)
+                    all_synonyms.append(symbol.upper())
+                if symbol.lower() != symbol:
+                    self.assertIn(symbol.lower(), state.symbol_synonyms)
+                    all_synonyms.append(symbol.lower())
+            if additional_synonyms_map:
+                x = additional_synonyms_map.get(state.symbol, None)
+                if x is not None:
+                    all_synonyms.extend(x)
+            self.assertEqual(set(all_synonyms), set(state.symbol_synonyms),
+                    state)
+            self.assertEqual(len(all_synonyms), len(state.symbol_synonyms), state.symbol)
+
+            expected_fundamental_state_symbols = member_state_map.get(state.symbol, None)
+            if expected_fundamental_state_symbols is None:
+                expected_fundamental_states = set()
+            else:
+                expected_fundamental_states = self.sa.get_states_for_symbols(expected_fundamental_state_symbols)
+                check_ss = [x.symbol for x in expected_fundamental_states]
+                self.assertEqual(set(check_ss), set(expected_fundamental_state_symbols))  # NOTE(review): expected_fundamental_states is never compared to the state itself -- incomplete check?
+
+    def test_fundamental_state_definitions(self):
+        self.validate_state_identities(
+                state_container=self.sa._fundamental_states,
+                state_iter=self.sa.fundamental_state_iter,
+                symbol_iter=self.sa.fundamental_symbol_iter,
+                expected_symbols=self.expected_fundamental_state_symbols,
+                expected_denomination=self.sa.FUNDAMENTAL_STATE,
+                member_state_map={},  # fundamental states have no member-state expansion
+                additional_synonyms_map=self.additional_synonyms_map,
+                case_sensitive=False)
+
+    def test_ambiguous_state_definitions(self):
+        self.validate_state_identities(
+                state_container=self.sa._ambiguous_states,
+                state_iter=self.sa.ambiguous_state_iter,
+                symbol_iter=self.sa.ambiguous_symbol_iter,
+                expected_symbols=self.expected_ambiguous_state_symbols,
+                expected_denomination=self.sa.AMBIGUOUS_STATE,
+                member_state_map=self.ambiguous_symbol_mappings,
+                additional_synonyms_map=self.additional_synonyms_map,
+                case_sensitive=False)
+
+    def test_polymorphic_state_definitions(self):
+        self.validate_state_identities(
+                state_container=self.sa._polymorphic_states,
+                state_iter=self.sa.polymorphic_state_iter,
+                symbol_iter=self.sa.polymorphic_symbol_iter,
+                expected_symbols=self.expected_polymorphic_state_symbols,
+                expected_denomination=self.sa.POLYMORPHIC_STATE,
+                member_state_map=self.polymorphic_symbol_mappings,
+                additional_synonyms_map=self.additional_synonyms_map,
+                case_sensitive=False)
+
+    def test_state_iter(self):
+        states = list(self.sa.state_iter())
+        self.assertEqual(len(states), self.num_total_states)
+        self.assertEqual(len(self.sa), len(states))
+        expected_state_symbol_iter = itertools.chain(  # canonical order: fundamental, then ambiguous, then polymorphic
+                self.expected_fundamental_state_symbols,
+                self.ambiguous_symbol_mappings,
+                self.polymorphic_symbol_mappings
+                )
+        for state, symbol in zip(states, expected_state_symbol_iter):
+            self.assertEqual(state.symbol, symbol)
+
+    def test_symbol_iter(self):
+        # assumes that the state iterators -- fundamental_state_iter,
+        # ambiguous_state_iter, etc. -- all work as advertised
+        iter_groups = (
+                (self.sa.fundamental_symbol_iter, self.sa.fundamental_state_iter),
+                (self.sa.ambiguous_symbol_iter, self.sa.ambiguous_state_iter),
+                (self.sa.polymorphic_symbol_iter, self.sa.polymorphic_state_iter),
+                (self.sa.multistate_symbol_iter, self.sa.multistate_state_iter),
+                )
+        for symbol_iter, state_iter in iter_groups:
+            states = list(state_iter())
+            for include_synonyms in (False, True):  # exercise both canonical-only and synonym-inclusive modes
+                expected_symbols = []
+                for state in states:
+                    if state.symbol:
+                        expected_symbols.append(state.symbol)
+                    if include_synonyms:
+                        for ss in state.symbol_synonyms:
+                            expected_symbols.append(ss)
+                obs_symbols = list(symbol_iter(include_synonyms=include_synonyms))
+                self.assertEqual(expected_symbols, obs_symbols)
+
+    def test_symbol_state_pair_iter(self):
+        states = list(self.sa.state_iter())
+        for include_synonyms in (False, True):
+            expected_pairs = []
+            for state in states:
+                if state.symbol:
+                    expected_pairs.append((state.symbol, state,))
+                if include_synonyms:
+                    for ss in state.symbol_synonyms:
+                        expected_pairs.append((ss, state,))
+            obs_pairs = list(self.sa.symbol_state_pair_iter(include_synonyms=include_synonyms))
+            self.assertEqual(expected_pairs, obs_pairs)
+
+    def test_state_denomination(self):
+        for state in self.sa.fundamental_state_iter():
+            self.assertEqual(state.state_denomination, self.sa.FUNDAMENTAL_STATE)
+        for state in self.sa.ambiguous_state_iter():
+            self.assertEqual(state.state_denomination, self.sa.AMBIGUOUS_STATE)
+        for state in self.sa.polymorphic_state_iter():
+            self.assertEqual(state.state_denomination, self.sa.POLYMORPHIC_STATE)
+
+    def test_compiled_lookup_immutability(self):
+        self.sa.compile_lookup_mappings()
+        for m in (
+                self.sa.canonical_symbol_state_map,
+                self.sa.full_symbol_state_map,
+                self.sa._fundamental_states_to_ambiguous_state_map,
+                self.sa._fundamental_states_to_polymorphic_state_map
+                ):
+            if m:
+                k = list(m.keys())[0]
+            else:
+                k = 1  # arbitrary key to probe mutation on an empty mapping
+            with self.assertRaises(container.FrozenOrderedDict.ImmutableTypeError):
+                m[k] = 1
+            with self.assertRaises(container.FrozenOrderedDict.ImmutableTypeError):
+                del m[k]
+            with self.assertRaises(container.FrozenOrderedDict.ImmutableTypeError):
+                m.pop(k)
+            with self.assertRaises(container.FrozenOrderedDict.ImmutableTypeError):
+                m.clear()
+            with self.assertRaises(container.FrozenOrderedDict.ImmutableTypeError):
+                m.update({})
+            with self.assertRaises(container.FrozenOrderedDict.ImmutableTypeError):
+                m.fromkeys([1,2,3])
+        # check if re-compilation is possible
+        self.sa.compile_lookup_mappings()
+
+    def test_canonical_symbol_state_map(self):
+        m = self.sa.canonical_symbol_state_map
+        states = list(self.sa.state_iter())
+        exp_symbols = [s.symbol for s in states if s.symbol]
+        obs_symbols = list(m)
+        self.assertEqual(obs_symbols, exp_symbols)
+        self.assertEqual(len(m), len(states))  # NOTE(review): holds only if every state has a non-empty symbol -- confirm
+        for obs_symbol, exp_state in zip(m, states):
+            self.assertEqual(obs_symbol, exp_state.symbol)
+            self.assertIs(m[obs_symbol], exp_state)
+
+    def test_full_symbol_state_map(self):
+        m = self.sa.full_symbol_state_map
+        states = list(self.sa.state_iter())
+        exp_symbols = []
+        exp_symbol_state_pairs = []
+        for state in states:
+            if state.symbol:
+                exp_symbols.append(state.symbol)
+                exp_symbol_state_pairs.append((state.symbol, state))
+                for s in state.symbol_synonyms:
+                    exp_symbols.append(s)
+                    exp_symbol_state_pairs.append((s, state))
+        obs_symbols = list(m)
+        if self.sa.no_data_state is not None:
+            exp_symbols.insert(0, None)  # the no-data state is keyed by None at the head of the map
+            exp_symbol_state_pairs.insert(0, (None, self.sa.no_data_state))
+        self.assertEqual(obs_symbols, exp_symbols)
+        self.assertEqual(len(m), len(exp_symbols))
+        self.assertEqual(len(m), len(exp_symbol_state_pairs))
+        for obs_symbol, exp_symbol, sspair in zip(m, exp_symbols, exp_symbol_state_pairs):
+            self.assertEqual(obs_symbol, exp_symbol)
+            self.assertEqual(obs_symbol, sspair[0])
+            self.assertIs(m[obs_symbol], sspair[1])
+
+    def test_no_data_state(self):
+        if self.sa.no_data_state is not None:
+
+            # some setup
+            expected_fundamental_states = list(self.sa.fundamental_state_iter())
+            expected_fundamental_symbols = [s.symbol for s in expected_fundamental_states]
+            test_symbols = [None] + expected_fundamental_symbols
+            expected_states = [self.sa.no_data_state] + expected_fundamental_states
+
+            # check definitions
+            self.assertIn(self.sa.no_data_state, self.sa._ambiguous_states)
+            self.assertEqual(self.sa.no_data_state.symbol, self.sa.no_data_symbol)
+            self.assertEqual(self.sa.no_data_state._member_states, tuple(expected_fundamental_states))  # no-data is ambiguous over all fundamentals
+
+            # check look-up map
+            full_map = self.sa.full_symbol_state_map
+            self.assertIn(None, full_map)
+            self.assertIs(full_map[None], self.sa.no_data_state)
+
+            # __getitem__
+            self.assertIs(self.sa[None], self.sa.no_data_state)
+
+            # get_states_for_symbols
+            s = self.sa.get_states_for_symbols(test_symbols)
+            self.assertEqual(s, expected_states)
+            self.assertIs(s[0], self.sa.no_data_state)
+
+            # get_fundamental_states_for_symbols
+            self.assertEqual(self.sa.get_fundamental_states_for_symbols([None]), expected_fundamental_states)
+
+            # get_canonical_symbol_for_symbol
+            self.assertEqual(self.sa.get_canonical_symbol_for_symbol(None), self.sa.no_data_symbol)
+
+            # match_ambiguous_state
+            self.assertIs(self.sa.match_ambiguous_state(expected_fundamental_symbols), self.sa.no_data_state)
+
+        else:  # alphabets without a no-data state must not expose a None key
+            full_map = self.sa.full_symbol_state_map
+            self.assertNotIn(None, full_map)
+
+    def test_getitem(self):
+        alphabet = self.sa
+        for state in self.sa.state_iter():
+            self.assertIs(alphabet[state.symbol], state)
+            for ss in state.symbol_synonyms:
+                self.assertIs(alphabet[ss], state)
+            if state._index is not None:
+                self.assertIs(alphabet[state._index], state)  # states are also addressable by their index
+
+#     def test_get_states_for_symbol(self):
+#         states = list(self.sa.state_iter())
+#         for rep in range(3):
+#             n = random.randint(5, 100)
+#             selected_states = [self.rng.choice(states) for _ in range(n)]
+#             selected_symbols = [s.symbol for s in selected_states]
+#             obs_states = self.sa.get_states_for_symbols(selected_symbols)
+#             self.assertEqual(obs_states, selected_states)
+
+    def test_get_states_for_symbols(self):
+        all_symbols = list(self.sa.full_symbol_state_map.keys())
+        for rep in range(3):
+            n = random.randint(5, 100)
+            selected_symbols = [self.rng.choice(all_symbols) for _ in range(n)]
+            selected_states = [self.sa[s] for s in selected_symbols]
+            obs_states = self.sa.get_states_for_symbols(selected_symbols)
+            self.assertEqual(obs_states, selected_states, "random seed: {}".format(self.random_seed))  # seed in message so failures are reproducible
+
+    def test_states_property(self):
+        check = list(self.sa.state_iter())
+        self.assertEqual(len(check), len(self.sa.states))
+        for s1, s2 in zip(check, self.sa.states):
+            self.assertIs(s1, s2)
+
+    def test_canonical_symbols_property(self):
+        check = list(self.sa.canonical_symbol_state_map.keys())
+        self.assertEqual(len(check), len(self.sa.states))  # NOTE(review): compares symbol count to state count; equal only if every state has a symbol
+        for s1, s2 in zip(check, self.sa.symbols):
+            self.assertEqual(s1, s2)
+
+    def test_get_canonical_symbol_for_symbol(self):
+        states = list(self.sa.state_iter())
+        expected = {}
+        no_data_state = None
+        for state in states:
+            if state.symbol:
+                expected[state.symbol] = state.symbol
+            if state is self.sa.no_data_state:
+                no_data_state = state
+            for ss in state.symbol_synonyms:
+                expected[ss] = state.symbol  # every synonym maps back to the canonical symbol
+            for symbol in self.sa.full_symbol_state_map:
+            if symbol is None:  # a None key is present only when a no-data state is defined
+                self.assertIsNot(self.sa.no_data_state, None)
+                self.assertIsNot(self.sa.no_data_symbol, None)
+                self.assertIs(self.sa.no_data_state, no_data_state)
+            else:
+                self.assertEqual(self.sa.get_canonical_symbol_for_symbol(symbol), expected[symbol])
+
+    def test_get_fundamental_states_for_symbols(self):
+        all_symbols = list(self.sa.full_symbol_state_map.keys())
+        for rep in range(3):
+            n = random.randint(5, 100)
+            selected_symbols = [self.rng.choice(all_symbols) for _ in range(n)]
+            selected_states = []
+            for symbol in selected_symbols:
+                state = self.sa[symbol]
+                if state.state_denomination == self.sa.FUNDAMENTAL_STATE:
+                    selected_states.append(state)
+                else:
+                    if state.state_denomination == self.sa.AMBIGUOUS_STATE:
+                        mapping_src = self.ambiguous_symbol_mappings
+                    elif state.state_denomination == StateAlphabet.POLYMORPHIC_STATE:
+                        mapping_src = self.polymorphic_symbol_mappings
+                    else:
+                        raise Exception("Unrecognized denomination: {}".format(state.state_denomination))
+                    member_states = []
+                    canonical_symbol = self.sa.get_canonical_symbol_for_symbol(symbol)
+                    for member_symbol in mapping_src[canonical_symbol]:
+                        member_states.append(self.sa[member_symbol])
+                    selected_states.extend(member_states)
+            obs_states = self.sa.get_fundamental_states_for_symbols(selected_symbols)
+            if obs_states != selected_states:
+                print("\nSelected Symbols: {}\n Selected States: {}\nObserved Symbols: {}\nrandom seed: {}".format(
+                        "".join(selected_symbols),
+                        "".join([s.symbol for s in selected_states]),
+                        "".join([s.symbol for s in obs_states]),
+                        self.random_seed))
+            self.assertEqual(obs_states, selected_states)
+
+    def test_match_state(self):
+        multistate_states = [list(self.sa.ambiguous_state_iter()), list(self.sa.polymorphic_state_iter())]
+        match_fns = [self.sa.match_ambiguous_state, self.sa.match_polymorphic_state]
+        for multistate_states, match_fn in zip(multistate_states, match_fns):  # NOTE(review): loop variable shadows the outer list; works only because zip() is built first
+            for multistate in multistate_states:
+                member_states = list(multistate.member_states)
+                potential_symbols = []
+                for member_state in member_states:
+                    member_symbols = [member_state.symbol]
+                    for ss in member_state.symbol_synonyms:
+                        member_symbols.append(ss)
+                    potential_symbols.append(member_symbols)
+                for rep in range(5):
+                    selected_symbols = [self.rng.choice(x) for x in potential_symbols]
+                    self.rng.shuffle(selected_symbols)  # match should be order-insensitive
+                    if self.rng.uniform(0, 1) < 0.5:
+                        selected_symbols = "".join(selected_symbols)  # sometimes pass a string instead of a list
+                    matched_state = match_fn(selected_symbols)
+                    self.assertIs(matched_state, multistate, "random seed: {}".format(self.random_seed))
+
+    def test_on_the_fly_creation_of_multistate(self):
+        multistate_states = [list(self.sa.ambiguous_state_iter()), list(self.sa.polymorphic_state_iter())]
+        match_fns = [self.sa.match_ambiguous_state, self.sa.match_polymorphic_state]
+        add_fns = [self.sa.new_ambiguous_state, self.sa.new_polymorphic_state]
+        state_collections = [self.sa._ambiguous_states, self.sa._polymorphic_states]
+        symbol_pool = list(self.sa.fundamental_symbol_iter())
+        for multistate_states, match_fn, add_fn, state_collection in zip(multistate_states, match_fns, add_fns, state_collections):
+            pre_existing_symbol_combinations = []
+            new_symbol_combinations = []
+            nreps = 0
+            while len(new_symbol_combinations) < 3 and nreps < 5:
+                nreps += 1
+                max_sample_size = min(5, len(self.sa))  # NOTE(review): presumably should cap by len(symbol_pool); safe here only because fundamentals <= len(self.sa) -- confirm
+                n = self.rng.randint(2, max_sample_size)
+                selected_symbols = self.rng.sample(symbol_pool, n)
+                try:
+                    matched_state = match_fn(selected_symbols)
+                except KeyError:  # no existing multistate matches: create one on the fly
+                    new_symbol_combinations.append(selected_symbols)
+                    new_state = add_fn(symbol=None, member_state_symbols=selected_symbols)
+                    # self.sa.compile_lookup_mappings()
+                    try:
+                        m2 = match_fn(selected_symbols)
+                    except KeyError:
+                        raise
+                    else:
+                        self.assertIs(m2, new_state)
+                        self.assertIn(new_state, state_collection, "random seed: {}".format(self.random_seed))
+                    finally:  # undo the mutation so the shared alphabet is left unchanged
+                        state_collection.remove(new_state)
+                        self.sa.compile_lookup_mappings()
+                else:
+                    pre_existing_symbol_combinations.append(selected_symbols)
+
+class DnaStateAlphabetTest(
+        StateAlphabetTester,
+        dendropytest.ExtendedTestCase):
+
+    def setUp(self):
+        self.random_seed = random.randint(0, sys.maxsize)  # seed recorded so failing runs can be reproduced
+        self.rng = random.Random(self.random_seed)
+
+        self.expected_fundamental_state_symbols = ["A", "C", "G", "T", "-"]
+        self.ambiguous_symbol_mappings = collections.OrderedDict()
+        self.ambiguous_symbol_mappings["?"] = "ACGT-"
+        self.ambiguous_symbol_mappings["N"] = "ACGT"
+        self.ambiguous_symbol_mappings["R"] = "AG"
+        self.ambiguous_symbol_mappings["Y"] = "CT"
+        self.ambiguous_symbol_mappings["M"] = "AC"
+        self.ambiguous_symbol_mappings["W"] = "AT"
+        self.ambiguous_symbol_mappings["S"] = "CG"
+        self.ambiguous_symbol_mappings["K"] = "GT"
+        self.ambiguous_symbol_mappings["V"] = "ACG"
+        self.ambiguous_symbol_mappings["H"] = "ACT"
+        self.ambiguous_symbol_mappings["D"] = "AGT"
+        self.ambiguous_symbol_mappings["B"] = "CGT"
+
+        self.polymorphic_symbol_mappings = collections.OrderedDict()
+
+        # note reverse polarity here: from referenced to referencing
+        self.additional_synonyms_map = collections.OrderedDict()
+        self.additional_synonyms_map["N"] = "X"
+
+        self.expected_polymorphic_state_symbols = list(self.polymorphic_symbol_mappings.keys())
+        self.expected_ambiguous_state_symbols = list(self.ambiguous_symbol_mappings.keys())
+
+        self.sa = dendropy.DNA_STATE_ALPHABET
+        self.num_total_states = len(self.expected_fundamental_state_symbols) + len(self.ambiguous_symbol_mappings) + len(self.polymorphic_symbol_mappings)  # fundamentals + ambiguities + polymorphisms
+
+class RnaStateAlphabetTest(
+        StateAlphabetTester,
+        dendropytest.ExtendedTestCase):
+
+    def setUp(self):
+        self.random_seed = random.randint(0, sys.maxsize)  # seed recorded so failing runs can be reproduced
+        self.rng = random.Random(self.random_seed)
+
+        self.expected_fundamental_state_symbols = ["A", "C", "G", "U", "-"]
+        self.ambiguous_symbol_mappings = collections.OrderedDict()
+        self.ambiguous_symbol_mappings["?"] = "ACGU-"
+        self.ambiguous_symbol_mappings["N"] = "ACGU"
+        self.ambiguous_symbol_mappings["R"] = "AG"
+        self.ambiguous_symbol_mappings["Y"] = "CU"
+        self.ambiguous_symbol_mappings["M"] = "AC"
+        self.ambiguous_symbol_mappings["W"] = "AU"
+        self.ambiguous_symbol_mappings["S"] = "CG"
+        self.ambiguous_symbol_mappings["K"] = "GU"
+        self.ambiguous_symbol_mappings["V"] = "ACG"
+        self.ambiguous_symbol_mappings["H"] = "ACU"
+        self.ambiguous_symbol_mappings["D"] = "AGU"
+        self.ambiguous_symbol_mappings["B"] = "CGU"
+
+        self.polymorphic_symbol_mappings = collections.OrderedDict()
+
+        # note reverse polarity here: from referenced to referencing
+        self.additional_synonyms_map = collections.OrderedDict()
+        self.additional_synonyms_map["N"] = "X"
+
+        self.expected_polymorphic_state_symbols = list(self.polymorphic_symbol_mappings.keys())
+        self.expected_ambiguous_state_symbols = list(self.ambiguous_symbol_mappings.keys())
+
+        self.sa = dendropy.RNA_STATE_ALPHABET
+        self.num_total_states = len(self.expected_fundamental_state_symbols) + len(self.ambiguous_symbol_mappings) + len(self.polymorphic_symbol_mappings)  # fundamentals + ambiguities + polymorphisms
+
+class NucleotideStateAlphabetTest(
+        StateAlphabetTester,
+        dendropytest.ExtendedTestCase):
+
+    def setUp(self):
+        self.random_seed = random.randint(0, sys.maxsize)  # seed recorded so failing runs can be reproduced
+        self.rng = random.Random(self.random_seed)
+
+        self.expected_fundamental_state_symbols = ["A", "C", "G", "T", "U", "-"]
+        self.ambiguous_symbol_mappings = collections.OrderedDict()
+        self.ambiguous_symbol_mappings["?"] = "ACGTU-"
+        self.ambiguous_symbol_mappings["N"] = "ACGTU"
+        self.ambiguous_symbol_mappings["R"] = "AG"
+        self.ambiguous_symbol_mappings["Y"] = "CTU"
+        self.ambiguous_symbol_mappings["M"] = "AC"
+        self.ambiguous_symbol_mappings["W"] = "ATU"
+        self.ambiguous_symbol_mappings["S"] = "CG"
+        self.ambiguous_symbol_mappings["K"] = "GTU"
+        self.ambiguous_symbol_mappings["V"] = "ACG"
+        self.ambiguous_symbol_mappings["H"] = "ACTU"
+        self.ambiguous_symbol_mappings["D"] = "AGTU"
+        self.ambiguous_symbol_mappings["B"] = "CGTU"
+
+        self.polymorphic_symbol_mappings = collections.OrderedDict()
+
+        # note reverse polarity here: from referenced to referencing
+        self.additional_synonyms_map = collections.OrderedDict()
+        self.additional_synonyms_map["N"] = "X"
+
+        self.expected_polymorphic_state_symbols = list(self.polymorphic_symbol_mappings.keys())
+        self.expected_ambiguous_state_symbols = list(self.ambiguous_symbol_mappings.keys())
+
+        self.sa = dendropy.NUCLEOTIDE_STATE_ALPHABET
+        self.num_total_states = len(self.expected_fundamental_state_symbols) + len(self.ambiguous_symbol_mappings) + len(self.polymorphic_symbol_mappings)  # fundamentals + ambiguities + polymorphisms
+
+class ProteinStateAlphabetTest(
+        StateAlphabetTester,
+        dendropytest.ExtendedTestCase):
+
+    def setUp(self):
+        self.random_seed = random.randint(0, sys.maxsize)  # seed recorded so failing runs can be reproduced
+        self.rng = random.Random(self.random_seed)
+
+        self.expected_fundamental_state_symbols = [
+                "A", "C", "D", "E", "F", "G", "H", "I",
+                "K", "L", "M", "N", "P", "Q", "R", "S",
+                "T",  "V", "W", "Y", "*", "-",
+                ]
+        self.ambiguous_symbol_mappings = collections.OrderedDict()
+        self.ambiguous_symbol_mappings["?"] = "ACDEFGHIKLMNPQRSTVWY*-"
+        self.ambiguous_symbol_mappings["B"] = "DN"
+        self.ambiguous_symbol_mappings["Z"] = "EQ"
+        self.ambiguous_symbol_mappings["X"] = "ACDEFGHIKLMNPQRSTVWY*"
+
+        self.polymorphic_symbol_mappings = collections.OrderedDict()
+
+        # note reverse polarity here: from referenced to referencing
+        self.additional_synonyms_map = collections.OrderedDict()
+        # self.additional_synonyms_map["N"] = "X"
+
+        self.expected_polymorphic_state_symbols = list(self.polymorphic_symbol_mappings.keys())
+        self.expected_ambiguous_state_symbols = list(self.ambiguous_symbol_mappings.keys())
+
+        self.sa = dendropy.PROTEIN_STATE_ALPHABET
+        self.num_total_states = len(self.expected_fundamental_state_symbols) + len(self.ambiguous_symbol_mappings) + len(self.polymorphic_symbol_mappings)  # fundamentals + ambiguities + polymorphisms
+
+class BinaryStateAlphabetTest(
+        StateAlphabetTester,
+        dendropytest.ExtendedTestCase):
+
+    def setUp(self):
+        self.random_seed = random.randint(0, sys.maxsize)  # seed recorded so failing runs can be reproduced
+        self.rng = random.Random(self.random_seed)
+
+        self.expected_fundamental_state_symbols = ["1", "0"]
+        self.ambiguous_symbol_mappings = collections.OrderedDict()
+
+        self.polymorphic_symbol_mappings = collections.OrderedDict()
+
+        # note reverse polarity here: from referenced to referencing
+        self.additional_synonyms_map = collections.OrderedDict()
+        # self.additional_synonyms_map["N"] = "X"
+
+        self.expected_polymorphic_state_symbols = list(self.polymorphic_symbol_mappings.keys())
+        self.expected_ambiguous_state_symbols = list(self.ambiguous_symbol_mappings.keys())
+
+        self.sa = dendropy.BINARY_STATE_ALPHABET
+        self.num_total_states = len(self.expected_fundamental_state_symbols) + len(self.ambiguous_symbol_mappings) + len(self.polymorphic_symbol_mappings)  # fundamentals + ambiguities + polymorphisms
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_datamodel_taxon.py b/dendropy/test/test_datamodel_taxon.py
new file mode 100644
index 0000000..c59fb84
--- /dev/null
+++ b/dendropy/test/test_datamodel_taxon.py
@@ -0,0 +1,897 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests creation, reading, update, deletion of Taxon and TaxonNamespace objects.
+"""
+
+import collections
+import unittest
+import copy
+from dendropy import Taxon, TaxonNamespace
+from dendropy.test.support import compare_and_validate
+
class TaxonIdentity(compare_and_validate.Comparator, unittest.TestCase):
    """
    Tests that |Taxon| equality and hashing are identity-based: two
    distinct instances are never equal, and hash as distinct keys and
    set elements, even when their labels match.
    """

    def setUp(self):
        # Two *distinct* Taxon objects carrying the same label.
        self.t1 = Taxon("a")
        self.t2 = Taxon("a")

    def test_equal(self):
        # two distinct |Taxon| objects are never equal, even if all
        # member values are the same.
        self.assertNotEqual(self.t1, self.t2)

    def test_hash_dict_membership(self):
        # Equal-labeled but distinct taxa must behave as distinct
        # dictionary keys.
        k = {}
        k[self.t1] = 1
        k[self.t2] = 2
        self.assertEqual(len(k), 2)
        self.assertEqual(k[self.t1], 1)
        self.assertEqual(k[self.t2], 2)
        self.assertIn(self.t1, k)
        self.assertIn(self.t2, k)
        del k[self.t1]
        self.assertNotIn(self.t1, k)
        self.assertIn(self.t2, k)
        self.assertEqual(len(k), 1)
        k1 = {self.t1: 1}
        k2 = {self.t2: 1}
        self.assertIn(self.t1, k1)
        self.assertIn(self.t2, k2)
        self.assertNotIn(self.t2, k1)
        self.assertNotIn(self.t1, k2)

    def test_hash_set_membership(self):
        # Equal-labeled but distinct taxa must behave as distinct set
        # elements.
        k = set()
        k.add(self.t1)
        k.add(self.t2)
        self.assertEqual(len(k), 2)
        self.assertIn(self.t1, k)
        self.assertIn(self.t2, k)
        k.discard(self.t1)
        self.assertNotIn(self.t1, k)
        self.assertIn(self.t2, k)
        self.assertEqual(len(k), 1)
        # Fixed: the original built single-entry *dicts* here (a
        # copy-paste from the dict-membership test above); use sets so
        # this test actually exercises set membership.
        k1 = {self.t1}
        k2 = {self.t2}
        self.assertIn(self.t1, k1)
        self.assertIn(self.t2, k2)
        self.assertNotIn(self.t2, k1)
        self.assertNotIn(self.t1, k2)
+
+# note that compare_and_validate.Comparator must be listed first,
+# otherwise setUp will not be called
class TaxonCloning(compare_and_validate.Comparator, unittest.TestCase):
    """
    Tests copying semantics of |Taxon| objects.

    As exercised here: ``Taxon(other)``, ``copy.deepcopy``, and
    ``clone(2)`` all produce a distinct, deep copy; ``clone(1)`` and
    ``taxon_namespace_scoped_copy()`` return the same object; and
    shallow copying (``copy.copy`` / ``clone(0)``) is disallowed.
    """

    def test_construct_from_another(self):
        t1 = Taxon("a")
        # All three forms should yield a distinct (hence unequal, since
        # Taxon equality is identity-based) object with the same label.
        for t2 in (Taxon(t1), copy.deepcopy(t1), t1.clone(2)):
            self.assertIsNot(t1, t2)
            self.assertNotEqual(t1, t2)
            self.assertEqual(t1.label, t2.label)

    def test_construct_from_another_with_simple_annotations(self):
        t1 = Taxon("a")
        t1.annotations.add_new("a", 0)
        t1.annotations.add_new("b", 1)
        t1.annotations.add_new("c", 3)
        for t2 in (Taxon(t1), copy.deepcopy(t1), t1.clone(2)):
            self.assertIsNot(t1, t2)
            self.assertNotEqual(t1, t2)
            self.assertEqual(t1.label, t2.label)
            self.assertTrue(hasattr(t1, "annotations"))
            self.assertTrue(hasattr(t2, "annotations"))
            self.assertEqual(len(t1.annotations), len(t2.annotations))
            # Annotations must be copied, not shared.
            self.compare_distinct_annotables(t1, t2)

    def test_construct_from_another_with_complex_annotations(self):
        t1 = Taxon("a")
        t1.annotations.add_new("a", 0)
        # Attribute-valued annotation: its value tracks t1.label.
        b = t1.annotations.add_new("b", (t1, "label"), is_attribute=True)
        b.annotations.add_new("c", 3)
        for t2 in (Taxon(t1), copy.deepcopy(t1), t1.clone(2)):
            self.assertIsNot(t1, t2)
            self.assertNotEqual(t1, t2)
            self.assertEqual(t1.label, t2.label)
            self.assertTrue(hasattr(t1, "annotations"))
            self.assertTrue(hasattr(t2, "annotations"))
            self.assertEqual(len(t1.annotations), len(t2.annotations))
            self.compare_distinct_annotables(t1, t2)
            # Mutating one object's label must not leak into the other's
            # attribute-bound annotation: each copy tracks its own owner.
            t1.label = "x"
            self.assertEqual(t1.annotations[1].value, "x")
            self.assertEqual(t2.annotations[1].value, "a")
            t2.label = "z"
            self.assertEqual(t1.annotations[1].value, "x")
            self.assertEqual(t2.annotations[1].value, "z")
            # Restore for the next loop iteration.
            t1.label = "a"

    def test_simple_copy(self):
        # Shallow copies of Taxon objects are explicitly disallowed.
        t1 = Taxon("a")
        with self.assertRaises(TypeError):
            copy.copy(t1)
        with self.assertRaises(TypeError):
            t1.clone(0)

    def test_taxon_namespace_scoped_copy(self):
        # A namespace-scoped "copy" of a taxon is the taxon itself.
        t1 = Taxon("a")
        for t2 in (t1.clone(1), t1.taxon_namespace_scoped_copy()):
            self.assertIs(t2, t1)
+
class TaxonNamespaceTaxonManagement(unittest.TestCase):
    """
    Tests creation, access, iteration, sorting, and deletion of taxa
    managed by a |TaxonNamespace|.
    """

    def setUp(self):
        # Labels deliberately include duplicates ("a", "_", "z") to
        # exercise multi-taxon-per-label handling.
        self.str_labels = ["a", "a", "b", "c", "d", "e", "_", "_", "_", "z", "z", "z"]
        self.taxa = [
                Taxon("t1"), Taxon("t2"), Taxon("t3"),
                ]
        self.taxa_labels = [t.label for t in self.taxa]

    def validate_taxon_concepts(self, tns, labels, respect_order=False):
        # Check that the labels of taxa in ``tns`` match ``labels``:
        # element-by-element when ``respect_order`` is True, otherwise
        # as an (order-insensitive) multiset.
        expected_labels = labels
        discovered_labels = []
        for t in tns._taxa:
            discovered_labels.append(t.label)
        self.assertEqual(len(discovered_labels), len(expected_labels))
        if respect_order:
            for x1, x2 in zip(discovered_labels, expected_labels):
                self.assertEqual(x1, x2)
        else:
            c1 = collections.Counter(discovered_labels)
            c2 = collections.Counter(expected_labels)
            self.assertEqual(c1, c2)

    ### initialization ###

    def test_initialize_from_str_list(self):
        tns = TaxonNamespace(self.str_labels)
        self.validate_taxon_concepts(tns, self.str_labels)

    def test_initialize_from_taxon_list(self):
        tns = TaxonNamespace(self.taxa)
        self.validate_taxon_concepts(tns, self.taxa_labels)
        for t in self.taxa:
            self.assertIn(t, tns._taxa)

    def test_initialize_other_taxon_namespace(self):
        # Constructing from another namespace shares the member taxa.
        tns1 = TaxonNamespace(self.taxa)
        tns2 = TaxonNamespace(tns1)
        self.assertIsNot(tns1, tns2)
        self.validate_taxon_concepts(tns1, self.taxa_labels)
        self.validate_taxon_concepts(tns2, self.taxa_labels)
        for t in self.taxa:
            self.assertIn(t, tns1._taxa)
            self.assertIn(t, tns2._taxa)
        for t1, t2 in zip(tns1, tns2):
            self.assertIs(t1, t2)
        # self.assertEqual(tns1, tns2)

    ### adding ###

    def test_basic_adding(self):
        tns = TaxonNamespace()
        self.assertEqual(len(tns), 0)
        for idx, label in enumerate(self.str_labels):
            tns.add_taxon(Taxon(label=label))
            self.assertEqual(len(tns), idx+1)
        self.validate_taxon_concepts(tns, self.str_labels)

    def test_basic_adding_to_immutable(self):
        # Adding to an immutable namespace must raise and leave the
        # namespace unchanged.
        tns = TaxonNamespace()
        self.assertEqual(len(tns), 0)
        tns.is_mutable = False
        for idx, label in enumerate(self.str_labels):
            with self.assertRaises(TypeError):
                tns.add_taxon(Taxon(label=label))
            self.assertEqual(len(tns), 0)

    def test_add_taxon(self):
        tns = TaxonNamespace()
        for t in self.taxa:
            tns.add_taxon(t)
        self.validate_taxon_concepts(tns, self.taxa_labels)
        for t in self.taxa:
            self.assertIn(t, tns._taxa)

    def test_add_taxa(self):
        tns = TaxonNamespace()
        tns.add_taxa(self.taxa)
        self.validate_taxon_concepts(tns, self.taxa_labels)
        for t in self.taxa:
            self.assertIn(t, tns._taxa)

    def test_add_taxon_duplicate(self):
        # Re-adding an already-managed taxon is a no-op.
        tns = TaxonNamespace(self.taxa)
        self.validate_taxon_concepts(tns, self.taxa_labels)
        tns.add_taxon(self.taxa[0])
        self.assertEqual(len(tns), len(self.taxa))
        for t1, t2 in zip(tns, self.taxa):
            self.assertIs(t1, t2)

    def test_add_taxa_duplicate(self):
        tns = TaxonNamespace(self.taxa)
        self.validate_taxon_concepts(tns, self.taxa_labels)
        tns.add_taxa(self.taxa)
        self.assertEqual(len(tns), len(self.taxa))
        for t1, t2 in zip(tns, self.taxa):
            self.assertIs(t1, t2)

    def test_new_taxon(self):
        tns = TaxonNamespace()
        for idx, label in enumerate(self.str_labels):
            t = tns.new_taxon(label)
            self.assertTrue(isinstance(t, Taxon))
            self.assertEqual(t.label, label)
            self.assertEqual(len(tns), idx+1)
        self.validate_taxon_concepts(tns, self.str_labels)

    def test_new_taxa(self):
        tns = TaxonNamespace()
        tns.new_taxa(self.str_labels)
        self.validate_taxon_concepts(tns, self.str_labels)

    def test_new_taxon_to_immutable(self):
        tns = TaxonNamespace()
        tns.is_mutable = False
        for idx, label in enumerate(self.str_labels):
            with self.assertRaises(TypeError):
                t = tns.new_taxon(label)
            self.assertEqual(len(tns), 0)

    def test_new_taxa_to_immutable(self):
        tns = TaxonNamespace()
        tns.is_mutable = False
        with self.assertRaises(TypeError):
            tns.new_taxa(self.str_labels)
        self.assertEqual(len(tns), 0)

    ### access ###

    def test_len(self):
        tns = TaxonNamespace(self.str_labels)
        self.assertEqual(len(tns), len(self.str_labels))

    def test_getitem_by_index(self):
        tns = TaxonNamespace(self.str_labels)
        for index, label in enumerate(self.str_labels):
            t = tns[index]
            self.assertEqual(t.label, label)

    def test_getitem_by_index_error(self):
        tns = TaxonNamespace(self.str_labels)
        with self.assertRaises(IndexError):
            t = tns[len(self.str_labels)+1]

    def test_getitem_slices(self):
        # Slicing the namespace must behave like slicing the underlying
        # label list.
        tns = TaxonNamespace(self.str_labels)
        slices = [
                (-1,1,-1),
                (2,4),
                (1,),
                (None,1)
                ]
        for s in slices:
            labels = self.str_labels[slice(*s)]
            taxa = tns[slice(*s)]
            taxa_labels = [t.label for t in taxa]
            self.assertEqual(taxa_labels, labels)

    def test_getitem_by_label_error(self):
        # String subscripting is not supported; only indexes/slices.
        tns = TaxonNamespace(self.str_labels)
        with self.assertRaises(ValueError):
            tns[self.str_labels[0]]

#     def test_getitem_by_label_error(self):
#         tns = TaxonNamespace(self.str_labels)
#         check = ["u", "x", "y",]
#         for label in check:
#             assert label not in self.str_labels
#             with self.assertRaises(LookupError):
#                 t = tns[label]

    def test_contains_taxa(self):
        tns = TaxonNamespace(self.taxa)
        for taxon in self.taxa:
            self.assertIn(taxon, tns)

    def test_no_contains_taxa(self):
        # Containment is identity-based: equal-labeled but distinct
        # taxa are not members.
        tns = TaxonNamespace(self.taxa)
        taxa2 = [Taxon(label=t.label) for t in self.taxa]
        for taxon in taxa2:
            self.assertNotIn(taxon, tns)

    def test_has_label(self):
        tns = TaxonNamespace(self.str_labels)
        for label in self.str_labels:
            self.assertTrue(tns.has_taxon_label(label))

    def test_no_has_label(self):
        tns = TaxonNamespace(self.str_labels)
        check = ["u", "x", "y",]
        for label in check:
            assert label not in self.str_labels
            self.assertFalse(tns.has_taxon_label(label))

    def test_has_label_case_sensitivity(self):
        tns = TaxonNamespace(self.str_labels)
        labels_upper = [label.upper() for label in self.str_labels if label.upper() != label]
        assert labels_upper
        for label in labels_upper:
            tns.is_case_sensitive = True
            self.assertFalse(tns.has_taxon_label(label))
            tns.is_case_sensitive = False
            self.assertTrue(tns.has_taxon_label(label))

    def test_has_labels(self):
        tns = TaxonNamespace(self.str_labels)
        self.assertTrue(tns.has_taxa_labels(self.str_labels))

    def test_no_has_labels(self):
        tns = TaxonNamespace(self.str_labels)
        check = ["u", "x", "y",]
        for label in check:
            assert label not in self.str_labels
        self.assertFalse(tns.has_taxa_labels(check))
        self.assertFalse(tns.has_taxa_labels(check + self.str_labels))

    def test_has_labels_case_sensitivity(self):
        tns = TaxonNamespace(self.str_labels)
        labels_upper = [label.upper() for label in self.str_labels if label.upper() != label]
        assert labels_upper
        tns.is_case_sensitive = True
        self.assertFalse(tns.has_taxa_labels(labels_upper))
        tns.is_case_sensitive = False
        self.assertTrue(tns.has_taxa_labels(labels_upper))

    def test_findall_multiple(self):
        tns = TaxonNamespace(self.str_labels)
        multilabels= ["_", "z"]
        for label in multilabels:
            tns.is_case_sensitive=True
            taxa = tns.findall(label=label)
            # NOTE(review): ``collections.Iterable`` is removed in
            # Python 3.10+; left as-is for Python 2/3 compatibility of
            # this era of the codebase.
            self.assertTrue(isinstance(taxa, collections.Iterable))
            self.assertEqual(len(taxa), len([s for s in self.str_labels if s == label]))
            for t in taxa:
                self.assertEqual(t.label, label)

    def test_findall_not_found(self):
        tns = TaxonNamespace(self.str_labels)
        tns.is_case_sensitive=True
        taxa = tns.findall(label="x")
        self.assertEqual(taxa, [])

    def test_get_taxon_by_label(self):
        tns = TaxonNamespace(self.str_labels)
        for label in self.str_labels:
            t = tns.get_taxon(label)
            self.assertEqual(t.label, label)

    def test_get_nonexistant_taxon_by_label(self):
        tns = TaxonNamespace(self.str_labels)
        check = ["u", "x", "y",]
        for label in check:
            assert label not in self.str_labels
            # Fixed: the original passed the whole ``check`` list
            # instead of the individual ``label``, so the per-label
            # lookup was never actually exercised.
            t = tns.get_taxon(label)
            self.assertIs(t, None)

    def test_case_insensitive_get_taxon_by_label(self):
        tns = TaxonNamespace(self.str_labels)
        labels_upper = [label.upper() for label in self.str_labels if label.upper() != label]
        assert labels_upper
        # default: case insensitive
        for label in labels_upper:
            t = tns.get_taxon(label)
            self.assertIsNot(t, None)
            self.assertEqual(t.label.lower(), label.lower())
        # test: case sensitive
        tns.is_case_sensitive = True
        for label in labels_upper:
            t = tns.get_taxon(label)
            self.assertIs(t, None)

    def test_require_taxon_by_label_noadd(self):
        # require_taxon() on an existing label must return the existing
        # taxon without growing the namespace.
        tns = TaxonNamespace(self.str_labels)
        for label in self.str_labels:
            # Fixed: the original called ``get_taxon`` (which never
            # adds), so the no-add behavior of ``require_taxon`` -- the
            # subject of this test -- was never exercised.
            t = tns.require_taxon(label)
            self.assertEqual(t.label, label)
        self.assertEqual(len(tns), len(self.str_labels))
        self.validate_taxon_concepts(tns, self.str_labels)

    def test_require_taxon_by_label_add(self):
        tns = TaxonNamespace(self.str_labels)
        check = ["u", "x", "y",]
        for label in check:
            assert label not in self.str_labels
            t = tns.require_taxon(label)
            self.assertTrue(isinstance(t, Taxon))
            self.assertEqual(t.label, label)
        total = self.str_labels + check
        self.assertEqual(len(tns), len(total))
        self.validate_taxon_concepts(tns, total)

    def test_case_insensitive_require_taxon_by_label1(self):
        tns = TaxonNamespace(self.str_labels)
        labels_upper = [label.upper() for label in self.str_labels if label.upper() != label]
        assert labels_upper
        for label in labels_upper:
            tns.is_case_sensitive = False
            t = tns.require_taxon(label)
            self.assertEqual(t.label.lower(), label.lower())
            self.assertEqual(len(tns), len(self.str_labels))
        self.validate_taxon_concepts(tns, self.str_labels)

    def test_case_insensitive_require_taxon_by_label2(self):
        # Under case-sensitive lookup, upper-cased labels are "new" and
        # get added.
        tns = TaxonNamespace(self.str_labels)
        labels_upper = [label.upper() for label in self.str_labels if label.upper() != label]
        labels_upper = list(set(labels_upper))
        assert labels_upper
        for label in labels_upper:
            tns.is_case_sensitive = True
            t = tns.require_taxon(label)
            self.assertEqual(t.label, label)
        self.validate_taxon_concepts(tns, self.str_labels + labels_upper)

    def test_require_taxon_by_label_add_to_immutable(self):
        tns = TaxonNamespace(self.str_labels)
        tns.is_mutable = False
        check = ["u", "x", "y",]
        for label in check:
            assert label not in self.str_labels
            with self.assertRaises(TypeError):
                t = tns.require_taxon(label)

    def test_get_taxa_by_label(self):
        tns = TaxonNamespace(self.str_labels)
        # label_set = set(self.str_labels)
        # taxa = tns.get_taxa(label_set)
        taxa = tns.get_taxa(self.str_labels + ["u", "x", "y"])
        self.assertEqual(len(taxa), len(self.str_labels))
        tx = [t.label for t in taxa]
        self.assertEqual(tx, self.str_labels)

    def test_get_nonexistant_taxa_by_label(self):
        tns = TaxonNamespace(self.str_labels)
        check = ["u", "x", "y",]
        taxa = tns.get_taxa(check)
        self.assertEqual(len(taxa), 0)

    def test_case_insensitive_get_taxa_by_label(self):
        tns = TaxonNamespace(self.str_labels)
        labels_upper = [label.upper() for label in self.str_labels if label.upper() != label]
        assert labels_upper
        # default: case-insensitive
        t2 = tns.get_taxa(labels_upper)
        self.assertEqual(len(t2), len(labels_upper))
        for t, label in zip(t2, labels_upper):
            self.assertEqual(t.label.lower(), label.lower())
        # test: case sensitive
        tns.is_case_sensitive = True
        t1 = tns.get_taxa(labels_upper)
        self.assertEqual(len(t1), 0)

    ### iteration ###

    def test_iter1(self):
        tns = TaxonNamespace(self.str_labels)
        for t1, label in zip(tns, self.str_labels):
            self.assertEqual(t1.label, label)

    def test_iter2(self):
        tns = TaxonNamespace(self.str_labels)
        for idx, t1 in enumerate(tns):
            self.assertEqual(t1.label, self.str_labels[idx])

    def test_reverse_iter(self):
        tns = TaxonNamespace(self.str_labels)
        r = self.str_labels[:]
        r.reverse()
        assert r != self.str_labels
        for idx, t1 in enumerate(reversed(tns)):
            self.assertEqual(t1.label, r[idx])

    ### sorting ###

    def test_sort(self):
        # In-place sort() of the namespace orders taxa by label.
        r = self.str_labels[:]
        r.sort()
        r.reverse()
        tns = TaxonNamespace(r)
        tns.sort()
        r2 = sorted(r)
        assert r != r2
        for idx, t1 in enumerate(tns):
            self.assertEqual(t1.label, r2[idx])

    def test_reverse(self):
        r = self.str_labels[:]
        r.sort()
        tns = TaxonNamespace(r)
        tns.reverse()
        r2 = r[:]
        r2.reverse()
        assert r != r2
        for idx, t1 in enumerate(tns):
            self.assertEqual(t1.label, r2[idx])

    def test_sorted(self):
        r = self.str_labels[:]
        r.sort()
        r.reverse()
        tns = TaxonNamespace(r)
        r2 = sorted(r)
        assert r != r2
        for idx, t1 in enumerate(sorted(tns)):
            self.assertEqual(t1.label, r2[idx])

    def test_reversed(self):
        r = self.str_labels[:]
        r.sort()
        tns = TaxonNamespace(r)
        r2 = r[:]
        r2.reverse()
        assert r != r2
        for idx, t1 in enumerate(reversed(tns)):
            self.assertEqual(t1.label, r2[idx])

    ### deletion ###

    def test_delete_by_index(self):
        for idx in range(len(self.taxa)):
            tns = TaxonNamespace(self.taxa)
            del tns[idx]
            for idx2, taxon in enumerate(self.taxa):
                if idx2 == idx:
                    self.assertNotIn(taxon, tns)
                else:
                    self.assertIn(taxon, tns)

    def test_remove_taxon(self):
        taxa = [Taxon(s) for s in self.str_labels]
        tns = TaxonNamespace(taxa)
        expected = taxa[:]
        for idx, taxon in enumerate(taxa):
            tns.remove_taxon(taxon)
            expected.remove(taxon)
            self.assertEqual(len(tns), len(expected))
            # Every taxon still expected must remain in the namespace.
            # (Fixed: the original guarded this with an always-true
            # ``if taxon2 in expected`` / unreachable ``elif`` pair.)
            for taxon2 in expected:
                self.assertIn(taxon2, tns)

    def test_remove_taxon_error(self):
        # Removing a taxon not managed by the namespace is an error.
        tns = TaxonNamespace(self.str_labels)
        with self.assertRaises(ValueError):
            tns.remove_taxon(self.taxa[0])

    def test_remove_taxon_label(self):
        # remove_taxon_label() removes only the *first* taxon bearing
        # the label.
        taxa = [Taxon(s) for s in self.str_labels]
        tns = TaxonNamespace(taxa)
        expected = taxa[:]
        for idx, label in enumerate(set(self.str_labels)):
            tns.remove_taxon_label(label)
            for t in taxa:
                if t.label == label and t in expected:
                    expected.remove(t)
                    break
            self.assertEqual(len(tns), len(expected))
            for t1, t2 in zip(tns, expected):
                self.assertIs(t1, t2)

    def test_remove_taxon_label_error(self):
        tns = TaxonNamespace(self.str_labels)
        key = "zzz"
        assert key not in self.str_labels
        with self.assertRaises(LookupError):
            tns.remove_taxon_label(key)

    def test_remove_taxon_label_case_insensitive(self):
        ucase_labels = [s.upper() for s in self.str_labels]
        assert ucase_labels
        assert ucase_labels != self.str_labels
        taxa = [Taxon(s) for s in self.str_labels]
        tns = TaxonNamespace(taxa)
        expected = taxa[:]
        for idx, label in enumerate(set(ucase_labels)):
            if label != label.lower():
                # Case-sensitive lookup of the upper-cased label fails...
                with self.assertRaises(LookupError):
                    tns.is_case_sensitive = True
                    tns.remove_taxon_label(label)
            # ...but case-insensitive removal succeeds.
            tns.is_case_sensitive = False
            tns.remove_taxon_label(label)
            for t in taxa:
                if t.label.upper() == label.upper() and t in expected:
                    expected.remove(t)
                    break
            self.assertEqual(len(tns), len(expected))
            for t1, t2 in zip(tns, expected):
                self.assertIs(t1, t2)

    def test_discard_taxon_label(self):
        taxa = [Taxon(s) for s in self.str_labels]
        tns = TaxonNamespace(taxa)
        expected = taxa[:]
        for idx, label in enumerate(set(self.str_labels)):
            tns.discard_taxon_label(label)
            for t in taxa:
                if t.label == label and t in expected:
                    expected.remove(t)
                    break
            self.assertEqual(len(tns), len(expected))
            for t1, t2 in zip(tns, expected):
                self.assertIs(t1, t2)

    def test_discard_taxon_label_error(self):
        # Unlike remove_taxon_label(), discard of a missing label is a
        # silent no-op.
        tns = TaxonNamespace(self.str_labels)
        key = "zzz"
        assert key not in self.str_labels
        try:
            tns.discard_taxon_label(key)
        except LookupError:
            self.fail()
        else:
            self.validate_taxon_concepts(tns, self.str_labels)

    def test_discard_taxon_label_case_insensitive(self):
        ucase_labels = [s.upper() for s in self.str_labels]
        assert ucase_labels
        assert ucase_labels != self.str_labels
        taxa = [Taxon(s) for s in self.str_labels]
        tns = TaxonNamespace(taxa)
        expected = taxa[:]
        # default: case-insensitive
        for idx, label in enumerate(set(ucase_labels)):
            tns.discard_taxon_label(label)
            for t in taxa:
                if t.label.upper() == label.upper() and t in expected:
                    expected.remove(t)
                    break
            self.assertEqual(len(tns), len(expected))
            for t1, t2 in zip(tns, expected):
                self.assertIs(t1, t2)

    def test_discard_taxon_label_case_sensitive(self):
        ucase_labels = [s.upper() for s in self.str_labels]
        assert ucase_labels
        assert ucase_labels != self.str_labels
        taxa = [Taxon(s) for s in self.str_labels]
        tns = TaxonNamespace(taxa)
        expected = taxa[:]
        # test: case sensitive
        tns.is_case_sensitive = True
        for idx, label in enumerate(set(ucase_labels)):
            if label != label.lower():
                x1 = len(tns)
                try:
                    tns.discard_taxon_label(label)
                except LookupError:
                    self.fail()
                else:
                    # No match under case-sensitivity: nothing removed.
                    self.assertEqual(len(tns), x1)

    def test_clear(self):
        tns = TaxonNamespace(self.str_labels)
        self.assertEqual(len(tns), len(self.str_labels))
        tns.clear()
        self.assertEqual(len(tns), 0)
        x = []
        for t in tns:
            x.append(t)
        self.assertEqual(len(x), 0)
+
class TaxonNamespaceIdentity(unittest.TestCase):
    """
    Tests that |TaxonNamespace| hashing is identity-based: two distinct
    namespaces sharing the same member taxa still hash as distinct dict
    keys and set elements.
    """

    def setUp(self):
        self.str_labels = ["a", "a", "b", "c", "d", "e", "_", "_", "_", "z", "z", "z"]
        self.taxa = [ Taxon(label) for label in self.str_labels ]
        # Two distinct namespaces over the *same* taxa.
        self.tns1 = TaxonNamespace(self.taxa)
        self.tns2 = TaxonNamespace(self.taxa)

    # def test_separate_but_equal(self):
    #     self.assertIsNot(self.tns1, self.tns2)
    #     self.assertEqual(self.tns1, self.tns2)

    # def test_different_labels(self):
    #     self.assertIsNot(self.tns1, self.tns2)
    #     self.assertEqual(self.tns1, self.tns2)
    #     self.tns1.label = "hello"
    #     self.tns2.label = "goodbye"
    #     self.assertNotEqual(self.tns1, self.tns2)
    #     self.tns1.label = self.tns2.label
    #     self.assertIsNot(self.tns1, self.tns2)
    #     self.assertEqual(self.tns1, self.tns2)

    # def test_same_annotations(self):
    #     self.assertIsNot(self.tns1, self.tns2)
    #     self.assertEqual(self.tns1, self.tns2)
    #     self.tns1.annotations.add_new("hello", 0)
    #     self.tns2.annotations.add_new("hello", 0)
    #     # not equal because each |AnnotationSet| has a ``target``
    #     # attribute that holds reference to the object being annotated. As
    #     # these the target objects are necessarily different (even if they
    #     # evaluate being equal), the |AnnotationSet| objects are not
    #     # considered equal, and thus the target objects that have the
    #     # AnnotationSets are not equal.
    #     self.assertNotEqual(self.tns1, self.tns2)

    # def test_different_annotations1(self):
    #     self.assertIsNot(self.tns1, self.tns2)
    #     self.assertEqual(self.tns1, self.tns2)
    #     self.tns1.annotations.add_new("hello", 0)
    #     self.assertNotEqual(self.tns1, self.tns2)

    def test_hash_dict_membership(self):
        # Distinct namespaces must behave as distinct dictionary keys.
        k = {}
        k[self.tns1] = 1
        k[self.tns2] = 2
        self.assertEqual(len(k), 2)
        self.assertEqual(k[self.tns1], 1)
        self.assertEqual(k[self.tns2], 2)
        self.assertIn(self.tns1, k)
        self.assertIn(self.tns2, k)
        del k[self.tns1]
        self.assertNotIn(self.tns1, k)
        self.assertIn(self.tns2, k)
        self.assertEqual(len(k), 1)
        k1 = {self.tns1: 1}
        k2 = {self.tns2: 1}
        self.assertIn(self.tns1, k1)
        self.assertIn(self.tns2, k2)
        self.assertNotIn(self.tns2, k1)
        self.assertNotIn(self.tns1, k2)

    def test_hash_set_membership(self):
        # Distinct namespaces must behave as distinct set elements.
        k = set()
        k.add(self.tns1)
        k.add(self.tns2)
        self.assertEqual(len(k), 2)
        self.assertIn(self.tns1, k)
        self.assertIn(self.tns2, k)
        k.discard(self.tns1)
        self.assertNotIn(self.tns1, k)
        self.assertIn(self.tns2, k)
        self.assertEqual(len(k), 1)
        # Fixed: the original built single-entry *dicts* here (a
        # copy-paste from the dict-membership test above); use sets so
        # this test actually exercises set membership.
        k1 = {self.tns1}
        k2 = {self.tns2}
        self.assertIn(self.tns1, k1)
        self.assertIn(self.tns2, k2)
        self.assertNotIn(self.tns2, k1)
        self.assertNotIn(self.tns1, k2)
+
+# note that compare_and_validate.Comparator must be listed first,
+# otherwise setUp will not be called
class TaxonNamespaceCloning(compare_and_validate.Comparator, unittest.TestCase):
    """
    Tests copying semantics of |TaxonNamespace| objects.

    As exercised here: ``TaxonNamespace(other)``, ``clone(0)``, and
    ``copy.copy`` produce a distinct namespace that *shares* member
    taxa; ``copy.deepcopy`` and ``clone(2)`` deep-copy the member taxa
    as well; ``clone(1)`` and ``taxon_namespace_scoped_copy()`` return
    the same object.
    """

    def setUp(self):
        self.str_labels = ["a", "a", "b", "c", "d", "e", "_", "_", "_", "z", "z", "z"]
        self.taxa = [ Taxon(label) for label in self.str_labels ]
        self.tns1 = TaxonNamespace(self.taxa, label="T1")

    def test_taxon_namespace_scoped_copy(self):
        # A namespace-scoped "copy" of a namespace is the namespace
        # itself.
        for tns2 in (self.tns1.clone(1),
                self.tns1.taxon_namespace_scoped_copy()):
            self.assertIs(tns2, self.tns1)

    def test_construct_from_another(self):
        # (Fixed: removed a redundant construction of ``tns2`` that was
        # immediately overwritten by the loop below.)
        for tns2 in (TaxonNamespace(self.tns1),
                self.tns1.clone(0),
                copy.copy(self.tns1)):
            self.assertIsNot(tns2, self.tns1)
            self.assertEqual(tns2.label, self.tns1.label)
            # Shallow copies share the same member Taxon objects.
            self.assertEqual(tns2._taxa, self.tns1._taxa)
            for t1, t2 in zip(self.tns1, tns2):
                self.assertIs(t1, t2)
            self.compare_distinct_annotables(tns2, self.tns1)

    def test_construct_from_another_different_label(self):
        tns2 = TaxonNamespace(self.tns1, label="T2")
        self.assertIsNot(tns2, self.tns1)
        self.assertNotEqual(tns2.label, self.tns1.label)
        self.assertEqual(self.tns1.label, "T1")
        self.assertEqual(tns2.label, "T2")
        self.assertEqual(tns2._taxa, self.tns1._taxa)
        for t1, t2 in zip(self.tns1, tns2):
            self.assertIs(t1, t2)
        self.compare_distinct_annotables(tns2, self.tns1)

    def test_construct_from_changed_label(self):
        # Relabeling the copy must not affect the original.
        for tns2 in (TaxonNamespace(self.tns1),
                self.tns1.clone(0),
                copy.copy(self.tns1)):
            tns2.label = "T2"
            self.assertNotEqual(tns2.label, self.tns1.label)
            self.assertEqual(self.tns1.label, "T1")
            self.assertEqual(tns2.label, "T2")
            self.assertEqual(tns2._taxa, self.tns1._taxa)
            for t1, t2 in zip(self.tns1, tns2):
                self.assertIs(t1, t2)
            self.compare_distinct_annotables(tns2, self.tns1)

    def test_construct_from_another_with_simple_annotations(self):
        self.tns1.annotations.add_new("A", 1)
        self.tns1.annotations.add_new("B", 2)
        self.tns1.annotations.add_new("C", 3)
        for tns2 in (TaxonNamespace(self.tns1),
                self.tns1.clone(0),
                copy.copy(self.tns1)):
            self.assertIsNot(tns2, self.tns1)
            self.assertEqual(tns2._taxa, self.tns1._taxa)
            for t1, t2 in zip(tns2, self.tns1):
                self.assertIs(t1, t2)
            # Annotations themselves must be copied, not shared.
            self.compare_distinct_annotables(tns2, self.tns1)

    def test_construct_from_another_with_complex_annotations(self):
        self.tns1.annotations.add_new("a", 0)
        # Attribute-valued annotation: its value tracks tns1.label.
        b = self.tns1.annotations.add_new("b", (self.tns1, "label"), is_attribute=True)
        b.annotations.add_new("c", 3)
        self.tns1.annotations.add_new("A", 1)
        self.tns1.annotations.add_new("B", 2)
        self.tns1.annotations.add_new("C", 3)
        for tns2 in (TaxonNamespace(self.tns1), self.tns1.clone(0), copy.copy(self.tns1)):
            self.assertIsNot(tns2, self.tns1)
            self.assertEqual(tns2._taxa, self.tns1._taxa)
            for t1, t2 in zip(tns2, self.tns1):
                self.assertIs(t1, t2)
            self.compare_distinct_annotables(tns2, self.tns1)

    def test_deepcopy_from_another(self):
        # Deep copies duplicate member taxa as well.
        for tns2 in (copy.deepcopy(self.tns1),
                self.tns1.clone(2)):
            self.assertIsNot(tns2, self.tns1)
            self.assertEqual(tns2.label, self.tns1.label)
            self.assertEqual(len(tns2), len(self.tns1))
            for t1, t2 in zip(self.tns1, tns2):
                self.assertIsNot(t1, t2)
                self.assertEqual(t1.label, t2.label)
                self.compare_distinct_annotables(t1, t2)
            self.compare_distinct_annotables(tns2, self.tns1)

    def test_deepcopy_from_another_with_simple_annotations(self):
        self.tns1.annotations.add_new("a", 0)
        self.tns1.annotations.add_new("b", 1)
        self.tns1.annotations.add_new("c", 3)
        for tns2 in (copy.deepcopy(self.tns1),
                self.tns1.clone(2)):
            self.assertIsNot(tns2, self.tns1)
            self.assertEqual(tns2.label, self.tns1.label)
            self.assertEqual(len(tns2), len(self.tns1))
            for t1, t2 in zip(self.tns1, tns2):
                self.assertIsNot(t1, t2)
                self.assertEqual(t1.label, t2.label)
                self.compare_distinct_annotables(t1, t2)
            self.compare_distinct_annotables(tns2, self.tns1)

    def test_deepcopy_from_another_with_complex_annotations(self):
        self.tns1.annotations.add_new("a", 0)
        b = self.tns1.annotations.add_new("b", (self.tns1, "label"), is_attribute=True)
        b.annotations.add_new("c", 3)
        for tns2 in (copy.deepcopy(self.tns1),
                self.tns1.clone(2)):
            self.assertIsNot(tns2, self.tns1)
            self.assertEqual(tns2.label, self.tns1.label)
            self.assertEqual(len(tns2), len(self.tns1))
            for t1, t2 in zip(self.tns1, tns2):
                self.assertIsNot(t1, t2)
                self.assertEqual(t1.label, t2.label)
                self.compare_distinct_annotables(t1, t2)
            self.compare_distinct_annotables(tns2, self.tns1)
            # After a deep copy each namespace's attribute-bound
            # annotation tracks its own owner's label independently.
            self.tns1.label = "x"
            tns2.label = "y"
            self.assertEqual(self.tns1.annotations[1].value, "x")
            self.assertEqual(tns2.annotations[1].value, "y")
            # Restore for the next loop iteration.
            self.tns1.label = "T1"
+
# Allow this test module to be run directly as a script.
if __name__ == "__main__":
    unittest.main()
diff --git a/dendropy/test/test_datamodel_tree_construction_copying_and_identity.py b/dendropy/test/test_datamodel_tree_construction_copying_and_identity.py
new file mode 100644
index 0000000..a968c09
--- /dev/null
+++ b/dendropy/test/test_datamodel_tree_construction_copying_and_identity.py
@@ -0,0 +1,214 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests basic Tree copying etc.
+"""
+
+import unittest
+import dendropy
+import copy
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import compare_and_validate
+
+class TestTreeIdentity(unittest.TestCase):
+
+    def setUp(self):
+        self.t1 = dendropy.Tree()
+        self.t2 = dendropy.Tree()
+
+    def test_equal(self):
+        self.assertNotEqual(self.t1, self.t2)
+
+    def test_hash_dict_membership(self):
+        k = {}
+        k[self.t1] = 1
+        k[self.t2] = 2
+        self.assertEqual(len(k), 2)
+        self.assertEqual(k[self.t1], 1)
+        self.assertEqual(k[self.t2], 2)
+        self.assertIn(self.t1, k)
+        self.assertIn(self.t2, k)
+        del k[self.t1]
+        self.assertNotIn(self.t1, k)
+        self.assertIn(self.t2, k)
+        self.assertEqual(len(k), 1)
+        k1 = {self.t1: 1}
+        k2 = {self.t2: 1}
+        self.assertIn(self.t1, k1)
+        self.assertIn(self.t2, k2)
+        self.assertNotIn(self.t2, k1)
+        self.assertNotIn(self.t1, k2)
+
+    def test_hash_set_membership(self):
+        k = set()
+        k.add(self.t1)
+        k.add(self.t2)
+        self.assertEqual(len(k), 2)
+        self.assertIn(self.t1, k)
+        self.assertIn(self.t2, k)
+        k.discard(self.t1)
+        self.assertNotIn(self.t1, k)
+        self.assertIn(self.t2, k)
+        self.assertEqual(len(k), 1)
+        k1 = {self.t1: 1}
+        k2 = {self.t2: 1}
+        self.assertIn(self.t1, k1)
+        self.assertIn(self.t2, k2)
+        self.assertNotIn(self.t2, k1)
+        self.assertNotIn(self.t1, k2)
+
+class TestTreeCopying(
+        curated_test_tree.CuratedTestTree,
+        compare_and_validate.Comparator,
+        unittest.TestCase):
+    """Tests copy semantics of |Tree|: taxon-namespace-scoped copies
+    share taxa with the original, full deep copies duplicate them."""
+
+    def add_annotations(self, tree):
+        """Decorate every node, every edge, the tree, and each taxon with
+        a mix of simple and attribute-bound annotations so that copy
+        comparisons exercise annotation cloning."""
+        for idx, nd in enumerate(tree):
+            if idx % 2 == 0:
+                nd.edge.label = "E{}".format(idx)
+                nd.edge.length = idx
+            an1 = nd.annotations.add_new("a{}".format(idx),
+                    "{}{}{}".format(nd.label, nd.taxon, idx))
+            an2 = nd.annotations.add_bound_attribute("label")
+            an3 = an1.annotations.add_bound_attribute("name")
+            ae1 = nd.edge.annotations.add_new("a{}".format(idx),
+                    "{}{}".format(nd.edge.label, idx))
+            ae2 = nd.edge.annotations.add_bound_attribute("label")
+            ae3 = ae1.annotations.add_bound_attribute("name")
+        tree.annotations.add_new("a", 0)
+        tree.label = "hello"
+        b = tree.annotations.add_bound_attribute("label")
+        b.annotations.add_new("c", 3)
+        for idx, taxon in enumerate(tree.taxon_namespace):
+            a = taxon.annotations.add_new("!color", str(idx))
+            a.annotations.add_new("setbytest", "a")
+
+    def test_copy(self):
+        """Taxon-namespace-scoped copying duplicates nodes but shares taxa."""
+        tree1, anodes1, lnodes1, inodes1 = self.get_tree(suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        self.add_annotations(tree1)
+        for tree2 in (
+                # tree1.clone(0),
+                # copy.copy(tree1),
+                # tree1.clone(1),
+                # tree1.taxon_namespace_scoped_copy(),
+                dendropy.Tree(tree1),
+                ):
+            self.compare_distinct_trees(tree1, tree2,
+                    taxon_namespace_scoped=True,
+                    compare_tree_annotations=True,
+                    compare_taxon_annotations=False)
+            # Redundant, given the above
+            # But for sanity's sake ...
+            nodes1 = [nd for nd in tree1]
+            nodes2 = [nd for nd in tree2]
+            self.assertEqual(len(nodes1), len(nodes2))
+            for nd1, nd2 in zip(nodes1, nodes2):
+                self.assertIsNot(nd1, nd2)
+                self.assertEqual(nd1.label, nd2.label)
+                # Shared, not copied, taxon objects.
+                self.assertIs(nd1.taxon, nd2.taxon)
+
+    def test_deepcopy_including_namespace(self):
+        """Full deep copies (clone(2) / copy.deepcopy) duplicate taxa too."""
+        tree1, anodes1, lnodes1, inodes1 = self.get_tree(suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        self.add_annotations(tree1)
+        for idx, tree2 in enumerate((
+                tree1.clone(2),
+                copy.deepcopy(tree1),
+                )):
+            self.compare_distinct_trees(tree1, tree2,
+                    taxon_namespace_scoped=False,
+                    compare_tree_annotations=True,
+                    compare_taxon_annotations=False)
+            # Redundant, given the above
+            # But for sanity's sake ...
+            nodes1 = [nd for nd in tree1]
+            nodes2 = [nd for nd in tree2]
+            self.assertEqual(len(nodes1), len(nodes2))
+            for nd1, nd2 in zip(nodes1, nodes2):
+                self.assertIsNot(nd1, nd2)
+                self.assertEqual(nd1.label, nd2.label)
+                # Distinct taxon objects with equal labels.
+                self.assertIsNot(nd1.taxon, nd2.taxon)
+                self.assertEqual(nd1.taxon.label, nd2.taxon.label)
+
+    def test_deepcopy_excluding_namespace(self):
+        """Copy-constructing into a fresh namespace behaves as a deep copy."""
+        tree1, anodes1, lnodes1, inodes1 = self.get_tree(suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        self.add_annotations(tree1)
+        tree2 = dendropy.Tree(tree1, taxon_namespace=dendropy.TaxonNamespace())
+        self.compare_distinct_trees(tree1, tree2,
+                taxon_namespace_scoped=False,
+                compare_tree_annotations=True,
+                compare_taxon_annotations=False)
+
+class TestSpecialTreeConstruction(
+        curated_test_tree.CuratedTestTree,
+        unittest.TestCase):
+
+    def test_construction_from_another_tree_different_label(self):
+        tree1, anodes1, lnodes1, inodes1 = self.get_tree(
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        tree1.label = "tree1"
+        self.assertEqual(tree1.label, "tree1")
+        tree2 = dendropy.Tree(tree1, label="tree2")
+        self.assertEqual(tree2.label, "tree2")
+        self.assertNotEqual(tree1.label, "tree2")
+        self.assertNotEqual(tree1.label, tree2.label)
+
+    def test_construction_from_given_seed_node(self):
+        tree1, anodes1, lnodes1, inodes1 = self.get_tree(
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        tree2 = dendropy.Tree(seed_node=tree1.seed_node)
+        self.assertIs(tree2.seed_node, tree1.seed_node)
+
+    def test_construction_from_given_seed_node(self):
+        tree1, anodes1, lnodes1, inodes1 = self.get_tree(
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        tree2 = dendropy.Tree(seed_node=tree1.seed_node)
+        self.assertIs(tree2.seed_node, tree1.seed_node)
+        self.assertIsNot(tree1.taxon_namespace, tree2.taxon_namespace)
+        self.assertEqual(len(tree1.taxon_namespace), len(tree2.taxon_namespace))
+        for taxon1 in tree1.taxon_namespace:
+            self.assertIn(taxon1, tree2.taxon_namespace)
+        for taxon2 in tree2.taxon_namespace:
+            self.assertIn(taxon2, tree1.taxon_namespace)
+        for nd in tree2:
+            self.assertIn(nd.taxon, tree2.taxon_namespace)
+
+    def test_cloning_construction_with_taxon_namespace(self):
+        tree1, anodes1, lnodes1, inodes1 = self.get_tree(
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        tns = dendropy.TaxonNamespace()
+        tree2 = dendropy.Tree(tree1, taxon_namespace=tns)
+        self.assertIs(tree2.taxon_namespace, tns)
+        self.assertIsNot(tree1.taxon_namespace, tree2.taxon_namespace)
+        self.assertEqual(len(tree1.taxon_namespace), len(tree2.taxon_namespace))
+        for nd in tree2:
+            self.assertIn(nd.taxon, tree2.taxon_namespace)
+            self.assertNotIn(nd.taxon, tree1.taxon_namespace)
+        for nd in tree1:
+            self.assertIn(nd.taxon, tree1.taxon_namespace)
+            self.assertNotIn(nd.taxon, tree2.taxon_namespace)
+
+if __name__ == "__main__":
+    # Allow running this test module directly.
+    unittest.main()
diff --git a/dendropy/test/test_datamodel_tree_edge_fundamentals.py b/dendropy/test/test_datamodel_tree_edge_fundamentals.py
new file mode 100644
index 0000000..1c618df
--- /dev/null
+++ b/dendropy/test/test_datamodel_tree_edge_fundamentals.py
@@ -0,0 +1,169 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests basic Edge operations.
+"""
+
+import copy
+import unittest
+from dendropy.test.support import compare_and_validate
+import dendropy
+
+class EdgeIdentity(unittest.TestCase):
+
+    def setUp(self):
+        self.e1 = dendropy.Edge(label="a")
+        self.e2 = dendropy.Edge(label="a")
+
+    def test_equal(self):
+        # two distinct |Edge| objects are never equal, even if all
+        # member values are the same.
+        self.assertNotEqual(self.e1, self.e2)
+
+    def test_hash_dict_membership(self):
+        k = {}
+        k[self.e1] = 1
+        k[self.e2] = 2
+        self.assertEqual(len(k), 2)
+        self.assertEqual(k[self.e1], 1)
+        self.assertEqual(k[self.e2], 2)
+        self.assertIn(self.e1, k)
+        self.assertIn(self.e2, k)
+        del k[self.e1]
+        self.assertNotIn(self.e1, k)
+        self.assertIn(self.e2, k)
+        self.assertEqual(len(k), 1)
+        k1 = {self.e1: 1}
+        k2 = {self.e2: 1}
+        self.assertIn(self.e1, k1)
+        self.assertIn(self.e2, k2)
+        self.assertNotIn(self.e2, k1)
+        self.assertNotIn(self.e1, k2)
+
+    def test_hash_set_membership(self):
+        k = set()
+        k.add(self.e1)
+        k.add(self.e2)
+        self.assertEqual(len(k), 2)
+        self.assertIn(self.e1, k)
+        self.assertIn(self.e2, k)
+        k.discard(self.e1)
+        self.assertNotIn(self.e1, k)
+        self.assertIn(self.e2, k)
+        self.assertEqual(len(k), 1)
+        k1 = {self.e1: 1}
+        k2 = {self.e2: 1}
+        self.assertIn(self.e1, k1)
+        self.assertIn(self.e2, k2)
+        self.assertNotIn(self.e2, k1)
+        self.assertNotIn(self.e1, k2)
+
+class EdgeCloning(compare_and_validate.Comparator, unittest.TestCase):
+
+    def setUp(self):
+        self.taxa = [dendropy.Taxon(label=label) for label in ["a", "b", "c", "d"]]
+        self.n0 = dendropy.Node(label="0", taxon=self.taxa[0])
+        self.c1 = dendropy.Node(label="1", taxon=None)
+        self.c2 = dendropy.Node(label=None, taxon=self.taxa[1])
+        self.c3 = dendropy.Node(label=None, taxon=None)
+        self.c3 = dendropy.Node(label=None, taxon=self.taxa[2])
+        self.p1 = dendropy.Node(label="-1", taxon=self.taxa[3])
+        self.n0.parent_node = self.p1
+        self.n0.set_child_nodes([self.c1, self.c2])
+        self.c2.set_child_nodes([self.c3])
+        self.nodes = [self.n0, self.c1, self.c2, self.c3, self.p1]
+        for idx, nd in enumerate(self.nodes):
+            if idx % 2 == 0:
+                nd.edge.label = "E{}".format(idx)
+                nd.edge.length = idx
+            an1 = nd.annotations.add_new("a{}".format(idx),
+                    "{}{}{}".format(nd.label, nd.taxon, idx))
+            an2 = nd.annotations.add_bound_attribute("label")
+            an3 = an1.annotations.add_bound_attribute("name")
+            ae1 = nd.edge.annotations.add_new("a{}".format(idx),
+                    "{}{}".format(nd.edge.label, idx))
+            ae2 = nd.edge.annotations.add_bound_attribute("label")
+            ae3 = ae1.annotations.add_bound_attribute("name")
+        self.e0 = self.n0._edge
+
+    def test_unsupported_copy(self):
+        with self.assertRaises(TypeError):
+            self.e0.clone(0)
+        with self.assertRaises(TypeError):
+            copy.copy(self.e0)
+        with self.assertRaises(TypeError):
+            self.e0.clone(1)
+        with self.assertRaises(TypeError):
+            self.e0.taxon_namespace_scoped_copy()
+
+    def test_deepcopy(self):
+        for clone in (
+                self.e0.clone(2),
+                copy.deepcopy(self.e0),
+                ):
+            self.compare_distinct_nodes(
+                    clone._head_node, self.n0,
+                    taxon_namespace_scoped=False,
+                    compare_tree_annotations=True)
+
+class EdgeNodeManagement(unittest.TestCase):
+    """Tests the wiring between edges and their head/tail nodes."""
+
+
+    def test_edge_tail_node_setting(self):
+        """Setting an edge's tail node registers the child on that parent
+        and keeps parent/child/edge references mutually consistent."""
+        parent = dendropy.Node(label="parent")
+        assigned_ch = [dendropy.Node(label=c) for c in ["c1", "c2", "c3"]]
+        for ch in assigned_ch:
+            ch.edge.tail_node = parent
+        for ch in assigned_ch:
+            self.assertEqual(parent._child_nodes, assigned_ch)
+            for nd in parent.child_node_iter():
+                self.assertIs(nd.parent_node, parent)
+                self.assertIs(nd.edge.tail_node, parent)
+                self.assertIs(nd.edge.head_node, nd)
+
+    def test_edge_head_node_setting(self):
+        """Assigning a head node to an edge rebinds the node's edge and
+        inherits the node's existing parent as the edge's tail node."""
+        node1 = dendropy.Node()
+        parent_node1 = dendropy.Node()
+        node1.parent_node = parent_node1
+        self.assertIs(node1._parent_node, parent_node1)
+        edge1 = node1.edge
+        self.assertIs(edge1.head_node, node1)
+        self.assertIs(edge1.tail_node, parent_node1)
+
+        node2 = dendropy.Node()
+        parent_node2 = dendropy.Node()
+        node2.parent_node = parent_node2
+        edge2 = node2.edge
+        self.assertIs(edge2.head_node, node2)
+        self.assertIs(edge2.tail_node, parent_node2)
+
+        # A brand-new edge adopts the node and its parent.
+        new_edge1 = dendropy.Edge()
+        new_edge1.head_node = node1
+        self.assertIs(node1.edge, new_edge1)
+        self.assertIs(new_edge1.head_node, node1)
+        self.assertIs(new_edge1.tail_node, parent_node1)
+
+        # An edge moved from another node is rewired the same way.
+        edge2.head_node = node1
+        self.assertIs(node1.edge, edge2)
+        self.assertIs(edge2.head_node, node1)
+        self.assertIs(edge2.tail_node, parent_node1)
+
+if __name__ == "__main__":
+    # Allow running this test module directly.
+    unittest.main()
diff --git a/dendropy/test/test_datamodel_tree_list.py b/dendropy/test/test_datamodel_tree_list.py
new file mode 100644
index 0000000..791df2f
--- /dev/null
+++ b/dendropy/test/test_datamodel_tree_list.py
@@ -0,0 +1,1174 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests for dendropy.TreeList.
+"""
+
+import copy
+import sys
+import unittest
+import collections
+import dendropy
+import random
+from dendropy.test.support import dendropytest
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import curated_test_tree_list
+from dendropy.test.support import compare_and_validate
+
+class TestTreeListBasicOperations(dendropytest.ExtendedTestCase):
+
+    def test_insert_simple_list_foreign_namespace(self):
+        """Inserting a foreign-namespace tree at any index migrates its
+        taxa into the list's (unchanged) namespace."""
+        for idx in range(6):
+            tlist = curated_test_tree_list.get_tree_list(5)
+            self.assertEqual(len(tlist), 5)
+            self.assertEqual(len(tlist._trees), 5)
+            original_tns = tlist.taxon_namespace
+            tree = curated_test_tree_list.get_tree()
+            tlist.insert(idx, tree)
+            self.assertEqual(len(tlist), 6)
+            self.assertEqual(len(tlist._trees), 6)
+            self.assertIs(tlist.taxon_namespace, original_tns)
+            self.assertIn(tree, tlist)
+            self.assertIs(tree.taxon_namespace, tlist.taxon_namespace)
+            self.assertEqual(len(tlist.taxon_namespace), 7)
+            for t1 in tlist:
+                self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
+                for nd in t1:
+                    self.assertIn(nd.taxon, tlist.taxon_namespace)
+
+    def test_insert_simple_list_native_namespace(self):
+        """Inserting a tree that already shares the list's namespace leaves
+        the namespace untouched."""
+        for idx in range(6):
+            tns = dendropy.TaxonNamespace()
+            tlist = curated_test_tree_list.get_tree_list(5, taxon_namespace=tns)
+            self.assertEqual(len(tlist), 5)
+            self.assertEqual(len(tlist._trees), 5)
+            original_tns = tlist.taxon_namespace
+            tree = curated_test_tree_list.get_tree(taxon_namespace=tns)
+            tlist.insert(idx, tree)
+            self.assertEqual(len(tlist), 6)
+            self.assertEqual(len(tlist._trees), 6)
+            self.assertIs(tlist.taxon_namespace, original_tns)
+            self.assertIn(tree, tlist)
+            self.assertIs(tree.taxon_namespace, tlist.taxon_namespace)
+            self.assertEqual(len(tlist.taxon_namespace), 7)
+            for t1 in tlist:
+                self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
+                for nd in t1:
+                    self.assertIn(nd.taxon, tlist.taxon_namespace)
+
+    def test_append_simple_list_foreign_namespace(self):
+        """Appending foreign-namespace trees migrates them into the list's
+        namespace while keeping the same tree objects."""
+        tlist, trees = curated_test_tree_list.get_tree_list_and_list_of_trees(num_trees=curated_test_tree_list.DEFAULT_NUM_TREES)
+        original_tns = tlist.taxon_namespace
+        for t in trees:
+            tlist.append(t)
+        self.assertEqual(len(tlist), curated_test_tree_list.DEFAULT_NUM_TREES)
+        self.assertIs(tlist.taxon_namespace, original_tns)
+        # self.assertEqual(len(tlist.taxon_namespace), len(tlist[0].tax_labels))
+        self.assertEqual(len(tlist.taxon_namespace), 7)
+        for t1, t2 in zip(tlist, trees):
+            self.assertIs(t1, t2)
+            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
+            for nd in t1:
+                self.assertIn(nd.taxon, tlist.taxon_namespace)
+
+    def test_append_simple_list_same_namespace(self):
+        """Appending trees that share the list's namespace keeps both the
+        tree objects and the namespace identical."""
+        tns = dendropy.TaxonNamespace()
+        tlist, trees = curated_test_tree_list.get_tree_list_and_list_of_trees(
+                num_trees=curated_test_tree_list.DEFAULT_NUM_TREES,
+                tree_list_taxon_namespace=tns,
+                list_of_trees_taxon_namespace=tns)
+        original_tns = tlist.taxon_namespace
+        for t in trees:
+            tlist.append(t)
+        self.assertEqual(len(tlist), curated_test_tree_list.DEFAULT_NUM_TREES)
+        self.assertIs(tlist.taxon_namespace, original_tns)
+        # self.assertEqual(len(tlist.taxon_namespace), len(tlist[0].tax_labels))
+        self.assertEqual(len(tlist.taxon_namespace), 7)
+        for t1, t2 in zip(tlist, trees):
+            self.assertIs(t1, t2)
+            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
+            for nd in t1:
+                self.assertIn(nd.taxon, tlist.taxon_namespace)
+
+    def test_iadd_from_another_tree_list_different_namespace(self):
+        """``+=`` with another TreeList copies its trees (the originals
+        stay in the source) and migrates them into this list's namespace."""
+        tlist = curated_test_tree_list.get_tree_list(num_trees=3)
+        original_tns = tlist.taxon_namespace
+        original_tlist_len = len(tlist)
+        original_tree_labels = [t.label for t in tlist]
+        self.assertEqual(len(original_tree_labels), len(tlist))
+        self.assertEqual(original_tlist_len, 3)
+
+        tlist_source = curated_test_tree_list.get_tree_list(num_trees=5)
+        self.assertEqual(len(tlist_source), 5)
+        source_tree_labels = [t.label for t in tlist_source]
+        self.assertEqual(len(source_tree_labels), len(tlist_source))
+
+        tlist += tlist_source
+
+        self.assertEqual(len(tlist), original_tlist_len + len(tlist_source))
+        self.assertIs(tlist.taxon_namespace, original_tns)
+        # self.assertEqual(len(tlist.taxon_namespace), len(tlist[0].tax_labels))
+        self.assertEqual(len(tlist.taxon_namespace), 7)
+        expected_tree_labels = original_tree_labels + source_tree_labels
+        self.assertEqual(len(tlist), len(expected_tree_labels))
+        for t1, tlabel in zip(tlist, expected_tree_labels):
+            self.assertIn(t1, tlist)
+            self.assertNotIn(t1, tlist_source)
+            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
+            self.assertEqual(t1.label, tlabel)
+            for nd in t1:
+                self.assertIn(nd.taxon, tlist.taxon_namespace)
+
+    def test_iadd_from_list_of_trees_different_namespace(self):
+        """``+=`` with a plain list of trees takes the tree objects
+        themselves and migrates them into this list's namespace."""
+        tlist = curated_test_tree_list.get_tree_list(num_trees=3)
+        original_tns = tlist.taxon_namespace
+        original_tlist_len = len(tlist)
+        original_tree_labels = [t.label for t in tlist]
+        self.assertEqual(len(original_tree_labels), len(tlist))
+        self.assertEqual(original_tlist_len, 3)
+        source_trees = curated_test_tree_list.get_trees(
+                num_trees=5,
+                taxon_namespace=None,
+                label=None,
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        self.assertEqual(len(source_trees), 5)
+        source_tree_labels = [t.label for t in source_trees]
+        self.assertEqual(len(source_tree_labels), len(source_trees))
+
+        tlist += source_trees
+
+        self.assertEqual(len(tlist), original_tlist_len + len(source_trees))
+        self.assertIs(tlist.taxon_namespace, original_tns)
+        # self.assertEqual(len(tlist.taxon_namespace), len(tlist[0].tax_labels))
+        self.assertEqual(len(tlist.taxon_namespace), 7)
+        expected_tree_labels = original_tree_labels + source_tree_labels
+        self.assertEqual(len(tlist), len(expected_tree_labels))
+        for t1, tlabel in zip(tlist, expected_tree_labels):
+            self.assertIn(t1, tlist)
+            if tlabel in source_tree_labels:
+                self.assertIn(t1, source_trees)
+            else:
+                self.assertNotIn(t1, source_trees)
+            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
+            self.assertEqual(t1.label, tlabel)
+            for nd in t1:
+                self.assertIn(nd.taxon, tlist.taxon_namespace)
+
+    def test_add_from_another_tree_list_different_namespace(self):
+        """``+`` of two TreeLists yields a new list in the left operand's
+        namespace: left-hand trees are copied, right-hand trees adopted."""
+        tlist_source1 = curated_test_tree_list.get_tree_list(num_trees=3)
+        original_tns = tlist_source1.taxon_namespace
+        source1_tree_labels = [t.label for t in tlist_source1]
+        self.assertEqual(len(source1_tree_labels), len(tlist_source1))
+        self.assertEqual(len(tlist_source1), 3)
+
+        tlist_source2 = curated_test_tree_list.get_trees(num_trees=5)
+        self.assertEqual(len(tlist_source2), 5)
+        source2_tree_labels = [t.label for t in tlist_source2]
+        self.assertEqual(len(source2_tree_labels), len(tlist_source2))
+
+        tlist = tlist_source1 + tlist_source2
+
+        self.assertEqual(len(tlist_source1), 3)
+        self.assertEqual(len(tlist_source2), 5)
+
+        self.assertEqual(len(tlist), len(tlist_source1) + len(tlist_source2))
+        self.assertIs(tlist.taxon_namespace, original_tns)
+        self.assertEqual(len(tlist.taxon_namespace), 7)
+        expected_tree_labels = source1_tree_labels + source2_tree_labels
+        self.assertEqual(len(tlist), len(expected_tree_labels))
+        for t1, tlabel in zip(tlist, expected_tree_labels):
+            self.assertIn(t1, tlist)
+            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
+            self.assertEqual(t1.label, tlabel)
+            if t1.label in source1_tree_labels:
+                self.assertNotIn(t1, tlist_source1)
+                self.assertNotIn(t1, tlist_source2)
+            else:
+                self.assertNotIn(t1, tlist_source1)
+                self.assertIn(t1, tlist_source2)
+            for nd in t1:
+                self.assertIn(nd.taxon, tlist.taxon_namespace)
+
+    def test_contains(self):
+        """Membership (``in``) reflects the backing ``_trees`` list."""
+        tlist = curated_test_tree_list.get_tree_list(5)
+        self.assertEqual(len(tlist._trees), len(tlist))
+        self.assertEqual(len(tlist), 5)
+        trees = curated_test_tree_list.get_trees(5)
+        self.assertEqual(len(trees), 5)
+        for t in tlist:
+            self.assertTrue(t in tlist._trees)
+            self.assertTrue(t in tlist)
+        for t in trees:
+            self.assertFalse(t in tlist._trees)
+            self.assertFalse(t in tlist)
+        tlist += trees
+        for t in trees:
+            self.assertTrue(t in tlist._trees)
+            self.assertTrue(t in tlist)
+
+    def test_delitem(self):
+        """``del tlist[idx]`` removes exactly one tree, for every valid
+        positive and negative index."""
+        tsize = 5
+        for del_idx in range(-tsize, tsize):
+            tlist = curated_test_tree_list.get_tree_list(tsize)
+            original_trees = list(tlist._trees)
+            self.assertIn(original_trees[del_idx], tlist._trees)
+            del tlist[del_idx]
+            self.assertNotIn(original_trees[del_idx], tlist._trees)
+            self.assertEqual(len(tlist), tsize - 1)
+            del original_trees[del_idx]
+            self.assertEqual(tlist._trees, original_trees)
+
+    def test_iter(self):
+        """Iterating the TreeList yields the same objects as ``_trees``."""
+        tlist = curated_test_tree_list.get_tree_list(5)
+        self.assertEqual(len(tlist), 5)
+        self.assertEqual(len(tlist._trees), len(tlist))
+        for t1, t2 in zip(tlist, tlist._trees):
+            self.assertIs(t1, t2)
+
+    def test_reversed(self):
+        """``reversed()`` mirrors reversed iteration over ``_trees``."""
+        tlist = curated_test_tree_list.get_tree_list(5)
+        self.assertEqual(len(tlist), 5)
+        self.assertEqual(len(tlist._trees), len(tlist))
+        for t1, t2 in zip(reversed(tlist), reversed(tlist._trees)):
+            self.assertIs(t1, t2)
+
+    def test_getitem_simple(self):
+        """Integer indexing (positive and negative) delegates to ``_trees``."""
+        tsize = 5
+        tlist = curated_test_tree_list.get_tree_list(tsize)
+        self.assertEqual(len(tlist), tsize)
+        self.assertEqual(len(tlist._trees), len(tlist))
+        for idx in range(-tsize, tsize):
+            self.assertIs(tlist[idx], tlist._trees[idx])
+            self.assertTrue(isinstance(tlist[idx], dendropy.Tree))
+
+    def test_getitem_slice(self):
+        """Every start/stop/step slice matches slicing of ``_trees``."""
+        tsize = 5
+        tlist = curated_test_tree_list.get_tree_list(tsize)
+        self.assertEqual(len(tlist), tsize)
+        self.assertEqual(len(tlist._trees), len(tlist))
+        for a in range(-tsize, tsize):
+            for b in range(-tsize, tsize):
+                for step in range(-tsize, tsize):
+                    if step == 0:
+                        continue
+                    tt = tlist[a:b:step]
+                    k = tlist._trees[a:b:step]
+                    self.assertEqual(len(tt), len(k))
+                    for t1, t2 in zip(tt, k):
+                        self.assertIn(t1, tlist)
+                        self.assertIn(t1, tlist._trees)
+                        self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
+
+    def test_setitem_simple(self):
+        """Assigning ``tlist[idx] = tree`` replaces the old tree and
+        migrates the new tree into the list's namespace."""
+        tsize = 5
+        for idx in range(-tsize, tsize):
+            tlist = curated_test_tree_list.get_tree_list(tsize)
+            self.assertEqual(len(tlist), tsize)
+            self.assertEqual(len(tlist._trees), len(tlist))
+            old_tree = tlist[idx]
+            new_tree = curated_test_tree_list.get_tree()
+            tlist[idx] = new_tree
+            self.assertIs(tlist[idx], new_tree)
+            self.assertIsNot(tlist[idx], old_tree)
+            self.assertIn(new_tree, tlist)
+            self.assertNotIn(old_tree, tlist)
+            self.assertIs(new_tree.taxon_namespace,
+                    tlist.taxon_namespace)
+            self.assertEqual(len(tlist.taxon_namespace), 7)
+            for tree in tlist:
+                for nd in tree:
+                    self.assertIn(nd.taxon, tlist.taxon_namespace)
+
+    def test_setitem_slice_from_list(self):
+        """Slice assignment from a plain list of trees behaves like slice
+        assignment on a Python list, with namespace migration."""
+        tsize = 5
+        for a in range(-tsize, tsize):
+            for b in range(-tsize, tsize):
+                for step in range(-tsize, tsize):
+                    if step == 0:
+                        continue
+                    slice_obj = slice(a, b, step)
+                    slice_len = len(range(*slice_obj.indices(tsize)))
+                    if slice_len <= 0:
+                        continue
+                    tlist = curated_test_tree_list.get_tree_list(tsize)
+                    self.assertEqual(len(tlist), tsize)
+                    self.assertEqual(len(tlist._trees), len(tlist))
+                    copy_list = list(tlist._trees)
+                    source = curated_test_tree_list.get_trees(slice_len)
+                    tlist[a:b:step] = source
+                    copy_list[a:b:step] = source
+                    expected_tree_labels = [t.label for t in copy_list]
+                    self.assertEqual(len(tlist), len(copy_list))
+                    self.assertEqual(len(tlist), len(tlist._trees))
+                    self.assertEqual(len(tlist.taxon_namespace), 7)
+                    for t1, t2, tlabel in zip(tlist, copy_list, expected_tree_labels):
+                        self.assertIs(t1, t2)
+                        self.assertIn(t1, tlist)
+                        self.assertIn(t1, tlist._trees)
+                        self.assertEqual(t1.label, tlabel)
+                        self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
+                        for nd in t1:
+                            self.assertIn(nd.taxon, tlist.taxon_namespace)
+
+    def test_setitem_slice_from_tree_list(self):
+        """Slice assignment from another TreeList copies the source trees
+        (the originals stay in the source list) into this namespace."""
+        tsize = 5
+        for a in range(-tsize, tsize):
+            for b in range(-tsize, tsize):
+                for step in range(-tsize, tsize):
+                    if step == 0:
+                        continue
+                    slice_obj = slice(a, b, step)
+                    slice_indexes = list(range(*slice_obj.indices(tsize)))
+                    slice_len = len(slice_indexes)
+                    if slice_len <= 0:
+                        continue
+                    tlist = curated_test_tree_list.get_tree_list(tsize)
+                    self.assertEqual(len(tlist), tsize)
+                    self.assertEqual(len(tlist._trees), len(tlist))
+                    copy_list = list(tlist._trees)
+                    source = curated_test_tree_list.get_tree_list(slice_len)
+                    copy_list[a:b:step] = source._trees
+                    tlist[a:b:step] = source
+                    expected_tree_labels = [t.label for t in copy_list]
+                    self.assertEqual(len(tlist), len(copy_list))
+                    self.assertEqual(len(tlist), len(tlist._trees))
+                    self.assertEqual(len(tlist.taxon_namespace), 7)
+                    for idx, (t1, t2, tlabel) in enumerate(zip(tlist, copy_list, expected_tree_labels)):
+                        self.assertIn(t1, tlist)
+                        self.assertIn(t1, tlist._trees)
+                        self.assertEqual(t1.label, tlabel)
+                        self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
+                        if idx in slice_indexes:
+                            # Replaced positions hold copies, not the
+                            # source's own tree objects.
+                            self.assertIsNot(t1, t2)
+                            self.assertIn(t1, tlist)
+                            self.assertIn(t1, tlist._trees)
+                            self.assertNotIn(t2, tlist)
+                            self.assertNotIn(t2, tlist._trees)
+                            self.assertNotIn(t1, source)
+                            self.assertNotIn(t1, source._trees)
+                        else:
+                            self.assertIs(t1, t2)
+                        for nd in t1:
+                            self.assertIn(nd.taxon, tlist.taxon_namespace)
+
+    def test_clear(self):
+        tlist = curated_test_tree_list.get_tree_list(5)
+        self.assertEqual(len(tlist._trees), 5)
+        tlist.clear()
+        self.assertEqual(len(tlist), 0)
+        self.assertEqual(len(tlist._trees), 0)
+
+    def test_extend_from_another_tree_list_different_namespace(self):
+        tlist = curated_test_tree_list.get_tree_list(num_trees=3)
+        original_tns = tlist.taxon_namespace
+        original_tlist_len = len(tlist)
+        original_tree_labels = [t.label for t in tlist]
+        self.assertEqual(len(original_tree_labels), len(tlist))
+        self.assertEqual(original_tlist_len, 3)
+
+        tlist_source = curated_test_tree_list.get_tree_list(num_trees=5)
+        self.assertEqual(len(tlist_source), 5)
+        source_tree_labels = [t.label for t in tlist_source]
+        self.assertEqual(len(source_tree_labels), len(tlist_source))
+
+        tlist.extend(tlist_source)
+
+        self.assertEqual(len(tlist), original_tlist_len + len(tlist_source))
+        self.assertIs(tlist.taxon_namespace, original_tns)
+        # self.assertEqual(len(tlist.taxon_namespace), len(tlist[0].tax_labels))
+        self.assertEqual(len(tlist.taxon_namespace), 7)
+        expected_tree_labels = original_tree_labels + source_tree_labels
+        self.assertEqual(len(tlist), len(expected_tree_labels))
+        for t1, tlabel in zip(tlist, expected_tree_labels):
+            self.assertIn(t1, tlist)
+            self.assertNotIn(t1, tlist_source)
+            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
+            self.assertEqual(t1.label, tlabel)
+            for nd in t1:
+                self.assertIn(nd.taxon, tlist.taxon_namespace)
+
+    def test_extend_from_list_of_trees_different_namespace(self):
+        tlist = curated_test_tree_list.get_tree_list(num_trees=3)
+        original_tns = tlist.taxon_namespace
+        original_tlist_len = len(tlist)
+        original_tree_labels = [t.label for t in tlist]
+        self.assertEqual(len(original_tree_labels), len(tlist))
+        self.assertEqual(original_tlist_len, 3)
+        source_trees = curated_test_tree_list.get_trees(
+                num_trees=5,
+                taxon_namespace=None,
+                label=None,
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        self.assertEqual(len(source_trees), 5)
+        source_tree_labels = [t.label for t in source_trees]
+        self.assertEqual(len(source_tree_labels), len(source_trees))
+
+        tlist.extend(source_trees)
+
+        self.assertEqual(len(tlist), original_tlist_len + len(source_trees))
+        self.assertIs(tlist.taxon_namespace, original_tns)
+        # self.assertEqual(len(tlist.taxon_namespace), len(tlist[0].tax_labels))
+        self.assertEqual(len(tlist.taxon_namespace), 7)
+        expected_tree_labels = original_tree_labels + source_tree_labels
+        self.assertEqual(len(tlist), len(expected_tree_labels))
+        for t1, tlabel in zip(tlist, expected_tree_labels):
+            self.assertIn(t1, tlist)
+            if tlabel in source_tree_labels:
+                self.assertIn(t1, source_trees)
+            else:
+                self.assertNotIn(t1, source_trees)
+            self.assertIs(t1.taxon_namespace, tlist.taxon_namespace)
+            self.assertEqual(t1.label, tlabel)
+            for nd in t1:
+                self.assertIn(nd.taxon, tlist.taxon_namespace)
+
+    def test_index(self):
+        tlist = curated_test_tree_list.get_tree_list(5)
+        for idx, t in enumerate(tlist):
+            self.assertIs(t, tlist[idx])
+            self.assertEqual(tlist.index(t), idx)
+
+    def test_pop1(self):
+        tlist = curated_test_tree_list.get_tree_list(5)
+        k = tlist[-1]
+        t = tlist.pop()
+        self.assertIs(t, k)
+        self.assertEqual(len(tlist), 4)
+        self.assertNotIn(t, tlist)
+
+    def test_pop2(self):
+        for idx in range(5):
+            tlist = curated_test_tree_list.get_tree_list(5)
+            k = tlist[idx]
+            t = tlist.pop(idx)
+            self.assertIs(t, k)
+            self.assertEqual(len(tlist), 4)
+            self.assertNotIn(t, tlist)
+
+    def test_remove(self):
+        tlist = curated_test_tree_list.get_tree_list(5)
+        t = tlist[0]
+        tlist.remove(t)
+        self.assertEqual(len(tlist), 4)
+        self.assertNotIn(t, tlist)
+
+    def test_remove(self):
+        tlist = curated_test_tree_list.get_tree_list(5)
+        clist = list(tlist._trees)
+        tlist.reverse()
+        clist.reverse()
+        for t1, t2 in zip(tlist, clist):
+            self.assertIs(t1, t2)
+
+    def test_sort(self):
+        for r in (True, False):
+            tlist = curated_test_tree_list.get_tree_list(5)
+            clist = list(tlist._trees)
+            tlist.sort(key=lambda x: x.label, reverse=r)
+            clist.sort(key=lambda x: x.label, reverse=r)
+            for t1, t2 in zip(tlist, clist):
+                self.assertIs(t1, t2)
+
class TreeListCreatingAndCloning(
        curated_test_tree.CuratedTestTree,
        compare_and_validate.Comparator,
        unittest.TestCase):
    """Tests TreeList construction and clone()/copy behavior.

    A tree list decorated with annotations at every level (nodes, edges,
    trees, taxa, and the list itself) is copied at varying depths and the
    copies are compared against the original for identity or fidelity.
    """

    def add_tree_annotations(self, tree):
        # Decorate every node and edge with a mix of plain and
        # attribute-bound annotations, so that clones can be checked
        # for annotation fidelity.
        for idx, nd in enumerate(tree):
            if idx % 2 == 0:
                # only even-indexed nodes get edge labels/lengths
                nd.edge.label = "E{}".format(idx)
                nd.edge.length = idx
            an1 = nd.annotations.add_new("a{}".format(idx),
                    "{}{}{}".format(nd.label, nd.taxon, idx))
            an2 = nd.annotations.add_bound_attribute("label")
            an3 = an1.annotations.add_bound_attribute("name")
            ae1 = nd.edge.annotations.add_new("a{}".format(idx),
                    "{}{}".format(nd.edge.label, idx))
            ae2 = nd.edge.annotations.add_bound_attribute("label")
            ae3 = ae1.annotations.add_bound_attribute("name")
        tree.annotations.add_new("a", 0)
        tree.label = "hello"
        b = tree.annotations.add_bound_attribute("label")
        b.annotations.add_new("c", 3)

    def add_tree_list_annotations(self, tree_list):
        # Annotate the tree list itself, including a label-bound annotation.
        tree_list.annotations.add_new("a", 0)
        tree_list.label = "hello"
        b = tree_list.annotations.add_bound_attribute("label")
        b.annotations.add_new("c", 3)

    def add_taxon_namespace_annotations(self, tns):
        # Annotate each taxon in the namespace, with a nested annotation.
        for idx, taxon in enumerate(tns):
            a = taxon.annotations.add_new("!color", str(idx))
            a.annotations.add_new("setbytest", "a")

    def setUp(self):
        self.num_trees = 5
        tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                suppress_internal_node_taxa=False,
                suppress_leaf_node_taxa=False)
        self.original_taxon_labels = [t.label for t in tree1.taxon_namespace]
        # one taxon per node is expected when no taxa are suppressed
        assert len(self.original_taxon_labels) == len(anodes1)

    def get_tree_list(self):
        """Build and return a fully-annotated TreeList of self.num_trees
        trees sharing a single taxon namespace."""
        tlist1 = dendropy.TreeList()
        self.num_trees = 5
        for idx in range(self.num_trees):
            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False,
                    taxon_namespace=tlist1.taxon_namespace)
            self.add_tree_annotations(tree1)
            tlist1.append(tree1)
        self.add_tree_list_annotations(tlist1)
        self.add_taxon_namespace_annotations(tlist1.taxon_namespace)
        return tlist1

    def test_shallow_copy_with_initializer_list(self):
        # Constructing from an existing list of trees keeps the same tree
        # objects but binds them to the new list's taxon namespace.
        tlist1 = self.get_tree_list()
        trees = tlist1._trees
        tlist2 = dendropy.TreeList(trees)
        self.assertEqual(len(tlist2), self.num_trees)
        for tcopy, toriginal in zip(tlist2, trees):
            self.assertIs(tcopy, toriginal)
            self.assertIs(tcopy.taxon_namespace, tlist2.taxon_namespace)

    def test_clone0(self):
        # clone(0): shallowest copy -- same namespace, same tree objects.
        tlist1 = self.get_tree_list()
        for tlist2 in (
                tlist1.clone(0),
                ):
            self.assertIs(tlist2.taxon_namespace, tlist1.taxon_namespace)
            self.assertEqual(len(tlist2), self.num_trees)
            for tcopy, toriginal in zip(tlist2, tlist1):
                self.assertIs(tcopy, toriginal)
                self.assertIs(tcopy.taxon_namespace, tlist2.taxon_namespace)

    def test_taxon_namespace_scoped_copy(self):
        # clone(1) and its equivalents: distinct trees/annotations compared
        # with taxon_namespace_scoped=True (namespace shared).
        tlist1 = self.get_tree_list()
        for tlist2 in (
                tlist1.clone(1),
                dendropy.TreeList(tlist1),
                tlist1.taxon_namespace_scoped_copy(),):
            self.compare_distinct_tree_list(tlist2, tlist1,
                    taxon_namespace_scoped=True,
                    compare_tree_annotations=True,
                    compare_taxon_annotations=True)

    def test_deepcopy_including_namespace(self):
        # clone(2) and copy.deepcopy: fully distinct copies, namespace
        # not shared (taxon_namespace_scoped=False).
        tlist1 = self.get_tree_list()
        for idx, tlist2 in enumerate((
                tlist1.clone(2),
                copy.deepcopy(tlist1),
                )):
            self.compare_distinct_tree_list(tlist2, tlist1,
                    taxon_namespace_scoped=False,
                    compare_tree_annotations=True,
                    compare_taxon_annotations=True)

    def test_deepcopy_excluding_namespace(self):
        # Constructing with an explicit fresh TaxonNamespace: deep copy of
        # trees, but taxon annotations are not expected to carry over.
        tlist1 = self.get_tree_list()
        tlist2 = dendropy.TreeList(tlist1,
                taxon_namespace=dendropy.TaxonNamespace())
        self.compare_distinct_tree_list(tlist2, tlist1,
                taxon_namespace_scoped=False,
                compare_tree_annotations=True,
                compare_taxon_annotations=False)
+
class TreeListIdentity(unittest.TestCase):
    """Tests TreeList equality and identity-based hashing.

    Two TreeList objects compare equal when they share a taxon namespace
    and hold the same tree objects; hashing is identity-based, so equal
    lists are still distinct dict keys and set members.
    """

    def setUp(self):
        self.tns = dendropy.TaxonNamespace()
        self.t1 = dendropy.TreeList(label="a", taxon_namespace=self.tns)
        self.t2 = dendropy.TreeList(label="a", taxon_namespace=self.tns)
        self.t3 = dendropy.TreeList(label="a")

    def test_equal(self):
        # two distinct |TreeList| objects are equal
        # if they have the same namespace and trees
        trees = [dendropy.Tree() for i in range(5)]
        for tree in trees:
            self.t1._trees.append(tree)
            self.t2._trees.append(tree)
        self.assertEqual(self.t1, self.t2)

    def test_unequal1(self):
        # same namespace but distinct tree objects: not equal
        trees1 = [dendropy.Tree() for i in range(5)]
        for tree in trees1:
            self.t1._trees.append(tree)
        trees2 = [dendropy.Tree() for i in range(5)]
        for tree in trees2:
            self.t2._trees.append(tree)
        self.assertNotEqual(self.t1, self.t2)

    def test_unequal2(self):
        # same tree objects but different namespaces: not equal
        trees1 = [dendropy.Tree() for i in range(5)]
        for tree in trees1:
            self.t1._trees.append(tree)
            self.t3._trees.append(tree)
        self.assertNotEqual(self.t1, self.t3)

    def test_hash_dict_membership(self):
        # equal-but-distinct lists must behave as distinct dict keys
        k = {}
        k[self.t1] = 1
        k[self.t2] = 2
        self.assertEqual(len(k), 2)
        self.assertEqual(k[self.t1], 1)
        self.assertEqual(k[self.t2], 2)
        self.assertIn(self.t1, k)
        self.assertIn(self.t2, k)
        del k[self.t1]
        self.assertNotIn(self.t1, k)
        self.assertIn(self.t2, k)
        self.assertEqual(len(k), 1)
        k1 = {self.t1: 1}
        k2 = {self.t2: 1}
        self.assertIn(self.t1, k1)
        self.assertIn(self.t2, k2)
        self.assertNotIn(self.t2, k1)
        self.assertNotIn(self.t1, k2)

    def test_hash_set_membership(self):
        # equal-but-distinct lists must behave as distinct set members
        k = set()
        k.add(self.t1)
        k.add(self.t2)
        self.assertEqual(len(k), 2)
        self.assertIn(self.t1, k)
        self.assertIn(self.t2, k)
        k.discard(self.t1)
        self.assertNotIn(self.t1, k)
        self.assertIn(self.t2, k)
        self.assertEqual(len(k), 1)
        # Fixed: these were dict literals copy-pasted from the dict-membership
        # test above; use actual sets since this test covers set membership.
        k1 = {self.t1}
        k2 = {self.t2}
        self.assertIn(self.t1, k1)
        self.assertIn(self.t2, k2)
        self.assertNotIn(self.t2, k1)
        self.assertNotIn(self.t1, k2)
+
class TestTreeListUpdateTaxonNamespace(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """Tests TreeList.update_taxon_namespace().

    setUp wires node taxa directly (bypassing append()), so the taxa are
    *not* registered in the list's namespace; update_taxon_namespace()
    must then add exactly the expected taxa/labels.
    """

    def setUp(self):
        trees = []
        for idx in range(5):
            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                    suppress_internal_node_taxa=True,
                    suppress_leaf_node_taxa=True)
            trees.append(tree1)
        self.expected_labels = set()
        self.expected_taxa = set()
        # Node-label -> taxon-label remapping, applied only to the last
        # tree (tree_idx > 3).  Both "<NONE>" and None values resolve to a
        # single shared None-labeled taxon via the registry below.
        node_label_to_taxon_label_map = {
            "a" : "z01",
            "b" : "<NONE>",
            "c" : "z03",
            "e" : "z04",
            "f" : "z05",
            "g" : "z06",
            "h" : None,
            "i" : None,
            "j" : "z09",
            "k" : "z10",
            "l" : "z11",
            "m" : "<NONE>",
            "n" : None,
            "o" : "z14",
            "p" : "z15",
                }
        registry = {}
        for tree_idx, tree in enumerate(trees):
            for nd in tree:
                if nd.label is not None:
                    if tree_idx > 3:
                        # only the final tree gets remapped labels
                        nd.label = node_label_to_taxon_label_map[nd.label]
                    if nd.label == "<NONE>":
                        # share one None-labeled taxon across all "<NONE>" nodes
                        try:
                            t = registry[None]
                        except KeyError:
                            t = dendropy.Taxon(label=None)
                            registry[None] = t
                        self.expected_labels.add(None)
                    else:
                        # one taxon per distinct (possibly remapped) label
                        try:
                            t = registry[nd.label]
                        except KeyError:
                            t = dendropy.Taxon(label=nd.label)
                            registry[nd.label] = t
                        self.expected_labels.add(nd.label)
                    nd.taxon = t
                    self.expected_taxa.add(nd.taxon)
        self.tree_list = dendropy.TreeList()
        # assign backing store directly so no taxon migration occurs
        self.tree_list._trees = trees

    def test_noop_update_with_no_taxa(self):
        # With taxa suppressed and a shared (empty) namespace,
        # update_taxon_namespace() must change nothing.
        trees = []
        tns = dendropy.TaxonNamespace()
        for idx in range(5):
            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
                    suppress_internal_node_taxa=True,
                    suppress_leaf_node_taxa=True,
                    taxon_namespace=tns)
            trees.append(tree1)
        tlst = dendropy.TreeList(taxon_namespace=tns)
        tlst._trees = trees
        original_tns = tlst.taxon_namespace
        self.assertEqual(len(original_tns), 0)
        tlst.update_taxon_namespace()
        self.assertIs(tlst.taxon_namespace, original_tns)
        for tree in tlst:
            self.assertIs(tree.taxon_namespace, tlst.taxon_namespace)
        self.assertEqual(len(original_tns), 0)

    def test_update(self):
        # Calling update_taxon_namespace() repeatedly must be idempotent:
        # the namespace ends up with exactly the expected taxa and labels.
        original_tns = self.tree_list.taxon_namespace
        self.assertEqual(len(original_tns), 0)
        self.tree_list.update_taxon_namespace()
        self.tree_list.update_taxon_namespace()
        self.tree_list.update_taxon_namespace()
        for tree in self.tree_list:
            self.assertIs(tree.taxon_namespace, self.tree_list.taxon_namespace)
        self.assertIs(self.tree_list.taxon_namespace, original_tns)
        new_taxa = [t for t in original_tns]
        new_labels = [t.label for t in original_tns]
        self.assertCountEqual(new_taxa, self.expected_taxa)
        self.assertCountEqual(new_labels, self.expected_labels)
+
class TestTreeListMigrateAndReconstructTaxonNamespace(
        curated_test_tree.CuratedTestTree,
        dendropytest.ExtendedTestCase):
    """Tests migrate_taxon_namespace() and reconstruct_taxon_namespace().

    The fixture deliberately uses duplicate taxon labels ("a", "2"),
    case variants ("b"/"B", "h"/"H", "j"/"J") and None labels, so that
    unification by label -- case-sensitive and case-insensitive -- can be
    distinguished from plain (non-unifying) reconstruction.
    """

    def setUp(self):
        tns = dendropy.TaxonNamespace()
        trees = []
        for idx in range(8):
            tree, anodes, lnodes, inodes = self.get_tree(
                    suppress_internal_node_taxa=True,
                    suppress_leaf_node_taxa=True,
                    taxon_namespace=tns)
            trees.append(tree)
        # node label -> taxon label; duplicates and case variants are
        # intentional (see class docstring)
        self.node_label_to_taxon_label_map = {
            "a" : "a",
            "b" : "a",
            "c" : "2",
            "e" : "2",
            "f" : "b",
            "g" : "B",
            "h" : "B",
            "i" : "h",
            "j" : "H",
            "k" : "h",
            "l" : None,
            "m" : None,
            "n" : "H",
            "o" : "J",
            "p" : "j",
                }
        self.original_taxa = []
        registry = {}
        for tree in trees:
            for idx, nd in enumerate(tree):
                # one taxon per node label, shared across all trees; each
                # node also remembers its pre-migration taxon
                try:
                    t = registry[nd.label]
                except KeyError:
                    taxon_label = self.node_label_to_taxon_label_map[nd.label]
                    t = dendropy.Taxon(label=taxon_label)
                    registry[nd.label] = t
                    self.original_taxa.append(t)
                tree.taxon_namespace.add_taxon(t)
                nd.taxon = t
                nd.original_taxon = t
        assert len(tree.taxon_namespace) == len(self.node_label_to_taxon_label_map)
        assert len(tree.taxon_namespace) == len(self.original_taxa)
        self.tree_list = dendropy.TreeList(taxon_namespace=tns)
        # assign backing store directly; trees already share the namespace
        self.tree_list._trees = trees

    def verify_taxon_namespace_reconstruction(self,
            unify_taxa_by_label=False,
            case_sensitive_label_mapping=True,
            original_tns=None,
            redundant_taxa=False):
        """Verify that every node received a fresh taxon consistent with
        the requested unification/case-sensitivity settings, and that the
        namespace holds exactly the expected label multiset.
        """
        # Build the label multiset we expect the new namespace to contain.
        if unify_taxa_by_label:
            if not case_sensitive_label_mapping:
                expected_labels = []
                for label in self.node_label_to_taxon_label_map.values():
                    if label is None:
                        expected_labels.append(label)
                    else:
                        # case-insensitive: collapse case variants
                        label = label.upper()
                        if label not in expected_labels:
                            expected_labels.append(label)
            else:
                expected_labels = list(set(label for label in self.node_label_to_taxon_label_map.values()))
        else:
            expected_labels = [label for label in self.node_label_to_taxon_label_map.values()]
        for tree in self.tree_list:
            seen_taxa = []
            self.assertIs(tree.taxon_namespace, self.tree_list.taxon_namespace)
            for nd in tree:
                # every node must have been given a *new* taxon object
                self.assertIsNot(nd.taxon, nd.original_taxon)
                if not case_sensitive_label_mapping and nd.taxon.label is not None:
                    self.assertEqual(nd.taxon.label.upper(), nd.original_taxon.label.upper())
                    self.assertEqual(self.node_label_to_taxon_label_map[nd.label].upper(), nd.taxon.label.upper())
                else:
                    self.assertEqual(nd.taxon.label, nd.original_taxon.label)
                    self.assertEqual(self.node_label_to_taxon_label_map[nd.label], nd.taxon.label)
                # old taxa are gone from the namespace, new ones are in it
                self.assertNotIn(nd.original_taxon, tree.taxon_namespace)
                self.assertIn(nd.original_taxon, self.original_taxa)
                self.assertIn(nd.taxon, tree.taxon_namespace)
                self.assertNotIn(nd.taxon, self.original_taxa)
                if original_tns is not None:
                    self.assertNotIn(nd.taxon, original_tns)
                if nd.taxon not in seen_taxa:
                    seen_taxa.append(nd.taxon)
                else:
                    # a repeated taxon object is legal only when unifying
                    # by label or when the fixture has redundant taxa
                    self.assertTrue(unify_taxa_by_label or redundant_taxa)
                    if not case_sensitive_label_mapping:
                        self.assertIn(nd.taxon.label, [t.label for t in seen_taxa])
                    else:
                        if nd.taxon.label is None:
                            self.assertIs(nd.original_taxon.label, None)
                            self.assertEqual([t.label for t in seen_taxa].count(None), 1)
                        else:
                            x1 = [t.label.upper() for t in seen_taxa if t.label is not None]
                            self.assertIn(nd.taxon.label.upper(), x1)
            self.assertEqual(len(seen_taxa), len(tree.taxon_namespace))
            if not case_sensitive_label_mapping:
                seen_labels = [(t.label.upper() if t.label is not None else None) for t in seen_taxa]
            else:
                seen_labels = [t.label for t in seen_taxa]
            c1 = collections.Counter(expected_labels)
            c2 = collections.Counter(seen_labels)
            self.assertEqual(c1, c2)

        # NOTE(review): 'tree' here is the loop variable left over from the
        # final iteration above, so only the last tree's namespace size is
        # re-checked -- presumably fine since all trees share one namespace,
        # but confirm this is intentional.
        self.assertEqual(len(tree.taxon_namespace), len(expected_labels))
        if not unify_taxa_by_label and not redundant_taxa:
            self.assertEqual(len(tree.taxon_namespace), len(self.node_label_to_taxon_label_map))

    def test_basic_reconstruction(self):
        # Rebind to a fresh namespace, then reconstruct without unification:
        # same label multiset, but all-new taxon objects.
        tns = dendropy.TaxonNamespace()
        trees = []
        for idx in range(5):
            tree, anodes, lnodes, inodes = self.get_tree(
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False,
                    taxon_namespace=tns)
            trees.append(tree)
        tree_list = dendropy.TreeList(taxon_namespace=tns)
        tree_list._trees = trees
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        tree_list.taxon_namespace = new_tns
        tree_list.reconstruct_taxon_namespace(unify_taxa_by_label=False)
        self.assertIsNot(tree_list.taxon_namespace, tns)
        self.assertIs(tree_list.taxon_namespace, new_tns)
        self.assertEqual(len(tree_list.taxon_namespace), len(tns))
        original_labels = [t.label for t in tns]
        new_labels = [t.label for t in new_tns]
        self.assertCountEqual(new_labels, original_labels)
        for tree in tree_list:
            self.assertIs(tree.taxon_namespace, tree_list.taxon_namespace)
            for nd in tree:
                if nd.taxon is not None:
                    self.assertIn(nd.taxon, tree.taxon_namespace)
                    self.assertNotIn(nd.taxon, tns)

    def test_reconstruct_taxon_namespace_non_unifying(self):
        # reconstruct with no unification: duplicate labels stay duplicated
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        self.tree_list._taxon_namespace = new_tns
        self.assertEqual(len(self.tree_list.taxon_namespace), 0)
        self.tree_list.reconstruct_taxon_namespace(unify_taxa_by_label=False)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=False,
                case_sensitive_label_mapping=True)

    def test_reconstruct_taxon_namespace_unifying_case_sensitive(self):
        # unify by label, case-sensitive: "b" and "B" remain distinct
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        self.tree_list._taxon_namespace = new_tns
        self.assertEqual(len(self.tree_list.taxon_namespace), 0)
        self.tree_list.reconstruct_taxon_namespace(unify_taxa_by_label=True)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=True,
                original_tns=original_tns)

    def test_reconstruct_taxon_namespace_unifying_case_insensitive(self):
        # unify by label, case-insensitive: "b" and "B" collapse together
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = False
        self.tree_list._taxon_namespace = new_tns
        self.assertEqual(len(self.tree_list.taxon_namespace), 0)
        self.tree_list.reconstruct_taxon_namespace(unify_taxa_by_label=True)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=False,
                original_tns=original_tns)

    def test_basic_migration(self):
        # migrate_taxon_namespace(): same expectations as reconstruction,
        # but the namespace switch and rebuild happen in one call
        tns = dendropy.TaxonNamespace()
        trees = []
        for idx in range(5):
            tree, anodes, lnodes, inodes = self.get_tree(
                    suppress_internal_node_taxa=False,
                    suppress_leaf_node_taxa=False,
                    taxon_namespace=tns)
            trees.append(tree)
        tree_list = dendropy.TreeList(taxon_namespace=tns)
        tree_list._trees = trees
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        tree_list.taxon_namespace = new_tns
        tree_list.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=False)
        self.assertIsNot(tree_list.taxon_namespace, tns)
        self.assertIs(tree_list.taxon_namespace, new_tns)
        self.assertEqual(len(tree_list.taxon_namespace), len(tns))
        original_labels = [t.label for t in tns]
        new_labels = [t.label for t in new_tns]
        self.assertCountEqual(new_labels, original_labels)
        for tree in tree_list:
            self.assertIs(tree.taxon_namespace, tree_list.taxon_namespace)
            for nd in tree:
                if nd.taxon is not None:
                    self.assertIn(nd.taxon, tree.taxon_namespace)
                    self.assertNotIn(nd.taxon, tns)

    def test_migrate_taxon_namespace_non_unifying(self):
        # migration without unification: duplicate labels stay duplicated
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        self.tree_list.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=False)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=False,
                case_sensitive_label_mapping=True,
                original_tns=original_tns)

    def test_migrate_taxon_namespace_unifying_case_sensitive(self):
        # migration unifying by label, case-sensitive
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = True
        self.tree_list.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=True)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=True,
                original_tns=original_tns)

    def test_migrate_taxon_namespace_unifying_case_insensitive(self):
        # migration unifying by label, case-insensitive
        original_tns = self.tree_list.taxon_namespace
        new_tns = dendropy.TaxonNamespace()
        new_tns.is_case_sensitive = False
        self.tree_list.migrate_taxon_namespace(
                new_tns,
                unify_taxa_by_label=True)
        self.assertIsNot(self.tree_list.taxon_namespace, original_tns)
        self.assertIs(self.tree_list.taxon_namespace, new_tns)
        self.verify_taxon_namespace_reconstruction(
                unify_taxa_by_label=True,
                case_sensitive_label_mapping=False,
                original_tns=original_tns)
+
+class TestTreeListAppend(
+        curated_test_tree.CuratedTestTree,
+        unittest.TestCase):
+
+    def setUp(self):
+        self.native_tns = dendropy.TaxonNamespace()
+        self.tree_list = dendropy.TreeList(taxon_namespace=self.native_tns)
+        self.foreign_tns = dendropy.TaxonNamespace()
+        self.foreign_tree, anodes, lnodes, inodes = self.get_tree(
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False,
+                taxon_namespace=self.foreign_tns)
+        for nd in self.foreign_tree:
+            nd.original_taxon = nd.taxon
+        self.check_tns = dendropy.TaxonNamespace()
+        self.check_tree, anodes, lnodes, inodes = self.get_tree(
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False,
+                taxon_namespace=self.check_tns)
+
+    def test_append_default(self):
+        self.assertIsNot(self.tree_list.taxon_namespace, self.foreign_tree.taxon_namespace)
+        self.tree_list.append(self.foreign_tree)
+        self.assertEqual(len(self.tree_list), 1)
+        self.assertIn(self.foreign_tree, self.tree_list)
+        self.assertIs(self.foreign_tree, self.tree_list[0])
+        self.assertIs(self.tree_list.taxon_namespace, self.native_tns)
+        self.assertIs(self.foreign_tree.taxon_namespace, self.tree_list.taxon_namespace)
+        self.assertEqual(len(self.tree_list.taxon_namespace), len(self.foreign_tns))
+        for nd in self.foreign_tree:
+            if nd.taxon:
+                self.assertIn(nd.taxon, self.tree_list.taxon_namespace)
+                self.assertIsNot(nd.taxon, nd.original_taxon)
+                self.assertIn(nd.original_taxon, self.foreign_tns)
+                self.assertNotIn(nd.original_taxon, self.tree_list.taxon_namespace)
+                self.assertEqual(nd.taxon.label, nd.original_taxon.label)
+
+    def test_append_migrate_matching_labels(self):
+        # Exercises TreeList.append() when the appended ("foreign") tree's
+        # taxon labels match labels already present in the list's namespace,
+        # once for each supported taxon import strategy. Fixture attributes
+        # such as self.foreign_tree / self.native_tns / self.foreign_tns are
+        # created by setUp() (defined earlier in this class, not shown here).
+        kwargs_groups = [
+                {"taxon_import_strategy": "migrate", "unify_taxa_by_label": True},
+                {"taxon_import_strategy": "migrate", "unify_taxa_by_label": False},
+                {"taxon_import_strategy": "add", },
+        ]
+        for kwargs in kwargs_groups:
+            # Re-run setUp() so each strategy starts from a pristine fixture.
+            self.setUp()
+            self.assertEqual(len(self.tree_list.taxon_namespace), 0)
+            # Building a native tree fills the list's namespace with taxa
+            # whose labels match those of the foreign namespace.
+            native_tree, anodes, lnodes, inodes = self.get_tree(
+                    suppress_internal_node_taxa=False,
+                    suppress_leaf_node_taxa=False,
+                    taxon_namespace=self.native_tns)
+            self.assertEqual(len(self.tree_list.taxon_namespace), len(self.postorder_sequence))
+            self.assertEqual(len(self.tree_list.taxon_namespace), len(self.foreign_tns))
+            original_tns_len = len(self.tree_list.taxon_namespace)
+            self.tree_list.append(self.foreign_tree, **kwargs)
+            self.assertEqual(len(self.tree_list), 1)
+            self.assertIn(self.foreign_tree, self.tree_list)
+            self.assertIs(self.foreign_tree, self.tree_list[0])
+            self.assertIs(self.foreign_tree.taxon_namespace, self.tree_list.taxon_namespace)
+            if kwargs["taxon_import_strategy"] == "add":
+                # "add" imports the foreign taxon objects wholesale, so the
+                # namespace grows even though labels collide.
+                self.assertEqual(len(self.tree_list.taxon_namespace),
+                        original_tns_len + len(self.foreign_tns))
+                for nd in self.foreign_tree:
+                    self.assertIn(nd.taxon, self.foreign_tns)
+                    self.assertIn(nd.taxon, self.tree_list.taxon_namespace)
+            else:
+                # "migrate" re-maps node taxa into the list's namespace; the
+                # namespace only keeps its size when unifying taxa by label.
+                if "unify_taxa_by_label" not in kwargs or not kwargs["unify_taxa_by_label"]:
+                    self.assertEqual(len(self.tree_list.taxon_namespace),
+                            original_tns_len + len(self.foreign_tns))
+                else:
+                    self.assertEqual(len(self.tree_list.taxon_namespace), original_tns_len)
+                for nd in self.foreign_tree:
+                    self.assertNotIn(nd.taxon, self.foreign_tns)
+                    self.assertIn(nd.taxon, self.tree_list.taxon_namespace)
+
+    def test_append_add(self):
+        # Appending with taxon_import_strategy="add" keeps each node's
+        # original taxon object and adds it directly to the list's namespace.
+        self.assertIsNot(self.tree_list.taxon_namespace, self.foreign_tree.taxon_namespace)
+        self.tree_list.append(self.foreign_tree,
+                taxon_import_strategy="add")
+        self.assertEqual(len(self.tree_list), 1)
+        self.assertIn(self.foreign_tree, self.tree_list)
+        self.assertIs(self.foreign_tree, self.tree_list[0])
+        self.assertIs(self.tree_list.taxon_namespace, self.native_tns)
+        self.assertIs(self.foreign_tree.taxon_namespace, self.tree_list.taxon_namespace)
+        self.assertEqual(len(self.tree_list.taxon_namespace), len(self.foreign_tns))
+        for nd in self.foreign_tree:
+            if nd.taxon:
+                self.assertIn(nd.taxon, self.tree_list.taxon_namespace)
+                # Unlike "migrate", the node's taxon reference is unchanged...
+                self.assertIs(nd.taxon, nd.original_taxon)
+                self.assertIn(nd.original_taxon, self.foreign_tns)
+                # ...and is now a member of both namespaces.
+                self.assertIn(nd.original_taxon, self.tree_list.taxon_namespace)
+
+class TestTreeListTaxa(
+        curated_test_tree.CuratedTestTree,
+        dendropytest.ExtendedTestCase):
+
+    def setUp(self):
+        self.tree_list = dendropy.TreeList()
+        self.expected_taxa = None
+        for i in range(10):
+            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
+                    taxon_namespace=self.tree_list.taxon_namespace,
+                    suppress_internal_node_taxa=False,
+                    suppress_leaf_node_taxa=False)
+            self.tree_list.append(tree1)
+            if self.expected_taxa is None:
+                self.expected_taxa = set([nd.taxon for nd in anodes1 if nd.taxon is not None])
+
+    def test_basic_taxa(self):
+        self.assertEqual(self.tree_list.poll_taxa(), self.expected_taxa)
+
+class TestTreeListPurgeTaxonNamespace(
+        curated_test_tree.CuratedTestTree,
+        dendropytest.ExtendedTestCase):
+
+    def setUp(self):
+        self.tree_list = dendropy.TreeList()
+        self.expected_taxa = None
+        for i in range(10):
+            tree1, anodes1, lnodes1, inodes1 = self.get_tree(
+                    taxon_namespace=self.tree_list.taxon_namespace,
+                    suppress_internal_node_taxa=False,
+                    suppress_leaf_node_taxa=False)
+            self.tree_list.append(tree1)
+            if self.expected_taxa is None:
+                self.expected_taxa = set([nd.taxon for nd in anodes1 if nd.taxon is not None])
+
+    def test_noop_purge(self):
+        self.assertEqual(set(self.tree_list.taxon_namespace), self.expected_taxa)
+        self.tree_list.purge_taxon_namespace()
+        self.assertEqual(set(self.tree_list.taxon_namespace), self.expected_taxa)
+
+    def test_basic_purge(self):
+        self.assertEqual(set(self.tree_list.taxon_namespace), self.expected_taxa)
+        added_taxa = set(self.expected_taxa)
+        for label in ("z1", "z2", "z3", "z4"):
+            t = self.tree_list.taxon_namespace.new_taxon(label=label)
+            added_taxa.add(t)
+        self.assertEqual(set(self.tree_list.taxon_namespace), added_taxa)
+        self.tree_list.purge_taxon_namespace()
+        self.assertEqual(set(self.tree_list.taxon_namespace), self.expected_taxa)
+
+class TreeListCreation(unittest.TestCase):
+
+    def test_create_with_taxon_namespace(self):
+        tns = dendropy.TaxonNamespace()
+        tt = dendropy.TreeList(label="a", taxon_namespace=tns)
+        self.assertEqual(tt.label, "a")
+        self.assertIs(tt.taxon_namespace, tns)
+
+class TestSpecialTreeListConstruction(
+        unittest.TestCase):
+
+    def test_construction_from_another_tree_different_label(self):
+        tlist1 = dendropy.TreeList()
+        tlist1.label = "tlist1"
+        self.assertEqual(tlist1.label, "tlist1")
+        tlist2 = dendropy.TreeList(tlist1, label="tlist2")
+        self.assertEqual(tlist2.label, "tlist2")
+        self.assertNotEqual(tlist1.label, "tlist2")
+        self.assertNotEqual(tlist1.label, tlist2.label)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_datamodel_tree_node_fundamentals.py b/dendropy/test/test_datamodel_tree_node_fundamentals.py
new file mode 100644
index 0000000..17945f6
--- /dev/null
+++ b/dendropy/test/test_datamodel_tree_node_fundamentals.py
@@ -0,0 +1,322 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests basic Node child management.
+"""
+
+import unittest
+import dendropy
+import copy
+from dendropy.test.support import compare_and_validate
+
+class TestNodeConstruction(unittest.TestCase):
+
+    def test_basic_construction(self):
+        taxon = dendropy.Taxon("z")
+        nd = dendropy.Node(taxon=taxon, label="x", edge_length=1)
+        self.assertIs(nd.taxon, taxon)
+        self.assertEqual(nd.label, "x")
+        edge = nd.edge
+        self.assertEqual(edge.length, 1)
+        self.assertIs(edge.head_node, nd)
+        self.assertIs(edge.tail_node, None)
+
+class NodeIdentity(unittest.TestCase):
+
+    def setUp(self):
+        taxon = dendropy.Taxon("a")
+        self.n1 = dendropy.Node(label="a", taxon=taxon)
+        self.n2 = dendropy.Node(label="a", taxon=taxon)
+
+    def test_equal(self):
+        # two distinct |Node| objects are never equal, even if all
+        # member values are the same.
+        self.assertNotEqual(self.n1, self.n2)
+        self.assertIs(self.n1.taxon, self.n2.taxon)
+
+    def test_hash_dict_membership(self):
+        k = {}
+        k[self.n1] = 1
+        k[self.n2] = 2
+        self.assertEqual(len(k), 2)
+        self.assertEqual(k[self.n1], 1)
+        self.assertEqual(k[self.n2], 2)
+        self.assertIn(self.n1, k)
+        self.assertIn(self.n2, k)
+        del k[self.n1]
+        self.assertNotIn(self.n1, k)
+        self.assertIn(self.n2, k)
+        self.assertEqual(len(k), 1)
+        k1 = {self.n1: 1}
+        k2 = {self.n2: 1}
+        self.assertIn(self.n1, k1)
+        self.assertIn(self.n2, k2)
+        self.assertNotIn(self.n2, k1)
+        self.assertNotIn(self.n1, k2)
+
+    def test_hash_set_membership(self):
+        k = set()
+        k.add(self.n1)
+        k.add(self.n2)
+        self.assertEqual(len(k), 2)
+        self.assertIn(self.n1, k)
+        self.assertIn(self.n2, k)
+        k.discard(self.n1)
+        self.assertNotIn(self.n1, k)
+        self.assertIn(self.n2, k)
+        self.assertEqual(len(k), 1)
+        k1 = {self.n1: 1}
+        k2 = {self.n2: 1}
+        self.assertIn(self.n1, k1)
+        self.assertIn(self.n2, k2)
+        self.assertNotIn(self.n2, k1)
+        self.assertNotIn(self.n1, k2)
+
+class NodeCloning(compare_and_validate.Comparator, unittest.TestCase):
+
+    def setUp(self):
+        self.taxa = [dendropy.Taxon(label=label) for label in ["a", "b", "c", "d"]]
+        self.n0 = dendropy.Node(label="0", taxon=self.taxa[0])
+        self.c1 = dendropy.Node(label="1", taxon=None)
+        self.c2 = dendropy.Node(label=None, taxon=self.taxa[1])
+        self.c3 = dendropy.Node(label=None, taxon=None)
+        self.c3 = dendropy.Node(label=None, taxon=self.taxa[2])
+        self.p1 = dendropy.Node(label="-1", taxon=self.taxa[3])
+        self.n0.parent_node = self.p1
+        self.n0.set_child_nodes([self.c1, self.c2])
+        self.c2.set_child_nodes([self.c3])
+        self.nodes = [self.n0, self.c1, self.c2, self.c3, self.p1]
+        for idx, nd in enumerate(self.nodes):
+            if idx % 2 == 0:
+                nd.edge.label = "E{}".format(idx)
+                nd.edge.length = idx
+            an1 = nd.annotations.add_new("a{}".format(idx),
+                    "{}{}{}".format(nd.label, nd.taxon, idx))
+            an2 = nd.annotations.add_bound_attribute("label")
+            an3 = an1.annotations.add_bound_attribute("name")
+            ae1 = nd.edge.annotations.add_new("a{}".format(idx),
+                    "{}{}".format(nd.edge.label, idx))
+            ae2 = nd.edge.annotations.add_bound_attribute("label")
+            ae3 = ae1.annotations.add_bound_attribute("name")
+
+    def test_unsupported_copy(self):
+        with self.assertRaises(TypeError):
+            self.n0.clone(0)
+        with self.assertRaises(TypeError):
+            copy.copy(self.n0)
+        with self.assertRaises(TypeError):
+            self.n0.clone(1)
+        with self.assertRaises(TypeError):
+            self.n0.taxon_namespace_scoped_copy()
+
+    def test_deepcopy(self):
+        for clone in (
+                self.n0.clone(2),
+                copy.deepcopy(self.n0),
+                ):
+            self.compare_distinct_nodes(
+                    clone, self.n0,
+                    taxon_namespace_scoped=False,
+                    compare_tree_annotations=True)
+
+class TestNodeSetChildNodes(unittest.TestCase):
+    """Tests child-node management: set/add/insert/remove and parenting.
+
+    These tests inspect private state (_child_nodes, _parent_node) to
+    verify that parent and edge pointers are rewired consistently.
+    """
+
+    def test_set_child_nodes(self):
+        # set_child_nodes() must reparent each child and rewire its edge,
+        # leaving grandchildren untouched.
+        parent = dendropy.Node(label="parent")
+        assigned_ch = [dendropy.Node(label=c) for c in ["c1", "c2", "c3"]]
+        for nd in assigned_ch:
+            x = [dendropy.Node(label=c) for c in ["s1", "s2"]]
+            nd.set_child_nodes(x)
+            nd._expected_children = x
+        parent.set_child_nodes(assigned_ch)
+        for ch in parent._child_nodes:
+            self.assertIn(ch, assigned_ch)
+            self.assertIs(ch._parent_node, parent)
+            self.assertIs(ch.edge.tail_node, parent)
+            self.assertIs(ch.edge.head_node, ch)
+            self.assertEqual(len(ch._child_nodes), len(ch._expected_children))
+            for sch in ch._child_nodes:
+                self.assertIn(sch, ch._expected_children)
+                self.assertIs(sch._parent_node, ch)
+                self.assertIs(sch.edge.tail_node, ch)
+                self.assertIs(sch.edge.head_node, sch)
+        for ch in assigned_ch:
+            self.assertTrue(ch in parent._child_nodes)
+
+    def test_add_child(self):
+        # add_child() returns the added child and wires the same
+        # parent/edge pointers as set_child_nodes().
+        parent = dendropy.Node(label="parent")
+        assigned_ch = [dendropy.Node(label=c) for c in ["c1", "c2", "c3"]]
+        for nd in assigned_ch:
+            x = [dendropy.Node(label=c) for c in ["s1", "s2"]]
+            for y in x:
+                nd.add_child(y)
+            nd._expected_children = x
+        for ch in assigned_ch:
+            k = parent.add_child(ch)
+            self.assertIs(k, ch)
+        for ch in parent._child_nodes:
+            self.assertIn(ch, assigned_ch)
+            self.assertIs(ch._parent_node, parent)
+            self.assertIs(ch.edge.tail_node, parent)
+            self.assertIs(ch.edge.head_node, ch)
+            self.assertEqual(len(ch._child_nodes), len(ch._expected_children))
+            for sch in ch._child_nodes:
+                self.assertIn(sch, ch._expected_children)
+                self.assertIs(sch._parent_node, ch)
+                self.assertIs(sch.edge.tail_node, ch)
+                self.assertIs(sch.edge.head_node, sch)
+        for ch in assigned_ch:
+            self.assertTrue(ch in parent._child_nodes)
+
+    def test_insert_child_at_pos(self):
+        # insert_child(pos, ...) places the new child at exactly that index
+        # for every possible insertion position.
+        new_child_labels = ["c1", "c2", "c3"]
+        insert_ch_label = "x1"
+        for pos in range(len(new_child_labels)+1):
+            parent = dendropy.Node(label="parent")
+            assigned_ch = [dendropy.Node(label=c) for c in new_child_labels]
+            parent.set_child_nodes(assigned_ch)
+            insert_ch = dendropy.Node(label=insert_ch_label)
+            parent.insert_child(pos, insert_ch)
+            x = 0
+            for idx, ch in enumerate(parent._child_nodes):
+                if idx == pos:
+                    self.assertEqual(ch.label, insert_ch_label)
+                else:
+                    self.assertEqual(ch.label, new_child_labels[x])
+                    x += 1
+
+    def test_redundant_set(self):
+        # Re-setting the same children -- even a duplicated list -- must not
+        # create duplicate entries.
+        parent = dendropy.Node(label="parent")
+        assigned_ch = [dendropy.Node(label=c) for c in ["c1", "c2", "c3"]]
+        parent.set_child_nodes(assigned_ch)
+        parent.set_child_nodes(assigned_ch)
+        ch2 = assigned_ch + assigned_ch
+        parent.set_child_nodes(ch2)
+        self.assertEqual(parent._child_nodes, assigned_ch)
+        for nd in parent.child_node_iter():
+            self.assertIs(nd.parent_node, parent)
+
+    def test_redundant_insert_child_at_pos(self):
+        # Re-inserting an already-attached child must not duplicate it.
+        new_child_labels = ["c1", "c2", "c3"]
+        for child_to_insert_idx in range(len(new_child_labels)):
+            for insertion_idx in range(len(new_child_labels)):
+                parent = dendropy.Node(label="parent")
+                assigned_ch = [dendropy.Node(label=c) for c in new_child_labels]
+                parent.set_child_nodes(assigned_ch)
+                self.assertEqual(parent._child_nodes, assigned_ch)
+                insert_ch = assigned_ch[child_to_insert_idx]
+                parent.insert_child(insertion_idx, insert_ch)
+                self.assertEqual(len(parent._child_nodes), len(assigned_ch))
+                self.assertEqual(len(set(parent._child_nodes)), len(parent._child_nodes))
+                x = 0
+                for idx, ch in enumerate(parent._child_nodes):
+                    if idx == insertion_idx:
+                        self.assertIs(ch, insert_ch)
+                    self.assertIn(ch, assigned_ch)
+                for ch in assigned_ch:
+                    self.assertIn(ch, parent._child_nodes)
+                    self.assertEqual(parent._child_nodes.count(ch), 1)
+
+    def test_redundant_add(self):
+        # Adding an existing child is a no-op with respect to membership.
+        parent = dendropy.Node(label="parent")
+        assigned_ch = [dendropy.Node(label=c) for c in ["c1", "c2", "c3"]]
+        parent.set_child_nodes(assigned_ch)
+        for ch in assigned_ch:
+            parent.add_child(ch)
+            self.assertEqual(parent._child_nodes, assigned_ch)
+            for nd in parent.child_node_iter():
+                self.assertIs(nd.parent_node, parent)
+
+    def test_parent_node_setting(self):
+        # Assigning parent_node must register the child with the parent and
+        # rewire the child's edge.
+        parent = dendropy.Node(label="parent")
+        assigned_ch = [dendropy.Node(label=c) for c in ["c1", "c2", "c3"]]
+        for ch in assigned_ch:
+            ch.parent_node = parent
+        for ch in assigned_ch:
+            self.assertEqual(parent._child_nodes, assigned_ch)
+            for nd in parent.child_node_iter():
+                self.assertIs(nd.parent_node, parent)
+                self.assertIs(nd.edge.tail_node, parent)
+                self.assertIs(nd.edge.head_node, nd)
+
+    def test_new_child(self):
+        # new_child() constructs, attaches, and returns a child node.
+        parent = dendropy.Node(label="parent")
+        new_child_labels = ["c1", "c2", "c3"]
+        sub_child_labels = ["s1", "s2"]
+        for label in new_child_labels:
+            nd = parent.new_child(label=label)
+            for y in sub_child_labels:
+                x = nd.new_child(label=y)
+                self.assertTrue(isinstance(x, dendropy.Node))
+                self.assertEqual(x.label, y)
+                self.assertIs(x.parent_node, nd)
+                self.assertIs(x.edge.head_node, x)
+                self.assertIs(x.edge.tail_node, nd)
+        self.assertEqual(len(parent._child_nodes), len(new_child_labels))
+        for ch in parent._child_nodes:
+            self.assertIn(ch.label, new_child_labels)
+            self.assertIs(ch._parent_node, parent)
+            self.assertIs(ch.edge.tail_node, parent)
+            self.assertIs(ch.edge.head_node, ch)
+            for sch in ch._child_nodes:
+                self.assertIn(sch.label, sub_child_labels)
+                self.assertIs(sch._parent_node, ch)
+                self.assertIs(sch.edge.tail_node, ch)
+                self.assertIs(sch.edge.head_node, sch)
+
+    def test_new_child_at_pos(self):
+        # insert_new_child(pos, ...) constructs a child at the given index.
+        new_child_labels = ["c1", "c2", "c3"]
+        insert_ch_label = "x1"
+        for pos in range(len(new_child_labels)+1):
+            parent = dendropy.Node(label="parent")
+            assigned_ch = [dendropy.Node(label=c) for c in new_child_labels]
+            parent.set_child_nodes(assigned_ch)
+            parent.insert_new_child(pos, label=insert_ch_label)
+            x = 0
+            for idx, ch in enumerate(parent._child_nodes):
+                if idx == pos:
+                    self.assertEqual(ch.label, insert_ch_label)
+                else:
+                    self.assertEqual(ch.label, new_child_labels[x])
+                    x += 1
+
+    def test_remove_child(self):
+        # remove_child() returns the removed node and preserves the order
+        # of the remaining children.
+        assigned_child_labels = ["c1", "c2", "c3"]
+        for remove_idx in range(len(assigned_child_labels)):
+            parent = dendropy.Node(label="parent")
+            assigned_ch = [dendropy.Node(label=c) for c in assigned_child_labels]
+            parent.set_child_nodes(assigned_ch)
+            ch_nodes = list(parent._child_nodes)
+            to_remove = ch_nodes[remove_idx]
+            x = parent.remove_child(to_remove)
+            self.assertIs(to_remove, x)
+            ch_nodes.remove(to_remove)
+            ch_nodes2 = list(parent._child_nodes)
+            self.assertEqual(ch_nodes, ch_nodes2)
+
+    def test_edge_head_node_setting(self):
+        # Replacing a node's edge re-points the new edge's head at the node.
+        node = dendropy.Node(label="x")
+        edge1 = node.edge
+        edge2 = dendropy.Edge()
+        node.edge = edge2
+        self.assertIs(node.edge, edge2)
+        self.assertIs(node.edge.head_node, node)
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_datamodel_tree_structure_and_iteration.py b/dendropy/test/test_datamodel_tree_structure_and_iteration.py
new file mode 100644
index 0000000..7c33a6a
--- /dev/null
+++ b/dendropy/test/test_datamodel_tree_structure_and_iteration.py
@@ -0,0 +1,597 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests basic Tree structure and iteration.
+"""
+
+import unittest
+import dendropy
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import dendropytest
+
+class TestTreeNodeAndEdgeCollections(curated_test_tree.CuratedTestTree, unittest.TestCase):
+
+    def test_get_nodes(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = tree.nodes()
+        self.assertEqual(len(nodes), len(anodes))
+        self.assertEqual(set(nodes), anodes)
+        obs_labels = [nd.label for nd in nodes]
+
+    def test_get_nodes_filtered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = tree.nodes(filter_fn = lambda x : x.edge.length > 13)
+        exp_nodes = set([nd for nd in anodes if nd.edge.length > 13])
+        for nd in nodes:
+            self.assertTrue(nd.edge.length > 13)
+        self.assertEqual(len(nodes), len(exp_nodes))
+        self.assertEqual(set(nodes), exp_nodes)
+
+    def test_get_leaf_nodes(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = tree.leaf_nodes()
+        self.assertEqual(len(nodes), len(lnodes))
+        self.assertEqual(set(nodes), lnodes)
+
+    def test_get_internal_nodes_with_root(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = tree.internal_nodes()
+        nlnodes = inodes | set([tree.seed_node])
+        self.assertEqual(len(nodes), len(nlnodes))
+        self.assertEqual(set(nodes), nlnodes)
+
+    def test_get_internal_nodes_no_root(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = tree.internal_nodes(True)
+        self.assertEqual(len(nodes), len(inodes))
+        self.assertEqual(set(nodes), inodes)
+
+    def test_get_edges(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        edges = tree.edges()
+        eset = set([nd.edge for nd in anodes])
+        self.assertEqual(len(edges), len(eset))
+        self.assertEqual(set(edges), eset)
+
+    def test_get_edges_filtered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        edges = tree.edges(filter_fn=lambda x : x.length > 13)
+        exp_edges = set([nd.edge for nd in anodes if nd.edge.length > 13])
+        for edge in edges:
+            self.assertTrue(edge.length > 13)
+        self.assertEqual(len(edges), len(exp_edges))
+        self.assertEqual(set(edges), exp_edges)
+
+    def test_get_leaf_edges(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        edges = tree.leaf_edges()
+        exp_edges = set([nd.edge for nd in lnodes])
+        self.assertEqual(len(edges), len(exp_edges))
+        self.assertEqual(set(edges), exp_edges)
+
+    def test_get_internal_edges_with_root(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        edges = tree.internal_edges()
+        nlnodes = inodes | set([tree.seed_node])
+        exp_edges = set([nd.edge for nd in nlnodes])
+        self.assertEqual(len(edges), len(exp_edges))
+        self.assertEqual(set(edges), exp_edges)
+
+    def test_get_internal_edges_no_root(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        edges = tree.internal_edges(True)
+        exp_edges = set([nd.edge for nd in inodes])
+        self.assertEqual(len(edges), len(exp_edges))
+        self.assertEqual(set(edges), exp_edges)
+
+    def test_get_child_nodes(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        for node in anodes:
+            child_labels = [ch.label for ch in node.child_nodes()]
+            expected_children = self.node_children[node.label]
+            self.assertEqual(len(child_labels), len(expected_children))
+            self.assertEqual(set(child_labels), set(expected_children))
+
+class TestTreeNodeFinders(curated_test_tree.CuratedTestTree, unittest.TestCase):
+
+    def test_find_node(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        node = tree.find_node(lambda x: x.label == "c")
+        self.assertEqual(node.label, "c")
+
+    def test_find_node_nonexisting(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        node = tree.find_node(lambda x: x.label == "zzz")
+        self.assertIs(node, None)
+
+    def test_find_node_with_label(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        node = tree.find_node_with_label("c")
+        self.assertEqual(node.label, "c")
+
+    def test_find_node_with_label_nonexisting(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        node = tree.find_node_with_label("zzz")
+        self.assertIs(node, None)
+
+class TestTreeIterators(curated_test_tree.CuratedTestTree, unittest.TestCase):
+
+    ### Default Iterator ###
+
+    def test_default_iteration(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree]
+        visited_labels = [nd.label for nd in nodes]
+        self.assertSequenceEqual(visited_labels, self.preorder_sequence)
+
+    ### Preorder Node Iterator ###
+
+    def test_preorder_node_iter_unfiltered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree.preorder_node_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        self.assertSequenceEqual(visited_labels, self.preorder_sequence)
+
+    def test_preorder_node_iter_filtered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.edge.length > 13
+        nodes = [nd for nd in tree.preorder_node_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.preorder_sequence if self.node_edge_lengths[x] > 13]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Preorder Internal Node Iterator ###
+
+    def test_preorder_internal_node_iter_unfiltered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree.preorder_internal_node_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.preorder_sequence if
+                self.node_children[x]]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    def test_preorder_internal_node_iter_filtered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.edge.length > 13
+        nodes = [nd for nd in tree.preorder_internal_node_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.preorder_sequence if
+                (self.node_children[x] and self.node_edge_lengths[x] > 13)]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    def test_preorder_internal_node_iter_without_root_unfiltered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree.preorder_internal_node_iter(exclude_seed_node=True)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.preorder_sequence if
+                self.node_children[x] and x != "a"]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    def test_preorder_internal_node_iter_without_root_filtered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.edge.length > 13
+        nodes = [nd for nd in tree.preorder_internal_node_iter(exclude_seed_node=True, filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.preorder_sequence if
+                (self.node_children[x] and self.node_edge_lengths[x] > 13) and x != "a"]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Postorder Node Iterator ###
+
+    def test_postorder_node_iter_unfiltered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree.postorder_node_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        self.assertSequenceEqual(visited_labels, self.postorder_sequence)
+
+    def test_postorder_node_iter_filtered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.edge.length > 13
+        nodes = [nd for nd in tree.postorder_node_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.postorder_sequence if self.node_edge_lengths[x] > 13]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Postorder Internal Node Iterator ###
+
+    def test_postorder_internal_node_iter_unfiltered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree.postorder_internal_node_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.postorder_sequence if
+                self.node_children[x]]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    def test_postorder_internal_node_iter_filtered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.edge.length > 13
+        nodes = [nd for nd in tree.postorder_internal_node_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.postorder_sequence if
+                (self.node_children[x] and self.node_edge_lengths[x] > 13)]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    def test_postorder_internal_node_iter_without_root_unfiltered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree.postorder_internal_node_iter(exclude_seed_node=True)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.postorder_sequence if
+                self.node_children[x] and x != "a"]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    def test_postorder_internal_node_iter_without_root_filtered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.edge.length > 13
+        nodes = [nd for nd in tree.postorder_internal_node_iter(exclude_seed_node=True, filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.postorder_sequence if
+                (self.node_children[x] and self.node_edge_lengths[x] > 13) and x != "a"]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Level-Order Node Iterator ###
+
+    def test_levelorder_node_iter_unfiltered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree.levelorder_node_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        self.assertSequenceEqual(visited_labels, self.levelorder_sequence)
+
+    def test_levelorder_node_iter_filtered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.edge.length > 13
+        nodes = [nd for nd in tree.levelorder_node_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.levelorder_sequence if self.node_edge_lengths[x] > 13]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### In-Order Node Iterator ###
+
+    def test_inorder_node_iter_unfiltered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree.inorder_node_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        self.assertSequenceEqual(visited_labels, self.inorder_sequence)
+
+    def test_inorder_node_iter_filtered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.edge.length > 13
+        nodes = [nd for nd in tree.inorder_node_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.inorder_sequence if self.node_edge_lengths[x] > 13]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Leaf Node Iterator ###
+
+    def test_leaf_node_iter_unfiltered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree.leaf_node_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        self.assertSequenceEqual(visited_labels, self.leaf_sequence)
+
+    def test_leaf_node_iter_filtered(self):
+        """Filtered leaf iteration visits only leaves whose edge length exceeds 13."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.edge.length > 13
+        nodes = [nd for nd in tree.leaf_node_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.leaf_sequence if self.node_edge_lengths[x] > 13]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Age-Order Node Iterator ###
+
+    def test_node_ages(self):
+        """calc_node_ages() assigns each node the age recorded in ``node_ages``."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        tree.calc_node_ages()
+        nodes = [nd for nd in tree.ageorder_node_iter()]
+        for nd in nodes:
+            self.assertEqual(nd.age, self.node_ages[nd.label])
+
+    def test_ageorder_node_iter_unfiltered(self):
+        """Age-order traversal visits nodes in the expected ``ageorder_sequence``."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree.ageorder_node_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        self.assertSequenceEqual(visited_labels, self.ageorder_sequence)
+
+    def test_ageorder_node_iter_unfiltered_no_leaves(self):
+        """With include_leaves=False, age-order traversal yields only internal nodes."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree.ageorder_node_iter(include_leaves=False)]
+        visited_labels = [nd.label for nd in nodes]
+        # Internal nodes are exactly those with a non-empty child list.
+        expected = [label for label in self.ageorder_sequence if self.node_children[label]]
+        self.assertSequenceEqual(visited_labels, expected)
+
+    def test_ageorder_node_iter_unfiltered_reversed(self):
+        """With descending=True, nodes are visited oldest-first by age."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [nd for nd in tree.ageorder_node_iter(descending=True)]
+        visited_labels = [nd.label for nd in nodes]
+        # Build expectation by sorting the preorder labels by age, descending;
+        # NOTE(review): relies on sort stability matching the iterator's
+        # tie-breaking for equal-aged nodes -- confirm against curated tree data.
+        nda = [ (self.node_ages[x], x) for x in self.preorder_sequence ]
+        nda.sort(key=lambda x: x[0], reverse=True)
+        exp = [x[1] for x in nda]
+        self.assertSequenceEqual(visited_labels, exp)
+
+    def test_leaf_node_iter_filtered(self):
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.edge.length > 13
+        nodes = [nd for nd in tree.ageorder_node_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.ageorder_sequence if self.node_edge_lengths[x] > 13]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Preorder Edge Iterator ###
+
+    def test_preorder_edge_iter_unfiltered(self):
+        """Preorder edge traversal visits edges whose head nodes follow ``preorder_sequence``."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [edge.head_node for edge in tree.preorder_edge_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        self.assertSequenceEqual(visited_labels, self.preorder_sequence)
+
+    def test_preorder_edge_iter_filtered(self):
+        """Filtered preorder edge traversal yields only edges longer than 13."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.length > 13
+        nodes = [edge.head_node for edge in tree.preorder_edge_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.preorder_sequence if self.node_edge_lengths[x] > 13]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Preorder Internal Edge Iterator ###
+
+    def test_preorder_internal_edge_iter_unfiltered(self):
+        """Preorder internal-edge traversal skips edges subtending leaves."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [edge.head_node for edge in tree.preorder_internal_edge_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.preorder_sequence if
+                self.node_children[x]]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    def test_preorder_internal_edge_iter_filtered(self):
+        """Filtered preorder internal-edge traversal keeps internal edges longer than 13."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.length > 13
+        nodes = [edge.head_node for edge in tree.preorder_internal_edge_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.preorder_sequence if
+                (self.node_children[x] and self.node_edge_lengths[x] > 13)]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    def test_preorder_internal_edge_iter_without_root_unfiltered(self):
+        """With exclude_seed_edge=True, the root ("a") edge is omitted."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [edge.head_node for edge in tree.preorder_internal_edge_iter(exclude_seed_edge=True)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.preorder_sequence if
+                self.node_children[x] and x != "a"]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    def test_preorder_internal_edge_iter_without_root_filtered(self):
+        """Seed-edge exclusion and the length filter compose correctly."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.length > 13
+        nodes = [edge.head_node for edge in tree.preorder_internal_edge_iter(exclude_seed_edge=True, filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.preorder_sequence if
+                (self.node_children[x] and self.node_edge_lengths[x] > 13) and x != "a"]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Postorder Edge Iterator ###
+
+    def test_postorder_edge_iter_unfiltered(self):
+        """Postorder edge traversal follows ``postorder_sequence`` by head node."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [edge.head_node for edge in tree.postorder_edge_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        self.assertSequenceEqual(visited_labels, self.postorder_sequence)
+
+    def test_postorder_edge_iter_filtered(self):
+        """Filtered postorder edge traversal yields only edges longer than 13."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.length > 13
+        nodes = [edge.head_node for edge in tree.postorder_edge_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.postorder_sequence if self.node_edge_lengths[x] > 13]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Postorder Internal Edge Iterator ###
+
+    def test_postorder_internal_edge_iter_unfiltered(self):
+        """Postorder internal-edge traversal skips edges subtending leaves."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [edge.head_node for edge in tree.postorder_internal_edge_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.postorder_sequence if
+                self.node_children[x]]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    def test_postorder_internal_edge_iter_filtered(self):
+        """Filtered postorder internal-edge traversal keeps internal edges longer than 13."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.length > 13
+        nodes = [edge.head_node for edge in tree.postorder_internal_edge_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.postorder_sequence if
+                (self.node_children[x] and self.node_edge_lengths[x] > 13)]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    def test_postorder_internal_edge_iter_without_root_unfiltered(self):
+        """With exclude_seed_edge=True, the root ("a") edge is omitted."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [edge.head_node for edge in tree.postorder_internal_edge_iter(exclude_seed_edge=True)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.postorder_sequence if
+                self.node_children[x] and x != "a"]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    def test_postorder_internal_edge_iter_without_root_filtered(self):
+        """Seed-edge exclusion and the length filter compose correctly."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.length > 13
+        nodes = [edge.head_node for edge in tree.postorder_internal_edge_iter(exclude_seed_edge=True, filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.postorder_sequence if
+                (self.node_children[x] and self.node_edge_lengths[x] > 13) and x != "a"]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Level-Order Edge Iterator ###
+
+    def test_levelorder_edge_iter_unfiltered(self):
+        """Level-order edge traversal follows ``levelorder_sequence`` by head node."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [edge.head_node for edge in tree.levelorder_edge_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        self.assertSequenceEqual(visited_labels, self.levelorder_sequence)
+
+    def test_levelorder_edge_iter_filtered(self):
+        """Filtered level-order edge traversal yields only edges longer than 13."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.length > 13
+        nodes = [edge.head_node for edge in tree.levelorder_edge_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.levelorder_sequence if self.node_edge_lengths[x] > 13]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### In-Order Edge Iterator ###
+
+    def test_inorder_edge_iter_unfiltered(self):
+        """In-order edge traversal follows ``inorder_sequence`` by head node."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [edge.head_node for edge in tree.inorder_edge_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        self.assertSequenceEqual(visited_labels, self.inorder_sequence)
+
+    def test_inorder_edge_iter_filtered(self):
+        """Filtered in-order edge traversal yields only edges longer than 13."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.length > 13
+        nodes = [edge.head_node for edge in tree.inorder_edge_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.inorder_sequence if self.node_edge_lengths[x] > 13]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Leaf Edge Iterator ###
+
+    def test_leaf_edge_iter_unfiltered(self):
+        """Leaf-edge iteration visits edges whose head nodes follow ``leaf_sequence``."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        nodes = [edge.head_node for edge in tree.leaf_edge_iter()]
+        visited_labels = [nd.label for nd in nodes]
+        self.assertSequenceEqual(visited_labels, self.leaf_sequence)
+
+    def test_leaf_edge_iter_filtered(self):
+        """Filtered leaf-edge iteration yields only leaf edges longer than 13."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        f = lambda x: x.length > 13
+        nodes = [edge.head_node for edge in tree.leaf_edge_iter(filter_fn=f)]
+        visited_labels = [nd.label for nd in nodes]
+        exp_labels = [x for x in self.leaf_sequence if self.node_edge_lengths[x] > 13]
+        self.assertSequenceEqual(visited_labels, exp_labels)
+
+    ### Special Iterators ###
+
+    def test_child_node_iterator_unfiltered(self):
+        """Each node's child_node_iter() yields its children in recorded order."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        for nd in anodes:
+            expected_children = self.node_children[nd.label]
+            children = [ch.label for ch in nd.child_node_iter()]
+            self.assertSequenceEqual(children, expected_children)
+
+    def test_child_node_iterator_filtered(self):
+        """Filtered child iteration keeps only children with edge length over 13."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        filter_fn = lambda x: x.edge.length > 13
+        for nd in anodes:
+            expected_children = [label for label in self.node_children[nd.label] if self.node_edge_lengths[label] > 13]
+            children = [ch.label for ch in nd.child_node_iter(filter_fn=filter_fn)]
+            self.assertEqual(children, expected_children)
+
+    def test_child_edge_iterator_unfiltered(self):
+        """Each node's child_edge_iter() yields edges to its children in order."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        for nd in anodes:
+            expected_children = self.node_children[nd.label]
+            children = [edge.head_node.label for edge in nd.child_edge_iter()]
+            self.assertSequenceEqual(children, expected_children)
+
+    def test_child_edge_iterator_filtered(self):
+        """Filtered child-edge iteration keeps only edges longer than 13."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        filter_fn = lambda x: x.length > 13
+        for nd in anodes:
+            expected_children = [label for label in self.node_children[nd.label] if self.node_edge_lengths[label] > 13]
+            children = [edge.head_node.label for edge in nd.child_edge_iter(filter_fn=filter_fn)]
+            self.assertEqual(children, expected_children)
+
+    def test_ancestor_iterator_exclusive_unfiltered(self):
+        """ancestor_iter(inclusive=False) yields ancestors only, not the node itself."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        for nd in anodes:
+            ancestors = [ch.label for ch in nd.ancestor_iter(inclusive=False)]
+            expected_ancestors = self.node_ancestors[nd.label]
+            self.assertSequenceEqual(ancestors, expected_ancestors)
+
+    def test_ancestor_iterator_exclusive_filtered(self):
+        """Exclusive ancestor iteration composes with the edge-length filter."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        filter_fn = lambda x: x.edge.length > 13
+        for nd in anodes:
+            expected_ancestors = self.node_ancestors[nd.label]
+            expected_ancestors = [nda for nda in expected_ancestors if self.node_edge_lengths[nda] > 13]
+            ancestors = [ch.label for ch in nd.ancestor_iter(inclusive=False, filter_fn=filter_fn)]
+            self.assertEqual(ancestors, expected_ancestors)
+
+    def test_ancestor_iterator_inclusive_unfiltered(self):
+        """ancestor_iter(inclusive=True) yields the node itself first, then its ancestors."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        for nd in anodes:
+            ancestors = [ch.label for ch in nd.ancestor_iter(inclusive=True)]
+            expected_ancestors = [nd.label] + list(self.node_ancestors[nd.label])
+            self.assertEqual(ancestors, expected_ancestors)
+
+    def test_ancestor_iterator_inclusive_filtered(self):
+        """Inclusive ancestor iteration composes with the edge-length filter."""
+        tree, anodes, lnodes, inodes = self.get_tree()
+        filter_fn = lambda x: x.edge.length > 13
+        for nd in anodes:
+            expected_ancestors = [nd.label] + list(self.node_ancestors[nd.label])
+            expected_ancestors = [nda for nda in expected_ancestors if self.node_edge_lengths[nda] > 13]
+            ancestors = [ch.label for ch in nd.ancestor_iter(inclusive=True, filter_fn=filter_fn)]
+            self.assertEqual(ancestors, expected_ancestors)
+
+class TreeRootingState(dendropytest.ExtendedTestCase):
+    """Placeholder test case for tree rooting state; tests are not yet implemented."""
+
+    def test_is_rooted(self):
+        # Placeholder: fail_incomplete_tests() marks this as pending implementation.
+        self.assertFalse(self.fail_incomplete_tests())
+
+    def test_is_unrooted(self):
+        # Placeholder: fail_incomplete_tests() marks this as pending implementation.
+        self.assertFalse(self.fail_incomplete_tests())
+
+class TestTreeApply(curated_test_tree.CuratedTestTree, unittest.TestCase):
+    """Checks Tree.apply(): before/after callbacks on internal nodes, leaf callback on leaves."""
+
+    def test_apply(self):
+        """apply() visits the curated tree depth-first, calling before_fn on
+        entering an internal node, leaf_fn on each leaf, and after_fn when an
+        internal node's subtree is done; the expected trace below encodes that
+        order for the curated test tree rooted at "a".
+        """
+        tree, anodes, lnodes, inodes = self.get_tree()
+        observed = []
+        before_fn = lambda x: observed.append(("before", x.label))
+        after_fn = lambda x: observed.append(("after", x.label))
+        leaf_fn = lambda x: observed.append(("leaf", x.label))
+        tree.apply(
+                before_fn=before_fn,
+                after_fn=after_fn,
+                leaf_fn=leaf_fn)
+        expected = [
+            ("before", "a"),
+            ("before", "b"),
+            ("leaf", "i"),
+            ("before", "e"),
+            ("leaf", "j"),
+            ("leaf", "k"),
+            ("after", "e"),
+            ("after", "b"),
+            ("before", "c"),
+            ("before", "g"),
+            ("leaf", "l"),
+            ("leaf", "m"),
+            ("after", "g"),
+            ("before", "f"),
+            ("leaf", "n"),
+            ("before", "h"),
+            ("leaf", "o"),
+            ("leaf", "p"),
+            ("after", "h"),
+            ("after", "f"),
+            ("after", "c"),
+            ("after", "a"),
+                ]
+        self.assertEqual(observed, expected)
+
+# Allow running this test module directly from the command line.
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_datamodel_tree_taxon_management.py b/dendropy/test/test_datamodel_tree_taxon_management.py
new file mode 100644
index 0000000..cc051a9
--- /dev/null
+++ b/dendropy/test/test_datamodel_tree_taxon_management.py
@@ -0,0 +1,460 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests Tree taxon management
+"""
+
+import os
+import unittest
+import dendropy
+import collections
+import copy
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import compare_and_validate
+from dendropy.test.support import dendropytest
+
+class TestTreeUpdateTaxonNamespace(
+        curated_test_tree.CuratedTestTree,
+        dendropytest.ExtendedTestCase):
+    """Checks Tree.update_taxon_namespace() discovers taxa attached to nodes."""
+
+    def setUp(self):
+        # Build a tree with taxon creation suppressed, then manually attach a
+        # fresh Taxon to every labeled node so the namespace starts out-of-sync.
+        self.tree1, self.anodes1, self.lnodes1, self.inodes1 = self.get_tree(
+                suppress_internal_node_taxa=True,
+                suppress_leaf_node_taxa=True)
+        self.expected_labels = set([nd.label for nd in self.anodes1 if nd.label is not None])
+        self.expected_taxa = set()
+        for nd in self.tree1:
+            if nd.label is not None:
+                nd.taxon = dendropy.Taxon(label=nd.label)
+                self.expected_taxa.add(nd.taxon)
+        assert len(self.expected_labels) == len(self.anodes1)
+        assert len(self.expected_taxa) == len(self.expected_labels)
+
+    def test_noop_update_with_no_taxa(self):
+        """Updating a tree with no node taxa leaves the (empty) namespace untouched."""
+        tree, anodes, lnodes, inodes = self.get_tree(
+                suppress_internal_node_taxa=True,
+                suppress_leaf_node_taxa=True)
+        original_tns = tree.taxon_namespace
+        self.assertEqual(len(original_tns), 0)
+        tree.update_taxon_namespace()
+        self.assertIs(tree.taxon_namespace, original_tns)
+        self.assertEqual(len(original_tns), 0)
+
+    def test_noop_update(self):
+        """Updating when all node taxa are already registered changes nothing."""
+        tree, anodes, lnodes, inodes = self.get_tree(
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        original_tns = tree.taxon_namespace
+        original_taxa = [t for t in original_tns]
+        original_labels = [t.label for t in original_tns]
+        tree.update_taxon_namespace()
+        self.assertIs(tree.taxon_namespace, original_tns)
+        new_taxa = [t for t in original_tns]
+        new_labels = [t.label for t in original_tns]
+        self.assertEqual(new_taxa, original_taxa)
+        self.assertEqual(new_labels, original_labels)
+
+    def test_update_taxon_namespace(self):
+        """update_taxon_namespace() registers every node-attached taxon in place."""
+        tns1 = self.tree1.taxon_namespace
+        self.assertEqual(len(tns1), 0)
+        tns2 = self.tree1.update_taxon_namespace()
+        self.assertIs(self.tree1.taxon_namespace, tns1)
+        self.assertEqual(set(tns2._taxa), self.expected_taxa)
+        self.assertEqual(len(tns2._taxa), len(self.expected_labels))
+
+class TestTreeMigrateAndReconstructTaxonNamespace(
+        curated_test_tree.CuratedTestTree,
+        dendropytest.ExtendedTestCase):
+    """Checks Tree.reconstruct_taxon_namespace() and Tree.migrate_taxon_namespace().
+
+    The taxon labels deliberately include duplicates ("a"/"a"), case variants
+    ("b"/"B", "h"/"H", "j"/"J"), and None values so that unification by label
+    and case-(in)sensitive matching can be exercised.
+    """
+
+    def setUp(self):
+        # Attach a fresh Taxon per node using the map below; each node also
+        # remembers its pre-migration taxon in ``original_taxon``.
+        self.tree, self.anodes, self.lnodes, self.inodes = self.get_tree(
+                suppress_internal_node_taxa=True,
+                suppress_leaf_node_taxa=True)
+        self.node_label_to_taxon_label_map = {
+            "a" : "a",
+            "b" : "a",
+            "c" : "2",
+            "e" : "2",
+            "f" : "b",
+            "g" : "B",
+            "h" : "B",
+            "i" : "h",
+            "j" : "H",
+            "k" : "h",
+            "l" : None,
+            "m" : None,
+            "n" : "H",
+            "o" : "J",
+            "p" : "j",
+                }
+        self.original_taxa = []
+        for idx, nd in enumerate(self.tree):
+            taxon_label = self.node_label_to_taxon_label_map[nd.label]
+            t = dendropy.Taxon(label=taxon_label)
+            self.tree.taxon_namespace.add_taxon(t)
+            nd.taxon = t
+            nd.original_taxon = t
+            self.original_taxa.append(t)
+        assert len(self.tree.taxon_namespace) == len(self.node_label_to_taxon_label_map)
+        assert len(self.tree.taxon_namespace) == len(self.original_taxa)
+
+    def create_redundant_taxa(self):
+        """Collapse all nodes whose taxon label is "J"/"j" onto a single shared taxon."""
+        nodes_to_unify_taxa = []
+        for nd in self.tree:
+            if nd.taxon.label and nd.taxon.label.upper() == "J":
+                nodes_to_unify_taxa.append(nd)
+        assert len(nodes_to_unify_taxa) >= 2
+        utaxon = nodes_to_unify_taxa[0].taxon
+        utaxon.node_label = nodes_to_unify_taxa[0].label
+        for nd in nodes_to_unify_taxa[1:]:
+            self.tree.taxon_namespace.remove_taxon(nd.taxon)
+            self.original_taxa.remove(nd.taxon)
+            nd.original_taxon = utaxon
+            nd.taxon = utaxon
+            del self.node_label_to_taxon_label_map[nd.label]
+            nd.label = utaxon.node_label
+
+    def verify_taxon_namespace_reconstruction(self,
+            unify_taxa_by_label=False,
+            case_sensitive_label_mapping=True,
+            original_tns=None,
+            redundant_taxa=False):
+        """Assert that the rebuilt namespace replaced every original taxon
+        correctly, unifying by label (optionally case-insensitively) when asked.
+        """
+        seen_taxa = []
+        if unify_taxa_by_label:
+            if not case_sensitive_label_mapping:
+                # Case-insensitive: labels unify after upper-casing; None stays distinct.
+                expected_labels = []
+                for label in self.node_label_to_taxon_label_map.values():
+                    if label is None:
+                        expected_labels.append(label)
+                    else:
+                        label = label.upper()
+                        if label not in expected_labels:
+                            expected_labels.append(label)
+            else:
+                expected_labels = list(set(label for label in self.node_label_to_taxon_label_map.values()))
+        else:
+            expected_labels = [label for label in self.node_label_to_taxon_label_map.values()]
+        for nd in self.tree:
+            # Each node must now carry a *new* taxon distinct from its original,
+            # with a matching label (modulo case when mapping is case-insensitive).
+            self.assertIsNot(nd.taxon, nd.original_taxon)
+            if (not case_sensitive_label_mapping) and nd.taxon.label is not None:
+                self.assertEqual(nd.taxon.label.upper(), nd.original_taxon.label.upper())
+                self.assertEqual(self.node_label_to_taxon_label_map[nd.label].upper(), nd.taxon.label.upper())
+            else:
+                self.assertEqual(nd.taxon.label, nd.original_taxon.label)
+                self.assertEqual(self.node_label_to_taxon_label_map[nd.label], nd.taxon.label)
+            self.assertNotIn(nd.original_taxon, self.tree.taxon_namespace)
+            self.assertIn(nd.original_taxon, self.original_taxa)
+            self.assertIn(nd.taxon, self.tree.taxon_namespace)
+            self.assertNotIn(nd.taxon, self.original_taxa)
+            if original_tns is not None:
+                self.assertNotIn(nd.taxon, original_tns)
+            if nd.taxon not in seen_taxa:
+                seen_taxa.append(nd.taxon)
+            else:
+                # A repeated taxon object is only legitimate under unification
+                # or deliberately created redundancy.
+                self.assertTrue(unify_taxa_by_label or redundant_taxa)
+                if not case_sensitive_label_mapping:
+                    self.assertIn(nd.taxon.label, [t.label for t in seen_taxa])
+                else:
+                    if nd.taxon.label is None:
+                        self.assertIs(nd.original_taxon.label, None)
+                        self.assertEqual([t.label for t in seen_taxa].count(None), 1)
+                    else:
+                        x1 = [t.label.upper() for t in seen_taxa if t.label is not None]
+                        self.assertIn(nd.taxon.label.upper(), x1)
+        self.assertEqual(len(self.tree.taxon_namespace), len(expected_labels))
+        if not unify_taxa_by_label and not redundant_taxa:
+            self.assertEqual(len(self.tree.taxon_namespace), len(self.node_label_to_taxon_label_map))
+        self.assertEqual(len(seen_taxa), len(self.tree.taxon_namespace))
+        if not case_sensitive_label_mapping:
+            seen_labels = [(t.label.upper() if t.label is not None else None) for t in seen_taxa]
+        else:
+            seen_labels = [t.label for t in seen_taxa]
+        c1 = collections.Counter(expected_labels)
+        c2 = collections.Counter(seen_labels)
+        self.assertEqual(c1, c2)
+
+    def test_noop_taxon_namespace_reconstruction(self):
+        """Reconstructing into the same, already-consistent namespace changes nothing."""
+        tree, anodes, lnodes, inodes = self.get_tree(
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        original_tns = tree.taxon_namespace
+        original_tns.is_case_sensitive = True
+        original_taxa = [t for t in original_tns]
+        original_labels = [t.label for t in original_tns]
+        tree.reconstruct_taxon_namespace(
+                unify_taxa_by_label=False)
+        self.assertIs(tree.taxon_namespace, original_tns)
+        new_taxa = [t for t in original_tns]
+        new_labels = [t.label for t in original_tns]
+        self.assertEqual(new_taxa, original_taxa)
+        self.assertEqual(new_labels, original_labels)
+
+    def test_reconstruct_taxon_namespace_non_unifying(self):
+        """Reconstruction without unification creates one new taxon per node taxon."""
+        original_tns = self.tree.taxon_namespace
+        new_tns = dendropy.TaxonNamespace()
+        new_tns.is_case_sensitive = True
+        self.tree._taxon_namespace = new_tns
+        self.assertEqual(len(self.tree.taxon_namespace), 0)
+        self.tree.reconstruct_taxon_namespace(unify_taxa_by_label=False)
+        self.assertIsNot(self.tree.taxon_namespace, original_tns)
+        self.assertIs(self.tree.taxon_namespace, new_tns)
+        self.verify_taxon_namespace_reconstruction(
+                unify_taxa_by_label=False,
+                case_sensitive_label_mapping=True)
+
+    def test_reconstruct_taxon_namespace_unifying_case_sensitive(self):
+        """Case-sensitive unification merges only exactly-equal labels."""
+        original_tns = self.tree.taxon_namespace
+        new_tns = dendropy.TaxonNamespace()
+        new_tns.is_case_sensitive = True
+        self.tree._taxon_namespace = new_tns
+        self.assertEqual(len(self.tree.taxon_namespace), 0)
+        self.tree.reconstruct_taxon_namespace(unify_taxa_by_label=True)
+        self.assertIsNot(self.tree.taxon_namespace, original_tns)
+        self.assertIs(self.tree.taxon_namespace, new_tns)
+        self.verify_taxon_namespace_reconstruction(
+                unify_taxa_by_label=True,
+                case_sensitive_label_mapping=True,
+                original_tns=original_tns)
+
+    def test_reconstruct_taxon_namespace_unifying_case_insensitive(self):
+        """Case-insensitive unification merges labels that differ only in case."""
+        original_tns = self.tree.taxon_namespace
+        new_tns = dendropy.TaxonNamespace()
+        new_tns.is_case_sensitive = False
+        self.tree._taxon_namespace = new_tns
+        self.assertEqual(len(self.tree.taxon_namespace), 0)
+        self.tree.reconstruct_taxon_namespace(unify_taxa_by_label=True)
+        self.assertIsNot(self.tree.taxon_namespace, original_tns)
+        self.assertIs(self.tree.taxon_namespace, new_tns)
+        self.verify_taxon_namespace_reconstruction(
+                unify_taxa_by_label=True,
+                case_sensitive_label_mapping=False,
+                original_tns=original_tns)
+
+    def test_reconstruct_taxon_namespace_with_redundant_taxa(self):
+        """Reconstruction handles multiple nodes sharing one taxon object."""
+        for (unify, ci) in [
+                (False, True),
+                (True, True),
+                (True, False), ]:
+            self.setUp()
+            self.create_redundant_taxa()
+            original_tns = self.tree.taxon_namespace
+            new_tns = dendropy.TaxonNamespace()
+            new_tns.is_case_sensitive = ci
+            self.tree._taxon_namespace = new_tns
+            self.assertEqual(len(self.tree.taxon_namespace), 0)
+            self.tree.reconstruct_taxon_namespace(unify_taxa_by_label=unify)
+            self.assertIsNot(self.tree.taxon_namespace, original_tns)
+            self.assertIs(self.tree.taxon_namespace, new_tns)
+            self.verify_taxon_namespace_reconstruction(
+                    unify_taxa_by_label=unify,
+                    case_sensitive_label_mapping=ci,
+                    original_tns=original_tns,
+                    redundant_taxa=True)
+
+    def test_reconstruct_taxon_namespace_mapping(self):
+        """A caller-supplied taxon_mapping_memo dictates old-to-new taxon mapping."""
+        for (unify, ci) in [
+                (False, False),
+                (True, False),
+                (True, True), ]:
+            self.setUp()
+            original_tns = self.tree.taxon_namespace
+            new_tns = dendropy.TaxonNamespace()
+            new_tns.is_case_sensitive = ci
+            self.tree._taxon_namespace = new_tns
+            self.assertEqual(len(self.tree.taxon_namespace), 0)
+            memo = {}
+            for taxon in self.original_taxa:
+                memo[taxon] = dendropy.Taxon()
+            memo_copy = dict(memo)
+            self.tree.reconstruct_taxon_namespace(
+                    unify_taxa_by_label=unify,
+                    taxon_mapping_memo=memo)
+            self.assertIsNot(self.tree.taxon_namespace, original_tns)
+            self.assertIs(self.tree.taxon_namespace, new_tns)
+            for nd in self.tree:
+                self.assertIs(nd.taxon, memo_copy[nd.original_taxon])
+
+    def test_noop_migrate_taxon_namespace(self):
+        """Migrating a tree into its own namespace changes nothing."""
+        tree, anodes, lnodes, inodes = self.get_tree(
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        original_tns = tree.taxon_namespace
+        original_tns.is_case_sensitive = True
+        original_taxa = [t for t in original_tns]
+        original_labels = [t.label for t in original_tns]
+        tree.migrate_taxon_namespace(
+                original_tns,
+                unify_taxa_by_label=False)
+        self.assertIs(tree.taxon_namespace, original_tns)
+        new_taxa = [t for t in original_tns]
+        new_labels = [t.label for t in original_tns]
+        self.assertEqual(new_taxa, original_taxa)
+        self.assertEqual(new_labels, original_labels)
+
+    def test_simple_migrate_taxon_namespace(self):
+        """Migration clones each taxon (same label, different object) into the target."""
+        tree, anodes, lnodes, inodes = self.get_tree(
+                suppress_internal_node_taxa=True,
+                suppress_leaf_node_taxa=True)
+        original_tns = tree.taxon_namespace
+        original_taxa = [t for t in original_tns]
+        original_labels = [t.label for t in original_tns]
+        new_tns = dendropy.TaxonNamespace()
+        new_tns.is_case_sensitive = True
+        tree.migrate_taxon_namespace(
+                new_tns,
+                unify_taxa_by_label=False)
+        self.assertIsNot(tree.taxon_namespace, original_tns)
+        self.assertIs(tree.taxon_namespace, new_tns)
+        new_taxa = [t for t in new_tns]
+        self.assertEqual(len(new_taxa), len(original_taxa))
+        for t1, t2 in zip(new_taxa, original_taxa):
+            self.assertIsNot(t1, t2)
+            self.assertEqual(t1.label, t2.label)
+
+    def test_migrate_taxon_namespace_non_unifying(self):
+        """Migration without unification keeps one new taxon per original taxon."""
+        original_tns = self.tree.taxon_namespace
+        new_tns = dendropy.TaxonNamespace()
+        new_tns.is_case_sensitive = True
+        self.tree.migrate_taxon_namespace(
+                new_tns,
+                unify_taxa_by_label=False)
+        self.assertIsNot(self.tree.taxon_namespace, original_tns)
+        self.assertIs(self.tree.taxon_namespace, new_tns)
+        self.verify_taxon_namespace_reconstruction(
+                unify_taxa_by_label=False,
+                case_sensitive_label_mapping=True,
+                original_tns=original_tns)
+
+    def test_migrate_taxon_namespace_unifying_case_sensitive(self):
+        """Case-sensitive migration unifies only exactly-equal labels."""
+        original_tns = self.tree.taxon_namespace
+        new_tns = dendropy.TaxonNamespace()
+        new_tns.is_case_sensitive = True
+        self.tree.migrate_taxon_namespace(
+                new_tns,
+                unify_taxa_by_label=True)
+        self.assertIsNot(self.tree.taxon_namespace, original_tns)
+        self.assertIs(self.tree.taxon_namespace, new_tns)
+        self.verify_taxon_namespace_reconstruction(
+                unify_taxa_by_label=True,
+                case_sensitive_label_mapping=True,
+                original_tns=original_tns)
+
+    def test_migrate_taxon_namespace_unifying_case_insensitive(self):
+        """Case-insensitive migration unifies labels differing only in case."""
+        original_tns = self.tree.taxon_namespace
+        new_tns = dendropy.TaxonNamespace()
+        new_tns.is_case_sensitive = False
+        self.tree.migrate_taxon_namespace(
+                new_tns,
+                unify_taxa_by_label=True)
+        self.assertIsNot(self.tree.taxon_namespace, original_tns)
+        self.assertIs(self.tree.taxon_namespace, new_tns)
+        self.verify_taxon_namespace_reconstruction(
+                unify_taxa_by_label=True,
+                case_sensitive_label_mapping=False,
+                original_tns=original_tns)
+
+    def test_migrate_taxon_namespace_mapping(self):
+        """A caller-supplied taxon_mapping_memo dictates old-to-new taxon mapping."""
+        for (unify, ci) in [
+                (False, True),
+                (True, True),
+                (True, False), ]:
+            self.setUp()
+            original_tns = self.tree.taxon_namespace
+            new_tns = dendropy.TaxonNamespace()
+            new_tns.is_case_sensitive = ci
+            memo = {}
+            for taxon in self.original_taxa:
+                memo[taxon] = dendropy.Taxon()
+            memo_copy = dict(memo)
+            self.tree.migrate_taxon_namespace(
+                    new_tns,
+                    unify_taxa_by_label=unify,
+                    taxon_mapping_memo=memo)
+            self.assertIsNot(self.tree.taxon_namespace, original_tns)
+            self.assertIs(self.tree.taxon_namespace, new_tns)
+            for nd in self.tree:
+                self.assertIs(nd.taxon, memo_copy[nd.original_taxon])
+
+    def test_migrate_taxon_namespace_with_redundant_taxa(self):
+        """Migration handles multiple nodes sharing one taxon object."""
+        for (unify, ci) in [
+                (False, True),
+                (True, True),
+                (True, False), ]:
+            self.setUp()
+            self.create_redundant_taxa()
+            original_tns = self.tree.taxon_namespace
+            new_tns = dendropy.TaxonNamespace()
+            new_tns.is_case_sensitive = ci
+            self.tree.migrate_taxon_namespace(
+                    new_tns,
+                    unify_taxa_by_label=unify)
+            self.assertIsNot(self.tree.taxon_namespace, original_tns)
+            self.assertIs(self.tree.taxon_namespace, new_tns)
+            self.verify_taxon_namespace_reconstruction(
+                    unify_taxa_by_label=unify,
+                    case_sensitive_label_mapping=ci,
+                    original_tns=original_tns,
+                    redundant_taxa=True)
+
+    def test_unassign_taxa(self):
+        # Placeholder: flags this test as incomplete until implemented.
+        self.assertFalse(self.fail_incomplete_tests())
+
+    def test_randomly_assign_taxa(self):
+        # Placeholder: flags this test as incomplete until implemented.
+        self.assertFalse(self.fail_incomplete_tests())
+
+class TestTreeTaxa(
+        curated_test_tree.CuratedTestTree,
+        dendropytest.ExtendedTestCase):
+
+    def setUp(self):
+        self.tree1, self.anodes1, self.lnodes1, self.inodes1 = self.get_tree(
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        self.expected_taxa = set([nd.taxon for nd in self.anodes1 if nd.taxon is not None])
+
+    def test_basic_taxa(self):
+        self.assertEqual(self.tree1.poll_taxa(), self.expected_taxa)
+
+class TestTreePurgeTaxonNamespace(
+        curated_test_tree.CuratedTestTree,
+        dendropytest.ExtendedTestCase):
+
+    def setUp(self):
+        self.tree1, self.anodes1, self.lnodes1, self.inodes1 = self.get_tree(
+                suppress_internal_node_taxa=False,
+                suppress_leaf_node_taxa=False)
+        self.expected_taxa = set([nd.taxon for nd in self.anodes1 if nd.taxon is not None])
+
+    def test_noop_purge(self):
+        self.assertEqual(set(self.tree1.taxon_namespace), self.expected_taxa)
+        self.tree1.purge_taxon_namespace()
+        self.assertEqual(set(self.tree1.taxon_namespace), self.expected_taxa)
+
+    def test_basic_purge(self):
+        self.assertEqual(set(self.tree1.taxon_namespace), self.expected_taxa)
+        added_taxa = set(self.expected_taxa)
+        for label in ("z1", "z2", "z3", "z4"):
+            t = self.tree1.taxon_namespace.new_taxon(label=label)
+            added_taxa.add(t)
+        self.assertEqual(set(self.tree1.taxon_namespace), added_taxa)
+        self.tree1.purge_taxon_namespace()
+        self.assertEqual(set(self.tree1.taxon_namespace), self.expected_taxa)
+
+# Allow running this test module directly from the command line.
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_datamodel_treearray.py b/dendropy/test/test_datamodel_treearray.py
new file mode 100644
index 0000000..23f27ea
--- /dev/null
+++ b/dendropy/test/test_datamodel_treearray.py
@@ -0,0 +1,59 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+import unittest
+from dendropy.test.support import pathmap
+import dendropy
+
+class TreeArrayBasicTreeAccession(unittest.TestCase):
+
+    def get_trees(self, taxon_namespace=None):
+        trees = dendropy.TreeList.get_from_path(pathmap.tree_source_path(
+                "pythonidae.reference-trees.nexus"),
+                "nexus",
+                taxon_namespace=taxon_namespace)
+        return trees
+
+    def verify_tree_array(self, tree_array, source_trees, ignore_edges=False):
+        self.assertEqual(len(tree_array), len(source_trees))
+        for idx, source_tree in enumerate(source_trees):
+            source_splits = [b.split_bitmask for b in source_tree.encode_bipartitions()]
+            tss_splits, tss_edges = tree_array.get_split_bitmask_and_edge_tuple(idx)
+            self.assertEqual(len(tss_splits), len(source_splits))
+            self.assertEqual(set(tss_splits), set(source_splits))
+            # since encoding is done in postorder, we can rely on correspondence of index ...
+            for idx, nd in enumerate(source_tree.postorder_node_iter()):
+                source_split = nd.edge.bipartition.split_bitmask
+                tss_split = tss_splits[idx]
+                tss_edge_length = tss_edges[idx]
+                self.assertEqual(source_split, tss_split)
+                if nd.edge.length is None:
+                    self.assertEqual(tss_edge_length, 0)
+                else:
+                    self.assertAlmostEqual(tss_edge_length, nd.edge.length)
+
+    def test_add_tree(self):
+        trees = self.get_trees()
+        tree_array = dendropy.TreeArray(taxon_namespace=trees.taxon_namespace)
+        for tree in trees:
+            tree_array.add_tree(tree)
+        self.verify_tree_array(tree_array, trees)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/dendropy/test/test_fitch.py b/dendropy/test/test_fitch.py
new file mode 100644
index 0000000..6cdf0ea
--- /dev/null
+++ b/dendropy/test/test_fitch.py
@@ -0,0 +1,93 @@
+
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests of tree metrics.
+"""
+
+import random
+import unittest
+import math
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+import sys
+if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
+    from dendropy.utility.filesys import pre_py34_open as open
+
+import dendropy
+from dendropy.calculate.treescore import fitch_down_pass
+from dendropy.test.support import pathmap
+
+class FitchTest(unittest.TestCase):
+
+    def test_pscores_with_gaps_as_new_state(self):
+        # #NEXUS
+        # begin paup;
+        #     set warnroot = no;
+        #     exe apternodus.chars.nexus;
+        #     gett file = apternodus.tre;
+        #     set criterion = parsimony;
+        #     pset gap = newstate;
+        #     pscore;
+        # end;
+        expected_scores = [396, 396, 396, 396, 396, 396, 396, 396, 396, 396, 396, 396, 396, 396, 396, 396, 713, 715, 723, 733, 672, 719, 734, 709, 695, 686]
+        self.verify_pscores("apternodus.chars.nexus", "apternodus.tre", False, expected_scores)
+
+    def test_pscores_with_gaps_as_missing(self):
+        # #NEXUS
+        # begin paup;
+        #     set warnroot = no;
+        #     exe apternodus.chars.nexus;
+        #     gett file = apternodus.tre;
+        #     set criterion = parsimony;
+        #     pset gap = missing;
+        #     pscore;
+        # end;
+        expected_scores = [ 370, 370, 370, 370, 370, 370, 370, 370, 370, 370, 370, 370, 370, 370, 370, 370, 671, 670, 678, 687, 633, 675, 689, 668, 652, 644]
+        self.verify_pscores("apternodus.chars.nexus", "apternodus.tre", True, expected_scores)
+
+
+    def verify_pscores(self, char_fname, trees_fname, gaps_as_missing, expected_scores):
+        dataset = dendropy.DataSet.get_from_path(
+                pathmap.char_source_path(char_fname),
+                "nexus")
+        dataset.read_from_path(
+                pathmap.tree_source_path(trees_fname),
+                schema='NEXUS',
+                taxon_namespace=dataset.taxon_namespaces[0])
+        char_mat = dataset.char_matrices[0]
+        # sa = char_mat.default_state_alphabet
+        # for x in sa:
+        #     print("{}: {}".format(x, x.is_gap_state))
+        # for x in sa:
+        #     print("{}\t{}\t{}\t\t\t\t{}".format(x, x._index, x.fundamental_indexes, x.fundamental_indexes_with_gaps_as_missing))
+        taxon_state_sets_map = char_mat.taxon_state_sets_map(gaps_as_missing=gaps_as_missing)
+        tree_list = dataset.tree_lists[0]
+        self.assertEqual(len(expected_scores), len(tree_list))
+        for n, tree in enumerate(tree_list):
+            node_list = tree.postorder_node_iter()
+            pscore = fitch_down_pass(node_list, taxon_state_sets_map=taxon_state_sets_map)
+            # print("{} vs. {}".format(expected_scores[n], pscore))
+            self.assertEqual(expected_scores[n], pscore)
+
+if __name__ == "__main__":
+    unittest.main()
+
diff --git a/dendropy/test/test_paup.py b/dendropy/test/test_paup.py
new file mode 100644
index 0000000..c226bdd
--- /dev/null
+++ b/dendropy/test/test_paup.py
@@ -0,0 +1,259 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Test the PAUP* wrapper
+"""
+
+import os
+import sys
+import csv
+import collections
+import unittest
+
+from dendropy.test.support import pathmap
+from dendropy.test.support import paupsplitsreference
+from dendropy.test.support.dendropytest import ExtendedTestCase
+from dendropy.utility import messaging
+if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
+    from dendropy.utility.filesys import pre_py34_open as open
+_LOG = messaging.get_logger(__name__)
+
+from dendropy.utility import bitprocessing
+from dendropy.interop import paup
+from dendropy import Bipartition
+
+if not paup.DENDROPY_PAUP_INTEROPERABILITY:
+    _LOG.warn("PAUP interoperability not available: skipping PAUP tests")
+else:
+
+    class PaupWrapperRepToSplitMaskTest(unittest.TestCase):
+
+        def setUp(self):
+            self.ps = paup.PaupService()
+
+        def testUnnormalized(self):
+            for i in range(0xFF):
+                s = bitprocessing.int_as_bitstring(i, 8, ".", "*")[::-1]
+                r = paup.PaupService.bipartition_groups_to_split_bitmask(s, normalized=False)
+                self.assertEqual(r, i, "%s  =>  %s  =>  %s" \
+                    % (bitprocessing.int_as_bitstring(i, 8), s, bitprocessing.int_as_bitstring(r, 8)))
+
+        def testNormalized0(self):
+            for i in range(0xFF):
+                s = bitprocessing.int_as_bitstring(i, 8, "*", ".")[::-1]
+                r = paup.PaupService.bipartition_groups_to_split_bitmask(s, normalized=True)
+                normalized = Bipartition.normalize_bitmask(i, 0xFF, 1)
+                self.assertEqual(r, normalized, "%s  =>  %s  =>  %s" \
+                    % (bitprocessing.int_as_bitstring(i, 8), s, bitprocessing.int_as_bitstring(normalized, 8)))
+
+        def testNormalized1(self):
+            for i in range(0xFF):
+                s = bitprocessing.int_as_bitstring(i, 8, ".", "*")[::-1]
+                r = paup.PaupService.bipartition_groups_to_split_bitmask(s, normalized=True)
+                normalized = Bipartition.normalize_bitmask(i, 0xFF, 1)
+                self.assertEqual(r, normalized, "%s  =>  %s  =>  %s" \
+                    % (bitprocessing.int_as_bitstring(i, 8), s, bitprocessing.int_as_bitstring(normalized, 8)))
+
+    class PaupWrapperSplitsParse(ExtendedTestCase):
+        """
+        Runs PAUP* split counting over reference tree files and checks the
+        resulting bipartition counts/frequencies against precomputed
+        reference split tables.
+        """
+
+        def check_splits_counting(self,
+                tree_filename,
+                taxa_definition_filepath,
+                splits_filename,
+                paup_as_rooted,
+                paup_use_tree_weights,
+                paup_burnin,
+                expected_taxon_labels,
+                expected_is_rooted,
+                expected_num_trees,
+                ):
+            """
+            Counts splits in ``tree_filename`` via PAUP* and compares the
+            taxa, rooting state, tree count, and per-split counts and
+            frequencies against the reference table in ``splits_filename``.
+            """
+            tree_filepath = pathmap.tree_source_path(tree_filename)
+            paup_service = paup.PaupService()
+            result = paup_service.count_splits_from_files(
+                    tree_filepaths=[tree_filepath],
+                    taxa_definition_filepath=taxa_definition_filepath,
+                    is_rooted=paup_as_rooted,
+                    use_tree_weights=paup_use_tree_weights,
+                    burnin=paup_burnin,
+                    )
+            num_trees = result["num_trees"]
+            bipartition_counts = result["bipartition_counts"]
+            bipartition_freqs = result["bipartition_freqs"]
+            taxon_namespace = result["taxon_namespace"]
+            is_rooted = result["is_rooted"]
+
+            # check taxon namespace
+            self.assertEqual(len(taxon_namespace), len(expected_taxon_labels))
+            for taxon, expected_label in zip(taxon_namespace, expected_taxon_labels):
+                self.assertEqual(taxon.label, expected_label)
+
+            # check general tree state
+            self.assertEqual(num_trees, expected_num_trees)
+            self.assertIs(is_rooted, expected_is_rooted)
+
+            splits_ref = paupsplitsreference.get_splits_reference(
+                    splits_filename=splits_filename,
+                    key_column_index=0,
+                    )
+            self.assertEqual(len(splits_ref), len(bipartition_counts))
+            self.assertEqual(len(splits_ref), len(bipartition_freqs))
+            # Rooted analyses use raw bitmasks; unrooted ones are normalized.
+            if is_rooted:
+                splits_ref_bitmasks = set([splits_ref[x]["unnormalized_split_bitmask"] for x in splits_ref])
+            else:
+                splits_ref_bitmasks = set([splits_ref[x]["normalized_split_bitmask"] for x in splits_ref])
+            counts_keys = set(bipartition_counts.keys())
+            freqs_keys = set(bipartition_freqs.keys())
+            self.assertEqual(len(counts_keys), len(splits_ref_bitmasks))
+            self.assertEqual(counts_keys, splits_ref_bitmasks, "\n    {}\n\n    {}\n\n".format(sorted(counts_keys), sorted(splits_ref_bitmasks)))
+            for split_str_rep in splits_ref:
+                ref = splits_ref[split_str_rep]
+                self.assertEqual(split_str_rep, ref["bipartition_string"])
+                self.assertEqual(paup.PaupService.bipartition_groups_to_split_bitmask(split_str_rep, normalized=False),
+                        ref["unnormalized_split_bitmask"])
+                self.assertEqual(paup.PaupService.bipartition_groups_to_split_bitmask(split_str_rep, normalized=True),
+                        ref["normalized_split_bitmask"])
+                split_bitmask = paup.PaupService.bipartition_groups_to_split_bitmask(split_str_rep, normalized=not is_rooted)
+                self.assertEqual(bipartition_counts[split_bitmask], ref["count"])
+                # self.assertAlmostEqual(bipartition_freqs[split_bitmask], ref["frequency"])
+                self.assertAlmostEqual(bipartition_freqs[split_bitmask], ref["frequency"], 2) # PAUP* 4.10b: not very precise
+
+        def test_group1(self):
+            """
+            Exercises check_splits_counting() over every combination of tree
+            source, weighting, rooting interpretation, and burn-in.
+            """
+            cetacean_taxon_labels = [
+                "Bos taurus",
+                "Balaena mysticetus",
+                "Balaenoptera physalus",
+                "Cephalorhynchus eutropia",
+                "Delphinapterus leucas",
+                "Delphinus delphis",
+                "Eschrichtius robustus",
+                "Globicephala melas",
+                "Inia geoffrensis",
+                "Kogia breviceps",
+                "Kogia simus",
+                "Lagenorhynchus albirostris",
+                "Lagenorhynchus obscurus",
+                "Lissodelphis peronii",
+                "Megaptera novaeangliae",
+                "Mesoplodon europaeus",
+                "Mesoplodon peruvianus",
+                "Phocoena phocoena",
+                "Phocoena spinipinnis",
+                "Physeter catodon",
+                "Tursiops truncatus",
+                "Ziphius cavirostris",
+            ]
+            issue_mth_taxon_labels = ["T{:02d}".format(i) for i in range(1, 60)]
+            # (filename, total trees in file, treefile is rooted, treefile is weighted)
+            sources = [
+                    ("cetaceans.mb.no-clock.mcmc.trees"    , 251, False, False), # Trees explicitly unrooted
+                    ("cetaceans.mb.no-clock.mcmc.weighted-01.trees" , 251, False , True), # Weighted
+                    ("cetaceans.mb.no-clock.mcmc.weighted-02.trees" , 251, False , True), # Weighted
+                    ("cetaceans.mb.no-clock.mcmc.weighted-03.trees" , 251, False , True), # Weighted
+                    ("cetaceans.mb.strict-clock.mcmc.trees", 251, True , False), # Trees explicitly rooted
+                    ("cetaceans.mb.strict-clock.mcmc.weighted-01.trees" , 251, True , True), # Weighted
+                    ("cetaceans.mb.strict-clock.mcmc.weighted-02.trees" , 251, True , True), # Weighted
+                    ("cetaceans.mb.strict-clock.mcmc.weighted-03.trees" , 251, True , True), # Weighted
+                    ("cetaceans.raxml.bootstraps.trees"    , 250, True , False), # No tree rooting statement; PAUP defaults to rooted, DendroPy defaults to unrooted
+                    ("cetaceans.raxml.bootstraps.weighted-01.trees"    , 250, True , False), # No tree rooting statement; PAUP defaults to rooted, DendroPy defaults to unrooted
+                    ("cetaceans.raxml.bootstraps.weighted-02.trees"    , 250, True , False), # No tree rooting statement; PAUP defaults to rooted, DendroPy defaults to unrooted
+                    ("cetaceans.raxml.bootstraps.weighted-03.trees"    , 250, True , False), # No tree rooting statement; PAUP defaults to rooted, DendroPy defaults to unrooted
+                    ("issue_mth_2009-02-03.rooted.nexus"   , 100, True , False), # 100 trees (frequency column not reported by PAUP)
+                    ("issue_mth_2009-02-03.unrooted.nexus" , 100, False , False), # 100 trees (frequency column not reported by PAUP)
+            ]
+            splits_filename_template = "{stemname}.is-rooted-{is_rooted}.use-tree-weights-{use_weights}.burnin-{burnin}.splits.txt"
+            for tree_filename, num_trees, treefile_is_rooted, treefile_is_weighted in sources:
+                stemname = tree_filename
+                if "cetacean" in tree_filename:
+                    expected_taxon_labels = cetacean_taxon_labels
+                    taxa_definition_filepath = pathmap.tree_source_path("cetaceans.taxa.nex")
+                else:
+                    expected_taxon_labels = issue_mth_taxon_labels
+                    taxa_definition_filepath = pathmap.tree_source_path("issue_mth_2009-02-03.unrooted.nexus")
+                for use_weights in (False, True, None):
+                    for paup_read_as_rooted in (None, True, False):
+                        for paup_burnin in (0, 150):
+                            if tree_filename.startswith("issue_mth") and paup_burnin > 0:
+                                continue
+                            # When no rooting is forced, PAUP* follows the file.
+                            if paup_read_as_rooted is None:
+                                expected_is_rooted = treefile_is_rooted
+                            elif paup_read_as_rooted:
+                                expected_is_rooted = True
+                            else:
+                                expected_is_rooted = False
+                            splits_filename = splits_filename_template.format(
+                                    stemname=stemname,
+                                    is_rooted=paup_read_as_rooted,
+                                    use_weights=use_weights,
+                                    burnin=paup_burnin)
+                            self.check_splits_counting(
+                                    tree_filename=tree_filename,
+                                    taxa_definition_filepath=taxa_definition_filepath,
+                                    splits_filename=splits_filename,
+                                    paup_as_rooted=paup_read_as_rooted,
+                                    paup_use_tree_weights=use_weights,
+                                    paup_burnin=paup_burnin,
+                                    expected_taxon_labels=expected_taxon_labels,
+                                    expected_is_rooted=expected_is_rooted,
+                                    expected_num_trees=num_trees-paup_burnin)
+
+    class PaupWrapperTaxaParse(ExtendedTestCase):
+
+        def setUp(self):
+            self.taxa_filepath = None
+            self.expected_taxlabels = None
+
+        def check_labels(self):
+            p = paup.PaupRunner()
+            p.stage_execute_file(self.taxa_filepath)
+            p.stage_list_taxa()
+            p.run()
+            taxon_namespace = p.parse_taxon_namespace()
+            self.assertEqual(len(taxon_namespace), len(self.expected_taxlabels))
+            for i, t in enumerate(taxon_namespace):
+                self.assertEqual(t.label, self.expected_taxlabels[i])
+
+    class PaupWrapperTaxaParseTest1(PaupWrapperTaxaParse):
+
+        def setUp(self):
+            self.taxa_filepath = pathmap.data_source_path(["trees", "feb032009.tre"])
+            self.expected_taxlabels = ("T01", "T02", "T03", "T04", "T05", "T06",
+                                       "T07", "T08", "T09", "T10", "T11", "T12", "T13", "T14",
+                                       "T15", "T16", "T17", "T18", "T19", "T20", "T21", "T22",
+                                       "T23", "T24", "T25", "T26", "T27", "T28", "T29", "T30",
+                                       "T31", "T32", "T33", "T34", "T35", "T36", "T37", "T38",
+                                       "T39", "T40", "T41", "T42", "T43", "T44", "T45", "T46",
+                                       "T47", "T48", "T49", "T50", "T51", "T52", "T53", "T54",
+                                       "T55", "T56", "T57", "T58", "T59")
+
+        def runTest(self):
+            self.check_labels
+
+    class PaupWrapperTaxaParseTest2(PaupWrapperTaxaParse):
+
+        def setUp(self):
+            self.taxa_filepath = pathmap.data_source_path(["chars", "primates.chars.nexus"])
+            self.expected_taxlabels = ("Lemur catta", "Homo sapiens",
+                    "Pan", "Gorilla", "Pongo", "Hylobates", "Macaca fuscata",
+                    "Macaca mulatta", "Macaca fascicularis", "Macaca sylvanus",
+                    "Saimiri sciureus", "Tarsius syrichta", )
+
+        def runTest(self):
+            self.check_labels
+
+    if __name__ == "__main__":
+        unittest.main()
diff --git a/dendropy/test/test_popgenstat.py b/dendropy/test/test_popgenstat.py
new file mode 100644
index 0000000..e38ce70
--- /dev/null
+++ b/dendropy/test/test_popgenstat.py
@@ -0,0 +1,109 @@
+#! /usr/bin/env python
+
+############################################################################
+##  test_popgenstat.py
+##
+##  Part of the DendroPy library for phylogenetic computing.
+##
+##  Copyright 2008 Jeet Sukumaran and Mark T. Holder.
+##
+##  This program is free software; you can redistribute it and/or modify
+##  it under the terms of the GNU General Public License as published by
+##  the Free Software Foundation; either version 3 of the License, or
+##  (at your option) any later version.
+##
+##  This program is distributed in the hope that it will be useful,
+##  but WITHOUT ANY WARRANTY; without even the implied warranty of
+##  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+##  GNU General Public License for more details.
+##
+##  You should have received a copy of the GNU General Public License along
+##  with this program. If not, see <http://www.gnu.org/licenses/>.
+##
+############################################################################
+
+"""
+Tests population genetic statistic calculation.
+"""
+
+import unittest
+import math
+import dendropy
+from dendropy.test.support import dendropytest
+from dendropy.test.support import pathmap
+from dendropy.utility import messaging
+from dendropy.calculate import popgenstat
+_LOG = messaging.get_logger(__name__)
+
+class TajimasDTests(dendropytest.ExtendedTestCase):
+    """
+    Checks Tajima's D on a small hand-built DNA alignment against a
+    precomputed reference value.
+    """
+
+    def setUp(self):
+        # 10 sequences of 41 sites, mostly 'A' with scattered variable sites.
+        s = """\
+            >s1
+            ATAATAAAAA AATAATAAAA AAATAAAAAA AATAAAAAAA A
+            >s2
+            AAAAAAAATA AATAATAAAA AAATAAAAAA AAAAAAAAAA A
+            >s3
+            AAAATAAAAA TATAATAAAA AAATATAAAA AAAAAAAAAA A
+            >s4
+            AAAAAAAAAA AATAATAAAA AAATAAATAA ATAAAAAAAA A
+            >s5
+            AAAATAAAAA AAATATAAAA AAATAAAAAA AAAAAAAAAA A
+            >s6
+            AAAATAAAAA AAAAATAAAA AAAAAAAAAA AAAAATAAAA A
+            >s7
+            AAAAAATAAA AATAATAAAA AAATAAAAAA AAAAAAAAAA A
+            >s8
+            AAAAAAAAAA AAAAATAAAA AAATAAAAAA AAAAAAAAAT A
+            >s9
+            AAAAAAAAAA AAAAAAAAAA AAATAAAAAA AAAAAAAAAA A
+            >s10
+            AAAAAAAAAA AAAAATAAAA AAATAATAAA AAAAAAAAAA A"""
+        self.matrix = dendropy.DnaCharacterMatrix.get_from_string(s, 'fasta')
+
+    def testTajimasD(self):
+        self.assertAlmostEqual(popgenstat.tajimas_d(self.matrix), -1.44617198561, 4)
+
+class SinglePopTest(dendropytest.ExtendedTestCase):
+
+    data = dendropy.DnaCharacterMatrix.get_from_path(pathmap.char_source_path('COII_Apes.nex'), schema="nexus")
+
+    def test_num_segregating_sites(self):
+        self.assertEqual(popgenstat.num_segregating_sites(self.data, ignore_uncertain=True), 183)
+
+    def test_average_number_of_pairwise_differences(self):
+        self.assertAlmostEqual(popgenstat.average_number_of_pairwise_differences(self.data, ignore_uncertain=True),  62.75000, 4)
+
+    def test_nucleotide_diversity(self):
+        self.assertAlmostEqual(popgenstat.nucleotide_diversity(self.data, ignore_uncertain=True), 0.09174, 4)
+
+    def test_tajimas_d(self):
+        self.assertAlmostEqual(popgenstat.tajimas_d(self.data, ignore_uncertain=True), 1.12467, 4)
+
+    def test_wattersons_theta(self):
+        self.assertAlmostEqual(popgenstat.wattersons_theta(self.data, ignore_uncertain=True), 49.00528, 4)
+
+class PopulationPairSummaryStatisticsTests(dendropytest.ExtendedTestCase):
+
+    def testPopulationPairSummaryStatistics(self):
+        seqs = dendropy.DnaCharacterMatrix.get_from_path(pathmap.char_source_path('orti.nex'), schema="nexus")
+        p1 = []
+        p2 = []
+        for idx, t in enumerate(seqs.taxon_namespace):
+            if t.label.startswith('EPAC'):
+                p1.append(seqs[t])
+            else:
+                p2.append(seqs[t])
+        pp = popgenstat.PopulationPairSummaryStatistics(p1, p2)
+        self.assertAlmostEqual(pp.average_number_of_pairwise_differences, 11.28063, 4)
+        self.assertAlmostEqual(pp.average_number_of_pairwise_differences_between, 16.119047619, 4)
+        self.assertAlmostEqual(pp.average_number_of_pairwise_differences_within, 10.2191697192, 4)
+        self.assertAlmostEqual(pp.average_number_of_pairwise_differences_net, 5.89987789988, 4)
+        self.assertEqual(pp.num_segregating_sites, 29)
+        self.assertAlmostEqual(pp.wattersons_theta, 7.85734688643, 4)
+        self.assertAlmostEqual(pp.tajimas_d, 1.65318627677, 4)
+        self.assertAlmostEqual(pp.wakeleys_psi, 0.8034976, 2)
+
+if __name__ == "__main__":
+    unittest.main()
+
diff --git a/dendropy/test/test_statistics.py b/dendropy/test/test_statistics.py
new file mode 100644
index 0000000..3aba71e
--- /dev/null
+++ b/dendropy/test/test_statistics.py
@@ -0,0 +1,644 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests statistical routines.
+"""
+
+import unittest
+from dendropy.test.support import dendropytest
+from dendropy.calculate import statistics
+from dendropy.utility import messaging
+
+_LOG = messaging.get_logger(__name__)
+
+class HpdCalculationTests(dendropytest.ExtendedTestCase):
+    """
+    Test reference values as given by ``hpd.emp``, in the "TeachingDemos" package
+    of R.
+    """
+
+    def testEmpiricalUnimodalHpd1(self):
+        """95% empirical HPD of a unimodal sample drawn from Normal(mu=100, sd=1)."""
+        # Sample drawn from Normal(mu=100, sd=1)
+        v = [100.93356, 100.66576, 99.44097, 100.60761, 103.65723, 101.15563,
+                99.09657, 100.39654, 98.77339, 101.13712, 99.33979, 99.99060,
+                100.39395, 101.68240, 100.99664, 99.17798, 100.83020, 98.90373,
+                100.30441, 99.49553, 100.52652, 99.76291, 99.95605, 99.63605,
+                99.21535, 100.51619, 100.55036, 101.21747, 101.04181, 97.76084,
+                100.19069, 99.46182, 100.47579, 99.56889, 100.23977, 101.22907,
+                97.85931, 100.86051, 99.56121, 100.44109, 100.02328, 98.62446,
+                100.11008, 100.12700, 99.27087, 100.72895, 99.06796, 99.38019,
+                99.79908, 100.82761, 101.26901, 99.88911, 98.09761, 99.16706,
+                98.98752, 100.10088, 100.58883, 99.42982, 101.90322, 101.22817,
+                101.36052, 97.70629, 100.15950, 99.39458, 100.19414, 103.43317,
+                100.32429, 98.90429, 101.28049, 99.82948, 100.96041, 99.46024,
+                98.22509, 101.63878, 100.66998, 101.82238, 99.49847, 100.41055,
+                98.71792, 99.66001, 98.53177, 99.11997, 100.14802, 98.96423,
+                101.93145, 100.09478, 100.85930, 99.82181, 101.50284, 99.93301,
+                99.57168, 98.19978, 100.90708, 99.25086, 101.74170, 99.86034,
+                99.85785, 99.89154, 99.62313, 99.41994,]
+        # Intermediate trace from the R reference run (hpd.emp, TeachingDemos):
+        #==> 101.82238 97.70629
+        #==> 101.90322 97.76084
+        #==> 101.93145 97.85931
+        #==> 103.43317 98.09761
+        #==> 103.65723 98.19978
+        #n= 100
+        #nn= 5
+        #xx= [4.11609, 4.142380000000003, 4.0721400000000045, 5.335560000000001, 5.457449999999994]
+        #m= 4.07214
+        #nnn= 100
+        #(97.85931, 101.93145)
+        c1, c2 = statistics.empirical_hpd(v)
+        self.assertAlmostEqual(c1, 97.85931)
+        self.assertAlmostEqual(c2, 101.93145)
+
+    def testEmpiricalUnimodalHpd2(self):
+        """95% empirical HPD of a skewed sample drawn from Exp(rate=0.2)."""
+        # Sample drawn from Exp(rate=0.2)
+        v = [3.27592276968062, 0.471030483022332, 8.69292160732502,
+                5.31655522508031, 11.6689748180798, 3.74156305545426,
+                0.930466635618359, 4.02394197251564, 0.0273082678142286,
+                2.19627505168319, 11.2686246344702, 3.12780772801489,
+                16.3526409110966, 1.03131624741206, 4.43873460812746,
+                1.16054141893983, 1.37002475326881, 4.03690286425358,
+                2.75003841612488, 0.247246073558927, 2.97294339863583,
+                9.91361622656596, 1.40643152873963, 5.06202565485096,
+                2.56700876867399, 5.10566710939115, 8.30197051456789,
+                0.439280721062038, 11.3532735680448, 1.46181986900046,
+                11.1246174474465, 2.24797004368156, 1.79919427493587,
+                8.79207140509944, 4.81857897692776, 2.30751369846985,
+                0.589188064119702, 3.36240844568238, 9.85515167894673,
+                13.7341997859286, 3.04674943210557, 10.2497380129517,
+                15.3365677214208, 0.322058985475451, 2.13952575810254,
+                8.7431202231924, 9.48776975232077, 0.437449288806399,
+                2.91444693226367, 0.234506344422698, 2.30315598892048,
+                11.9319818628238, 1.30209191970236, 1.34823656175286,
+                25.9922393489827, 9.88916845991366, 4.32954248951232,
+                0.748464160133153, 1.30975685780868, 10.16635726164,
+                12.2592059050905, 0.469188864149385, 1.23079363489524,
+                40.8792947675279, 5.14233190545297, 6.33412759730077,
+                4.14186116397752, 0.811017339583486, 2.73471124237403,
+                9.42033216315222, 6.48358419050878, 3.18536503706127,
+                3.99172384842717, 0.936779121402651, 9.05760801355255,
+                5.50938969922668, 1.06717714807019, 8.42135253320348,
+                3.84890870635814, 0.382886157387499, 6.31485156693912,
+                0.300180648919195, 0.8748722959183, 1.82131360052153,
+                3.14994857879356, 0.281196870910665, 7.2329476564647,
+                2.68667792435735, 0.364864277653396, 0.757411941885948,
+                5.50616672241545, 4.88404127282506, 0.167293788399547,
+                8.03142971525326, 10.5768193447809, 14.8404745685177,
+                1.30770040210336, 25.8265917826642, 1.59898946760222,
+                4.81477369067675,]
+        # Intermediate trace from the R reference run (hpd.emp, TeachingDemos):
+        #  n= 100
+        #  nn= 5
+        #  P1= 15.33657 16.35264 25.82659 25.99224 40.87929
+        #  P2= 0.02730827 0.1672938 0.2345063 0.2472461 0.2811969
+        #  xx= 15.30926 16.18535 25.59209 25.74499 40.5981
+        #  m= 15.30926
+        #  nnn= 1
+        #  FINAL =  0.02730827 15.33657
+        #  [1]  0.02730827 15.33656772
+        c1, c2 = statistics.empirical_hpd(v)
+        self.assertAlmostEqual(c1, 0.02730827)
+        self.assertAlmostEqual(c2, 15.33656772)
+
    def testEmpiricalUnimodalHpd3(self):
        """
        Check ``statistics.empirical_hpd()`` against a reference result
        computed independently in R (the raw R trace is reproduced in
        the comments following the data).

        ``v`` is a fixed sample of 100 draws from a Gamma distribution
        with shape=5.5 and rate=100.  The two asserted values are the
        lower and upper bounds of the HPD interval reported by R —
        presumably at ``empirical_hpd()``'s default coverage level;
        confirm against that function's signature.
        """
        # Gamma(shape=5.5, rate=100)
        v = [0.0380707918890542, 0.0613501755118962, 0.0349990175822757,
                0.0619137725677135, 0.07861308334536, 0.0480553266749484,
                0.062862553519841, 0.0682992193072175, 0.0498105967259703,
                0.0391473475922808, 0.0814239076471267, 0.0622768940091115,
                0.0278766690168117, 0.0317487156549716, 0.0586667309884353,
                0.0610901783693829, 0.0614272435233303, 0.0467353725415623,
                0.0235002000890242, 0.085772673901534, 0.0556214310452847,
                0.0670344572022403, 0.0544400125583679, 0.0538488966212504,
                0.05294861204774, 0.0487265157882631, 0.0319744057916896,
                0.0727994868817337, 0.0406117459082881, 0.0411118268978343,
                0.0500761307970585, 0.0308194535724841, 0.0657552493534245,
                0.100523723943228, 0.0465808929090943, 0.0460932186699443,
                0.0561270845816648, 0.0600098984413655, 0.039155736440776,
                0.0477087458173384, 0.0662527620357706, 0.0259218058062224,
                0.0240999453565313, 0.0403149976175261, 0.0397058239610132,
                0.080726331847454, 0.084357840225122, 0.0469807175107997,
                0.0629060978549567, 0.0815708340371883, 0.0662480716451838,
                0.0291424513010887, 0.0423492520899737, 0.0400760974537379,
                0.0988931209604346, 0.0334625360347498, 0.0481980311926021,
                0.0326792585090408, 0.0454491423001323, 0.020993064627905,
                0.0435735408306696, 0.0408747071998941, 0.0152619235644154,
                0.0749659776042904, 0.0568986969556779, 0.0238240850033704,
                0.0546832244279347, 0.0793788099421741, 0.0366737460311167,
                0.0122826115173667, 0.0542719395504513, 0.0426583849776426,
                0.0211571623626521, 0.097984660746214, 0.0909562231889738,
                0.0317473196033018, 0.0683970866878872, 0.0249627875602813,
                0.081633395263259, 0.050187713841904, 0.09452301382497,
                0.0832417097555666, 0.0784842034909532, 0.0329463277742707,
                0.134071786835307, 0.0672633924985841, 0.0492264776710421,
                0.0346193998786818, 0.0608703914888914, 0.0479141586724897,
                0.0653849788769291, 0.0363831431722953, 0.0978132293790966,
                0.0663011935255327, 0.0245590323775349, 0.0438722027031532,
                0.0294721189654155, 0.0482169372325583, 0.0372503987850244,
                0.049380448859450]
        # Raw R trace that produced the expected interval:
        #  n= 100
        #  nn= 5
        #  P1= 0.09781323 0.09798466 0.09889312 0.1005237 0.1340718
        #  P2= 0.01228261 0.01526192 0.02099306 0.02115716 0.0235002
        #  xx= 0.08553062 0.08272274 0.07790006 0.07936656 0.1105716
        #  m= 0.07790006
        #  nnn= 3
        #  FINAL =  0.02099306 0.09889312
        #  [1] 0.02099306 0.09889312
        c1, c2 = statistics.empirical_hpd(v)
        self.assertAlmostEqual(c1, 0.02099306)
        self.assertAlmostEqual(c2, 0.09889312)
+
class TestMedian(unittest.TestCase):
    """
    Median of an odd-length, unsorted sample; example taken from
    http://wiki.python.org/moin/SimplePrograms.
    """

    def testMedian(self):
        sample = [2, 9, 9, 7, 9, 2, 4, 5, 8]
        self.assertEqual(statistics.median(sample), 7)
+
+class TestVarianceCovariance(unittest.TestCase):
+
    def setUp(self):
        """
        Build the shared fixture for the variance-covariance tests:

        - ``self.data``: four rows of 30 float values each.
        - ``self.expected_cov``: the expected 30x30 variance-covariance
          matrix for the 30 columns of ``self.data`` — per the
          commented-out recipe below, it was computed once with
          ``numpy.cov(numpy.array(data), rowvar=0)``.

        The commented-out (Python 2) code is the original generation
        recipe; it is kept so the fixture can be reproduced.
        """
        # vlen = 30
        # ndim = 5
        # data = []
        # data.append([random.gauss(random.randint(0, 1000), 1000) for i in range(vlen)])
        # for dim in range(ndim-1):
        #     u = random.uniform(0, 1)
        #     if u > 0.8:
        #         data.append([x * 3 for x in data[0]])
        #     elif u > 0.6:
        #         data.append([x + random.gauss(0, x/100) for x in data[0]])
        #     elif u > 0.4:
        #         data.append([x + random.gauss(0, x/10) for x in data[0]])
        #     elif u > 0.2:
        #         data.append([random.gauss(0, random.randint(1, 100)) for x in data[0]])
        # print data
        self.data = [[-387.0725787021329, 931.5293654488856, 546.94301742877,
            -1308.9393303633315, 1811.352775963276, 451.3538641126903,
            562.9659558111259, 2279.483383601514, -908.0885971464554,
            -650.165321672863, -426.0293691171515, 1109.1183877724106,
            35.16260780005632, 1453.566295339867, 2838.575344737274,
            370.1667315279002, -853.2876360550472, 1171.341723677777,
            556.5194253749042, 275.59547544853393, 1936.6411793187876,
            -146.60284069858278, 227.34032009264192, 294.9677652833037,
            -230.43349352689938, 2272.2745879401054, -522.4676539536883,
            880.3216676214797, -364.3858002091546, 4.674611872631715],
            [-388.4042113486994, 927.1172167918467, 538.8506889384198,
                -1298.3326898912524, 1813.4425528719585, 449.9792508848036,
                566.6065457390224, 2275.5404727189675, -911.1644303228873,
                -637.7793275095457, -426.24850592012865, 1116.7861772441804,
                34.677032164373, 1435.260152533119, 2859.8928263857206,
                367.6125468449959, -864.1297741225864, 1170.321476247361,
                554.8054179855712, 271.657749074347, 1921.1532376931864,
                -145.65499088854472, 226.57461475353222, 298.7472447593899,
                -231.0886348421088, 2292.785060285147, -528.8141997687727,
                880.8322517812744, -367.48567832266133, 4.599758786967388],
            [-1161.2177361063987, 2794.588096346657, 1640.82905228631,
                -3926.8179910899944, 5434.0583278898275, 1354.061592338071,
                1688.8978674333778, 6838.450150804542, -2724.2657914393662,
                -1950.495965018589, -1278.0881073514545, 3327.355163317232,
                105.48782340016896, 4360.698886019602, 8515.726034211822,
                1110.5001945837007, -2559.8629081651416, 3514.0251710333314,
                1669.5582761247128, 826.7864263456017, 5809.923537956363,
                -439.80852209574834, 682.0209602779257, 884.9032958499112,
                -691.3004805806981, 6816.823763820316, -1567.402961861065,
                2640.965002864439, -1093.1574006274636, 14.023835617895145],
            [-389.0970757367656, 930.0845285660921, 546.6471157339992,
                -1325.4845049714904, 1807.0225831671107, 451.97595156242477,
                563.1808453852201, 2275.359502238535, -910.6620730299588,
                -639.8911116399893, -430.4296686299347, 1114.0085423914036,
                35.23322927194568, 1445.7289312962391, 2869.2649737548804,
                369.61107326701136, -847.7524690958505, 1180.5581555467884,
                558.6182051869164, 270.71734809287057, 1953.153593413107,
                -148.16228159208333, 228.94407431058426, 296.62535613622003,
                -232.98333550881344, 2267.1784698610254, -515.4738926823279,
                874.9725307590513, -360.1574033066678, 4.7265309567945994]]

        # import numpy
        # expected_cov = numpy.cov(numpy.array(data),rowvar=0).tolist()
        # print expected_cov
        self.expected_cov = [[149393.17790377847, -360424.96906892554, -211940.40550363238,
            505544.04928701016, -700254.966766187, -174502.80419746446,
            -217345.74407933108, -881568.5997736979, 350625.3243410929,
            252753.2580470269, 164369.7501328118, -427881.97570378595,
            -13617.530634341638, -563503.0643466829, -1093807.4471157312,
            -143274.32024934815, 329464.18420184636, -452213.78708996583,
            -215077.8426113635, -107087.45068333478, -748475.3164486786,
            56624.942130779986, -87816.4837208772, -113659.46065397991,
            88859.94740941831, -877271.1179979955, 201980.68894789586,
            -340566.08097309095, 140911.57157102908, -1808.2870504090968],
            [-360424.96906892554, 869569.9362145537, 511337.65185982996,
                -1219681.7066146561, 1689442.2338632138, 421008.1622157625,
                524366.7087900256, 2126887.1168150404, -845917.5496523898,
                -609812.9697163638, -396557.36555995565, 1032301.8444213009,
                32854.22747731646, 1359535.4531227027, 2638892.0982596218,
                345667.84863406006, -794860.8161464253, 1091011.1989148206,
                518899.4015696987, 258366.84703955788, 1805784.8802174286,
                -136613.94859184456, 211866.75720555347, 274211.2373116087,
                -214382.2903332192, 2116495.277061747, -487298.17819055147,
                821655.4956393185, -339963.90830350405, 4362.735925535438],
            [-211940.40550363238, 511337.65185982996, 300691.95320246654,
                -717234.9644576052, 993436.5475125475, 247567.57305548087,
                308338.74556175485, 1250675.074863846, -497422.68334818585,
                -358596.5629816347, -233190.50326784851, 607018.0135966156,
                19319.94031851559, 799465.1231295253, 1551742.404717372,
                203265.87160003383, -467383.8843663716, 641555.1887777617,
                305132.059903797, 151928.88274220875, 1061887.3462284554,
                -80335.43600964258, 124586.13005423953, 161240.75009037546,
                -126064.20956241484, 1244532.2694496606, -286532.5812771456,
                483153.3495236603, -199901.83344425834, 2565.5529679555852],
            [505544.04928701016, -1219681.7066146561, -717234.9644576052,
                1710856.9192995324, -2369616.693280398, -590520.9262912686,
                -735476.3899937997, -2983204.334641759, 1186502.1830001853,
                855325.270463226, 556238.3789052436, -1447924.970738861,
                -46083.75449593974, -1906929.2316904257, -3701435.8716964982,
                -484845.3566280408, 1114827.964160791, -1530318.2381011278,
                -727832.8495215549, -362379.6028049886, -2532950.829680498,
                191627.34944563507, -297177.9863681518, -384609.91910996195,
                300706.3619029368, -2968549.589068031, 683441.8508461871,
                -1152443.0012280906, 476810.7250318338, -6119.722146225432],
            [-700254.966766187, 1689442.2338632138, 993436.5475125475,
                -2369616.693280398, 3282358.894459347, 817954.7970777344,
                1018779.1665524077, 4132236.72058621, -1643501.8847934983,
                -1184764.4420874363, -770448.6531432323, 2005628.3012952602,
                63829.770800075996, 2641350.3080521864, 5127000.986694892,
                671578.037266826, -1544337.948133165, 2119663.6178804073,
                1008140.5005908067, 501967.62658711313, 3508319.811483866,
                -265416.7728446663, 411622.71723057824, 532760.6687731618,
                -416512.20772471745, 4112110.4047745992, -946778.110294905,
                1596369.7050428558, -660516.0362065202, 8475.908935267644],
            [-174502.80419746446, 421008.1622157625, 247567.57305548087,
                    -590520.9262912686, 817954.7970777344, 203834.24078224797,
                    253876.05452768996, 1029746.8036606665, -409557.69626571395,
                    -295242.9268632595, -191997.03665307324, 499797.5854784194,
                    15906.615340342569, 658226.857278107, 1277645.5563866766,
                    167357.41134353352, -384836.6886719451, 528222.8533432522,
                    251229.2922489141, 125089.06639656228, 874286.8934292952,
                    -66142.93440720538, 102577.1383801494, 132761.78158270588,
                    -103795.28152140026, 1024715.8636411378, -235927.61123554132,
                    397809.33378739585, -164595.1661642016, 2112.2592771769364],
            [-217345.74407933108, 524366.7087900256, 308338.74556175485,
                    -735476.3899937997, 1018779.1665524077, 253876.05452768996,
                    316210.3607986867, 1282561.1913416046, -510111.4859997901,
                    -367720.8262720667, -239131.93911860665, 622510.5342108728,
                    19811.239619897766, 819812.9135390642, 1591328.923455987,
                    208443.05518058766, -479336.6991379133, 657900.5923021627,
                    312905.72644413053, 155798.6535323029, 1088904.5405440978,
                    -82379.5850977837, 127759.0605202802, 165360.08034015898,
                    -129277.37186774613, 1276326.3970300478, -293863.91038333974,
                    495481.2331138214, -205012.36107524062, 2630.7156238501116],
            [-881568.5997736979, 2126887.1168150404, 1250675.074863846,
                    -2983204.334641759, 4132236.72058621, 1029746.8036606665,
                    1282561.1913416046, 5202179.297296101, -2069043.7281889115,
                    -1491539.2678336531, -969941.4300628213, 2524928.8110790555,
                    80357.76413641141, 3325284.0270826635, 6454508.230033948,
                    845470.0928448668, -1944182.937873276, 2668509.1611595503,
                    1269178.4498434912, 631941.0554721548, 4416754.270064828,
                    -334143.0030964492, 518205.1120158168, 670701.4840533712,
                    -524359.3000358283, 5176801.443597403, -1191905.4989193215,
                    2009703.8383549566, -831531.0005204134, 10670.709481456897],
            [350625.3243410929, -845917.5496523898, -497422.68334818585,
                    1186502.1830001853, -1643501.8847934983, -409557.69626571395,
                    -510111.4859997901, -2069043.7281889115, 822917.5775059358,
                    593214.284884317, 385773.9652874163, -1004238.9922932169,
                    -31960.216388136178, -1322541.9562906092, -2567157.759426228,
                    -336264.6943500905, 773258.0277370816, -1061341.708613389,
                    -504786.57990451134, -251335.81249763246, -1756661.0753439257,
                    132897.84866975705, -206104.45876818095, -266758.9838720577,
                    208553.34492990456, -2058963.6467483111, 474053.7557940749,
                    -799311.7184875482, 330722.3835950521, -4244.009956370862],
            [252753.2580470269, -609812.9697163638, -358596.5629816347,
                    855325.270463226, -1184764.4420874363, -295242.9268632595,
                    -367720.8262720667, -1491539.2678336531, 593214.284884317,
                    427669.4388326606, 278090.68951873126, -723913.6401645798,
                    -23040.250998091913, -953434.0691506112, -1850533.255967261,
                    -242411.14225233067, 557409.7886478706, -765089.5555305473,
                    -363890.3596168801, -181195.07273396023, -1266349.3650786553,
                    95803.21315757626, -148575.82840293963, -192291.63727863415,
                    150337.46468169338, -1484227.4060228057, 341730.2139528351,
                    -576210.7159965835, 238409.02615910847, -3059.4982172104096],
            [164369.7501328118, -396557.36555995565, -233190.50326784851,
                    556238.3789052436, -770448.6531432323, -191997.03665307324,
                    -239131.93911860665, -969941.4300628213, 385773.9652874163,
                    278090.68951873126, 180849.71012352195, -470774.31315405807,
                    -14982.923343675813, -619996.3704957349, -1203464.8189202165,
                    -157637.91511571038, 362482.45126124314, -497552.47194499307,
                    -236640.6363944147, -117821.50366155664, -823524.9396619514,
                    62302.80202420966, -96621.09067510017, -125052.38923511525,
                    97769.0083372963, -965199.8661808498, 222220.7239743777,
                    -374702.7345993242, 155033.09889147282, -1989.630875353818],
            [-427881.97570378595, 1032301.8444213009, 607018.0135966156,
                    -1447924.970738861, 2005628.3012952602, 499797.5854784194,
                    622510.5342108728, 2524928.8110790555, -1004238.9922932169,
                    -723913.6401645798, -470774.31315405807, 1225515.2768995222,
                    39001.91367844581, 1613935.3071920145, 3132811.410855453,
                    410354.6573258249, -943643.2096791578, 1295193.7852807571,
                    616008.64907689, 306712.0378226414, 2143707.851570392,
                    -162179.4927570954, 251516.37232037642, 325538.4205528294,
                    -254505.75555902356, 2512644.2727309032, -578510.1002880113,
                    975430.5141329551, -403595.15688305, 5179.070796881113],
            [-13617.530634341638, 32854.22747731646, 19319.94031851559,
                    -46083.75449593974, 63829.770800075996, 15906.615340342569,
                    19811.239619897766, 80357.76413641141, -31960.216388136178,
                    -23040.250998091913, -14982.923343675813, 39001.91367844581,
                    1241.3385189229937, 51366.7955789965, 99702.3108080859,
                    13060.145803019688, -30030.028192114336, 41221.07079578728,
                    19605.247834362857, 9761.597937414512, 68228.21759586644,
                    -5161.704186270724, 8004.88048572238, 10359.987223487302,
                    -8099.861329113616, 79963.00689497797, -18410.037263166676,
                    31043.267870027597, -12843.928854833162, 164.84178348727528],
            [-563503.0643466829, 1359535.4531227027, 799465.1231295253,
                    -1906929.2316904257, 2641350.3080521864, 658226.857278107,
                    819812.9135390642, 3325284.0270826635, -1322541.9562906092,
                    -953434.0691506112, -619996.3704957349, 1613935.3071920145,
                    51366.7955789965, 2125597.304548632, 4125729.5848259497,
                    540438.4500383266, -1242702.3271705636, 1705741.068189257,
                    811275.3732202249, 403950.436570662, 2823280.188805055,
                    -213590.6110219508, 331244.08755697083, 428707.7124927715,
                    -335174.29690302856, 3308990.0677973754, -761852.8239891479,
                    1284614.197931908, -531509.7065459328, 6821.053891274157],
            [-1093807.4471157312, 2638892.0982596218, 1551742.404717372,
                    -3701435.8716964982, 5127000.986694892, 1277645.5563866766,
                    1591328.923455987, 6454508.230033948, -2567157.759426228,
                    -1850533.255967261, -1203464.8189202165, 3132811.410855453,
                    99702.3108080859, 4125729.5848259497, 8008541.321542145,
                    1048998.5050313922, -2412211.78696484, 3310962.6510743233,
                    1574724.0808363736, 784040.7689848202, 5480085.136142541,
                    -414589.70319374395, 642962.9309969022, 832179.4829438967,
                    -650606.0552617743, 6423061.221356821, -1478817.680854236,
                    2493487.8598607164, -1031695.6660793469, 13239.667773888234],
            [-143274.32024934815, 345667.84863406006, 203265.87160003383,
                    -484845.3566280408, 671578.037266826, 167357.41134353352,
                    208443.05518058766, 845470.0928448668, -336264.6943500905,
                    -242411.14225233067, -157637.91511571038, 410354.6573258249,
                    13060.145803019688, 540438.4500383266, 1048998.5050313922,
                    137408.6008743094, -315966.71928906185, 433694.47866279254,
                    206270.85667665722, 102704.98019558156, 717831.0996346171,
                    -54306.42454874992, 84220.5244337459, 109002.58458148634,
                    -85220.3192824344, 841334.6894181048, -193706.59635809882,
                    326619.8854179307, -135139.83929993995, 1734.2717532579436],
            [329464.18420184636, -794860.8161464253, -467383.8843663716,
                    1114827.964160791, -1544337.948133165, -384836.6886719451,
                    -479336.6991379133, -1944182.937873276, 773258.0277370816,
                    557409.7886478706, 362482.45126124314, -943643.2096791578,
                    -30030.028192114336, -1242702.3271705636, -2412211.78696484,
                    -315966.71928906185, 726637.3819157126, -997265.430360959,
                    -474313.9488545993, -236170.86012900577, -1650566.435514354,
                    124871.17138425406, -193660.3231670929, -250666.41510243676,
                    195962.68392153853, -1934778.5608125532, 445481.2395412975,
                    -751090.921891152, 310783.5962562703, -3987.559218979412],
            [-452213.78708996583, 1091011.1989148206, 641555.1887777617,
                    -1530318.2381011278, 2119663.6178804073, 528222.8533432522,
                    657900.5923021627, 2668509.1611595503, -1061341.708613389,
                    -765089.5555305473, -497552.47194499307, 1295193.7852807571,
                    41221.07079578728, 1705741.068189257, 3310962.6510743233,
                    433694.47866279254, -997265.430360959, 1368864.31907959,
                    651045.8515231148, 324153.44444827526, 2265678.4004814955,
                    -171407.0585552743, 265823.656563293, 344043.76444693253,
                    -268981.31944869336, 2655463.6300199735, -611377.6050845169,
                    1030886.2507075757, -426529.56895785424, 5473.860147105716],
            [-215077.8426113635, 518899.4015696987, 305132.059903797,
                    -727832.8495215549, 1008140.5005908067, 251229.2922489141,
                    312905.72644413053, 1269178.4498434912, -504786.57990451134,
                    -363890.3596168801, -236640.6363944147, 616008.64907689,
                    19605.247834362857, 811275.3732202249, 1574724.0808363736,
                    206270.85667665722, -474313.9488545993, 651045.8515231148,
                    309644.9282722824, 154173.38075181373, 1077579.5867207146,
                    -81522.81306041653, 126428.52608715445, 163630.89540469993,
                    -127929.97219949249, 1262973.7207831193, -290781.4266488454,
                    490304.56733124814, -202864.38927320475, 2603.419562994833],
            [-107087.45068333478, 258366.84703955788, 151928.88274220875,
                    -362379.6028049886, 501967.62658711313, 125089.06639656228,
                    155798.6535323029, 631941.0554721548, -251335.81249763246,
                    -181195.07273396023, -117821.50366155664, 306712.0378226414,
                    9761.597937414512, 403950.436570662, 784040.7689848202,
                    102704.98019558156, -236170.86012900577, 324153.44444827526,
                    154173.38075181373, 76769.35988650756, 536522.2459911266,
                    -40589.550210622714, 62948.51597611402, 81471.77983343764,
                    -63695.14153572459, 628851.9369505029, -144789.68252842987,
                    244132.85332968459, -101012.28661090058, 1296.2207276234788],
            [-748475.3164486786, 1805784.8802174286, 1061887.3462284554,
                    -2532950.829680498, 3508319.811483866, 874286.8934292952,
                    1088904.5405440978, 4416754.270064828, -1756661.0753439257,
                    -1266349.3650786553, -823524.9396619514, 2143707.851570392,
                    68228.21759586644, 2823280.188805055, 5480085.136142541,
                    717831.0996346171, -1650566.435514354, 2265678.4004814955,
                    1077579.5867207146, 536522.2459911266, 3750088.4701662865,
                    -283708.0857056962, 439980.0171042077, 569430.5613567849,
                    -445203.54222542694, 4395081.95769238, -1011880.8642217676,
                    1706246.973139175, -705947.4037211653, 9060.324337779772],
            [56624.942130779986, -136613.94859184456, -80335.43600964258,
                    191627.34944563507, -265416.7728446663, -66142.93440720538,
                    -82379.5850977837, -334143.0030964492, 132897.84866975705,
                    95803.21315757626, 62302.80202420966, -162179.4927570954,
                    -5161.704186270724, -213590.6110219508, -414589.70319374395,
                    -54306.42454874992, 124871.17138425406, -171407.0585552743,
                    -81522.81306041653, -40589.550210622714, -283708.0857056962,
                    21463.584825583323, -33286.11965366797, -43079.587776675464,
                    33681.361394683474, -332503.7460183222, 76552.19242979104,
                    -129083.4533779376, 53407.305044565444, -685.4470597571659],
            [-87816.4837208772, 211866.75720555347, 124586.13005423953,
                    -297177.9863681518, 411622.71723057824, 102577.1383801494,
                    127759.0605202802, 518205.1120158168, -206104.45876818095,
                    -148575.82840293963, -96621.09067510017, 251516.37232037642,
                    8004.88048572238, 331244.08755697083, 642962.9309969022,
                    84220.5244337459, -193660.3231670929, 265823.656563293,
                    126428.52608715445, 62948.51597611402, 439980.0171042077,
                    -33286.11965366797, 51621.107956628206, 66810.39748020428,
                    -52234.17349903379, 515669.16612852685, -118724.19772457697,
                    200190.33198319352, -82828.47575395652, 1062.9922663705831],
            [-113659.46065397991, 274211.2373116087, 161240.75009037546,
                    -384609.91910996195, 532760.6687731618, 132761.78158270588,
                    165360.08034015898, 670701.4840533712, -266758.9838720577,
                    -192291.63727863415, -125052.38923511525, 325538.4205528294,
                    10359.987223487302, 428707.7124927715, 832179.4829438967,
                    109002.58458148634, -250666.41510243676, 344043.76444693253,
                    163630.89540469993, 81471.77983343764, 569430.5613567849,
                    -43079.587776675464, 66810.39748020428, 86474.60960771656,
                    -67604.85973949074, 667446.9213854757, -153673.90990044305,
                    259106.53605521304, -107209.4264066645, 1375.6960266945034],
            [88859.94740941831, -214382.2903332192, -126064.20956241484,
                    300706.3619029368, -416512.20772471745, -103795.28152140026,
                    -129277.37186774613, -524359.3000358283, 208553.34492990456,
                    150337.46468169338, 97769.0083372963, -254505.75555902356,
                    -8099.861329113616, -335174.29690302856, -650606.0552617743,
                    -85220.3192824344, 195962.68392153853, -268981.31944869336,
                    -127929.97219949249, -63695.14153572459, -445203.54222542694,
                    33681.361394683474, -52234.17349903379, -67604.85973949074,
                    52854.870747991714, -521798.4380423071, 120135.4345834101,
                    -202568.0459981826, 83812.77706615409, -1075.6048368400182],
            [-877271.1179979955, 2116495.277061747, 1244532.2694496606,
                    -2968549.589068031, 4112110.4047745992, 1024715.8636411378,
                    1276326.3970300478, 5176801.443597403, -2058963.6467483111,
                    -1484227.4060228057, -965199.8661808498, 2512644.2727309032,
                    79963.00689497797, 3308990.0677973754, 6423061.221356821,
                    841334.6894181048, -1934778.5608125532, 2655463.6300199735,
                    1262973.7207831193, 628851.9369505029, 4395081.95769238,
                    -332503.7460183222, 515669.16612852685, 667446.9213854757,
                    -521798.4380423071, 5151685.670941004, -1186148.933316184,
                    1999922.1689142368, -827505.67522064, 10618.107310982323],
            [201980.68894789586, -487298.17819055147, -286532.5812771456,
                    683441.8508461871, -946778.110294905, -235927.61123554132,
                    -293863.91038333974, -1191905.4989193215, 474053.7557940749,
                    341730.2139528351, 222220.7239743777, -578510.1002880113,
                    -18410.037263166676, -761852.8239891479, -1478817.680854236,
                    -193706.59635809882, 445481.2395412975, -611377.6050845169,
                    -290781.4266488454, -144789.68252842987, -1011880.8642217676,
                    76552.19242979104, -118724.19772457697, -153673.90990044305,
                    120135.4345834101, -1186148.933316184, 273114.8613477328,
                    -460469.1766300898, 190533.7237230798, -2444.5541643981],
            [-340566.08097309095, 821655.4956393185, 483153.3495236603,
                    -1152443.0012280906, 1596369.7050428558, 397809.33378739585,
                    495481.2331138214, 2009703.8383549566, -799311.7184875482,
                    -576210.7159965835, -374702.7345993242, 975430.5141329551,
                    31043.267870027597, 1284614.197931908, 2493487.8598607164,
                    326619.8854179307, -751090.921891152, 1030886.2507075757,
                    490304.56733124814, 244132.85332968459, 1706246.973139175,
                    -129083.4533779376, 200190.33198319352, 259106.53605521304,
                    -202568.0459981826, 1999922.1689142368, -460469.1766300898,
                    776393.7397663131, -321243.75384548464, 4122.182167280981],
            [140911.57157102908, -339963.90830350405, -199901.83344425834,
                    476810.7250318338, -660516.0362065202, -164595.1661642016,
                    -205012.36107524062, -831531.0005204134, 330722.3835950521,
                    238409.02615910847, 155033.09889147282, -403595.15688305,
                    -12843.928854833162, -531509.7065459328, -1031695.6660793469,
                    -135139.83929993995, 310783.5962562703, -426529.56895785424,
                    -202864.38927320475, -101012.28661090058, -705947.4037211653,
                    53407.305044565444, -82828.47575395652, -107209.4264066645,
                    83812.77706615409, -827505.67522064, 190533.7237230798,
                    -321243.75384548464, 132923.1402000635, -1705.4815430793697],
            [-1808.2870504090968, 4362.735925535438, 2565.5529679555852,
                    -6119.722146225432, 8475.908935267644, 2112.2592771769364,
                    2630.7156238501116, 10670.709481456897, -4244.009956370862,
                    -3059.4982172104096, -1989.630875353818, 5179.070796881113,
                    164.84178348727528, 6821.053891274157, 13239.667773888234,
                    1734.2717532579436, -3987.559218979412, 5473.860147105716,
                    2603.419562994833, 1296.2207276234788, 9060.324337779772,
                    -685.4470597571659, 1062.9922663705831, 1375.6960266945034,
                    -1075.6048368400182, 10618.107310982323, -2444.5541643981,
                    4122.182167280981, -1705.4815430793697, 21.890454372860443]]
+
+    def testVarianceCovariance1(self):
+        cov = statistics.variance_covariance(self.data)
+        for i, x in enumerate(cov):
+            for j, y in enumerate(x):
+                self.assertAlmostEqual(cov[i][j], self.expected_cov[i][j])
+
+    def testVarianceCovariance2(self):
+        """Variance-covariance of a second data set matches precomputed values.
+
+        `d` is the input data matrix (5 rows of 10 observations) and `e` is
+        the expected 10x10 variance-covariance matrix, both precomputed
+        externally and hard-coded here as regression reference values.
+        """
+        d = [[-323.9573971686496, 315.7388065925112, -439.6717600886799,
+            -1178.099816177854, 534.643723366918, 1432.26163863866,
+            -497.1855740226206, -1301.8090901379649, -757.2159368960788,
+            1019.1610261218987], [-971.8721915059489, 947.2164197775336,
+                -1319.0152802660396, -3534.299448533562, 1603.9311701007541,
+                4296.7849159159805, -1491.5567220678618, -3905.427270413895,
+                -2271.6478106882364, 3057.483078365696], [1805.2578794408694,
+                    -110.00822001898491, 3403.866092254204, -7.164802263774928,
+                    1090.216811454231, -1328.8920608696994, 1313.2108525086446,
+                    -20191.95994589982, -2217.3808792199297,
+                    -14735.400560599832], [-394.27706884544017,
+                        305.72735899534194, -386.17544812632764,
+                        -1312.6418677447905, 554.652417355839,
+                        1429.310268304489, -479.9816099694797,
+                        -1474.7086736016736, -766.5610040209765,
+                        943.9067418319103], [-91.76220598952864,
+                            136.88060210863958, -85.06584754543456,
+                            -81.68026472600144, 26.24124439892005,
+                            -0.4363035110954008, 9.600832668565268,
+                            -13.610656002675592, -56.73800195687172,
+                            10.062534396465715]]
+
+        # Expected variance-covariance matrix (symmetric, 10x10).
+        e = [[1118051.315834594, -340504.4676922032, 1930944.9443670718,
+            1089311.6226191486, -702.760833078737, -1907420.4159428228,
+            1043769.6960229203, -7881480.1714532245, -303512.8258235939,
+            -7557633.140329254], [-340504.4676922032, 153014.53369854396,
+                -566748.0815504972, -545103.1972692355, 131399.1886553155,
+                811301.8312219149, -373506.9853324348, 1559586.2728247487,
+                -113040.33582529923, 2056440.4519370624], [1930944.9443670718,
+                    -566748.0815504972, 3350039.66106187, 1776176.7004303827,
+                    62345.622800847406, -3185282.8296726383,
+                    1777391.1979210868, -14030883.204065276,
+                    -623124.0396947996, -13181274.97885164],
+                [1089311.6226191486, -545103.1972692355, 1776176.7004303827,
+                    2033255.6374553614, -594563.6823540081,
+                    -2903121.0040589883, 1268999.4533419132,
+                    -3864821.0472551202, 631450.3221457871,
+                    -6251946.061145776], [-702.760833078737, 131399.1886553155,
+                        62345.622800847406, -594563.6823540081,
+                        363149.79842651676, 640030.6375209386,
+                        -154836.23379737284, -2326341.9245344824,
+                        -566971.7171338849, -674414.9016981383],
+                    [-1907420.4159428228, 811301.8312219149,
+                        -3185282.8296726383, -2903121.0040589883,
+                        640030.6375209386, 4381775.401023215,
+                        -2054062.3922582623, 9356357.877914747,
+                        -479593.4464370444, 11710459.142730745],
+                    [1043769.6960229203, -373506.9853324348,
+                        1777391.1979210868, 1268999.4533419132,
+                        -154836.23379737284, -2054062.3922582623,
+                        1041077.3129029109, -6374584.529610114,
+                        -42690.656074846236, -6773734.4112940645],
+                    [-7881480.1714532245, 1559586.2728247487,
+                        -14030883.204065276, -3864821.0472551202,
+                        -2326341.9245344824, 9356357.877914747,
+                        -6374584.529610114, 70562391.7586506,
+                        5780752.083342586, 57672359.799883455],
+                    [-303512.8258235939, -113040.33582529923,
+                        -623124.0396947996, 631450.3221457871,
+                        -566971.7171338849, -479593.4464370444,
+                        -42690.656074846236, 5780752.083342586,
+                        968375.1765316972, 3112971.715860271],
+                    [-7557633.140329254, 2056440.4519370624,
+                        -13181274.97885164, -6251946.061145776,
+                        -674414.9016981383, 11710459.142730745,
+                        -6773734.4112940645, 57672359.799883455,
+                        3112971.715860271, 52393351.00087271]]
+
+        # Element-wise comparison with floating-point tolerance.
+        cov = statistics.variance_covariance(d)
+        for i, x in enumerate(cov):
+            for j, y in enumerate(x):
+                self.assertAlmostEqual(cov[i][j], e[i][j])
+
+class FishersExactTests(dendropytest.ExtendedTestCase):
+    """
+    Fisher's exact test.
+    """
+
+    def setUp(self):
+        self.table = [[12, 5], [29, 2]]
+
+    def testLeftTail(self):
+        ft = statistics.FishersExactTest(self.table)
+        p = ft.left_tail_p()
+        self.assertAlmostEqual(p, 0.044554737835078267)
+
+    def testRightTail(self):
+        ft = statistics.FishersExactTest(self.table)
+        p = ft.right_tail_p()
+        self.assertAlmostEqual(p, 0.99452520602190897)
+
+    def testTwoTail(self):
+        ft = statistics.FishersExactTest(self.table)
+        p = ft.two_tail_p()
+        self.assertAlmostEqual(p, 0.08026855207410688)
+
+# Allow this test module to be run directly as a script.
+if __name__ == "__main__":
+    unittest.main()
+
diff --git a/dendropy/test/test_tree_calculations_and_metrics.py b/dendropy/test/test_tree_calculations_and_metrics.py
new file mode 100644
index 0000000..2a29b6a
--- /dev/null
+++ b/dendropy/test/test_tree_calculations_and_metrics.py
@@ -0,0 +1,1119 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests of tree metrics.
+"""
+
+import random
+import unittest
+import math
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+from dendropy.test.support import dendropytest
+from dendropy.test.support import pathmap
+
+import dendropy
+from dendropy.calculate import treemeasure
+from dendropy.calculate import treecompare
+
+def _get_reference_tree_list(taxon_namespace=None):
+    tree_list = dendropy.TreeList(label=None, taxon_namespace=taxon_namespace)
+    tax_4313741136 = tree_list.taxon_namespace.require_taxon(label="Antaresia childreni")
+    tax_4313741328 = tree_list.taxon_namespace.require_taxon(label="Antaresia maculosa")
+    tax_4313741456 = tree_list.taxon_namespace.require_taxon(label="Antaresia melanocephalus")
+    tax_4313741584 = tree_list.taxon_namespace.require_taxon(label="Antaresia perthensis")
+    tax_4313741712 = tree_list.taxon_namespace.require_taxon(label="Antaresia ramsayi")
+    tax_4313741840 = tree_list.taxon_namespace.require_taxon(label="Antaresia stimsoni")
+    tax_4313741904 = tree_list.taxon_namespace.require_taxon(label="Apodora papuana")
+    tax_4313741968 = tree_list.taxon_namespace.require_taxon(label="Bothrochilus boa")
+    tax_4313742032 = tree_list.taxon_namespace.require_taxon(label="Candoia aspera")
+    tax_4313742160 = tree_list.taxon_namespace.require_taxon(label="Liasis albertisii")
+    tax_4313742224 = tree_list.taxon_namespace.require_taxon(label="Liasis fuscus")
+    tax_4313742288 = tree_list.taxon_namespace.require_taxon(label="Liasis mackloti")
+    tax_4313742352 = tree_list.taxon_namespace.require_taxon(label="Liasis olivaceus")
+    tax_4313742480 = tree_list.taxon_namespace.require_taxon(label="Loxocemus bicolor")
+    tax_4313742608 = tree_list.taxon_namespace.require_taxon(label="Morelia amethistina")
+    tax_4313742672 = tree_list.taxon_namespace.require_taxon(label="Morelia boeleni")
+    tax_4313742736 = tree_list.taxon_namespace.require_taxon(label="Morelia bredli")
+    tax_4313742800 = tree_list.taxon_namespace.require_taxon(label="Morelia carinata")
+    tax_4313742928 = tree_list.taxon_namespace.require_taxon(label="Morelia clastolepis")
+    tax_4313743056 = tree_list.taxon_namespace.require_taxon(label="Morelia kinghorni")
+    tax_4313743120 = tree_list.taxon_namespace.require_taxon(label="Morelia nauta")
+    tax_4313743248 = tree_list.taxon_namespace.require_taxon(label="Morelia oenpelliensis")
+    tax_4313743312 = tree_list.taxon_namespace.require_taxon(label="Morelia spilota")
+    tax_4313759824 = tree_list.taxon_namespace.require_taxon(label="Morelia tracyae")
+    tax_4313759888 = tree_list.taxon_namespace.require_taxon(label="Morelia viridisN")
+    tax_4313759952 = tree_list.taxon_namespace.require_taxon(label="Morelia viridisS")
+    tax_4313760016 = tree_list.taxon_namespace.require_taxon(label="Python curtus")
+    tax_4313760080 = tree_list.taxon_namespace.require_taxon(label="Python molurus")
+    tax_4313760144 = tree_list.taxon_namespace.require_taxon(label="Python regius")
+    tax_4313760272 = tree_list.taxon_namespace.require_taxon(label="Python reticulatus")
+    tax_4313760336 = tree_list.taxon_namespace.require_taxon(label="Python sebae")
+    tax_4313760464 = tree_list.taxon_namespace.require_taxon(label="Python timoriensis")
+    tax_4313760592 = tree_list.taxon_namespace.require_taxon(label="Xenopeltis unicolor")
+    tree_4313760848 = dendropy.Tree(label="Tree01", taxon_namespace=tree_list.taxon_namespace)
+    tree_list.append(tree_4313760848, reindex_taxa=False)
+    nd_4313761232 = tree_4313760848.seed_node.new_child(label="Node4313761232", taxon=None, edge_length=78.6266408419)
+    nd_4313801936 = tree_4313760848.seed_node.new_child(label="Node4313801936", taxon=None, edge_length=229.880308935)
+    nd_4313761360 = nd_4313761232.new_child(label="Node4313761360", taxon=None, edge_length=123.936295332)
+    nd_4313801552 = nd_4313761232.new_child(label="Node4313801552", taxon=None, edge_length=202.422980755)
+    nd_4313761488 = nd_4313761360.new_child(label="Node4313761488", taxon=None, edge_length=53.7887032791)
+    nd_4313779728 = nd_4313761360.new_child(label="Node4313779728", taxon=None, edge_length=69.6091072013)
+    nd_4313761616 = nd_4313761488.new_child(label="Node4313761616", taxon=None, edge_length=4.50960412336)
+    nd_4313779088 = nd_4313761488.new_child(label="Node4313779088", taxon=None, edge_length=24.4333103788)
+    nd_4313761744 = nd_4313761616.new_child(label="Node4313761744", taxon=None, edge_length=1.29166787247)
+    nd_4313778704 = nd_4313761616.new_child(label="Node4313778704", taxon=None, edge_length=13.1562713928)
+    nd_4313761872 = nd_4313761744.new_child(label="Node4313761872", taxon=None, edge_length=0.739270951321)
+    nd_4313777808 = nd_4313761744.new_child(label="Node4313777808", taxon=None, edge_length=16.8726126715)
+    nd_4313762000 = nd_4313761872.new_child(label="Node4313762000", taxon=None, edge_length=4.00932181352)
+    nd_4313777552 = nd_4313761872.new_child(label="Morelia boeleni", taxon=tax_4313742672, edge_length=18.3098649933)
+    nd_4313762128 = nd_4313762000.new_child(label="Node4313762128", taxon=None, edge_length=10.4795105126)
+    nd_4313762704 = nd_4313762000.new_child(label="Node4313762704", taxon=None, edge_length=3.67431549248)
+    nd_4313762256 = nd_4313762128.new_child(label="Antaresia childreni", taxon=tax_4313741136, edge_length=3.82103266723)
+    nd_4313762448 = nd_4313762128.new_child(label="Node4313762448", taxon=None, edge_length=2.72293867732)
+    nd_4313762576 = nd_4313762448.new_child(label="Morelia clastolepis", taxon=tax_4313742928, edge_length=1.09809398991)
+    nd_4313762384 = nd_4313762448.new_child(label="Loxocemus bicolor", taxon=tax_4313742480, edge_length=1.09809398991)
+    nd_4313762960 = nd_4313762704.new_child(label="Node4313762960", taxon=None, edge_length=4.47413907662)
+    nd_4313776720 = nd_4313762704.new_child(label="Node4313776720", taxon=None, edge_length=3.27824986702)
+    nd_4313763088 = nd_4313762960.new_child(label="Node4313763088", taxon=None, edge_length=1.10594701364)
+    nd_4313776208 = nd_4313762960.new_child(label="Node4313776208", taxon=None, edge_length=0.946531890581)
+    nd_4313763216 = nd_4313763088.new_child(label="Node4313763216", taxon=None, edge_length=3.56964311758)
+    nd_4313763600 = nd_4313763088.new_child(label="Node4313763600", taxon=None, edge_length=3.54960462938)
+    nd_4313763344 = nd_4313763216.new_child(label="Antaresia melanocephalus", taxon=tax_4313741456, edge_length=1.47649847948)
+    nd_4313762832 = nd_4313763216.new_child(label="Antaresia perthensis", taxon=tax_4313741584, edge_length=1.47649847948)
+    nd_4313763728 = nd_4313763600.new_child(label="Python timoriensis", taxon=tax_4313760464, edge_length=1.49653696768)
+    nd_4313763472 = nd_4313763600.new_child(label="Liasis albertisii", taxon=tax_4313742160, edge_length=1.49653696768)
+    nd_4313776464 = nd_4313776208.new_child(label="Python sebae", taxon=tax_4313760336, edge_length=5.20555672012)
+    nd_4313776336 = nd_4313776208.new_child(label="Antaresia maculosa", taxon=tax_4313741328, edge_length=5.20555672012)
+    nd_4313776912 = nd_4313776720.new_child(label="Node4313776912", taxon=None, edge_length=2.88197852526)
+    nd_4313777296 = nd_4313776720.new_child(label="Node4313777296", taxon=None, edge_length=6.86415378064)
+    nd_4313777040 = nd_4313776912.new_child(label="Python curtus", taxon=tax_4313760016, edge_length=4.46599929505)
+    nd_4313776784 = nd_4313776912.new_child(label="Bothrochilus boa", taxon=tax_4313741968, edge_length=4.46599929505)
+    nd_4313777424 = nd_4313777296.new_child(label="Xenopeltis unicolor", taxon=tax_4313760592, edge_length=0.483824039664)
+    nd_4313777168 = nd_4313777296.new_child(label="Liasis fuscus", taxon=tax_4313742224, edge_length=0.483824039664)
+    nd_4313777936 = nd_4313777808.new_child(label="Morelia tracyae", taxon=tax_4313759824, edge_length=2.17652327312)
+    nd_4313777680 = nd_4313777808.new_child(label="Node4313777680", taxon=None, edge_length=1.67230791531)
+    nd_4313778192 = nd_4313777680.new_child(label="Node4313778192", taxon=None, edge_length=0.491713738136)
+    nd_4313778576 = nd_4313777680.new_child(label="Morelia bredli", taxon=tax_4313742736, edge_length=0.504215357803)
+    nd_4313778320 = nd_4313778192.new_child(label="Morelia viridisN", taxon=tax_4313759888, edge_length=0.0125016196671)
+    nd_4313778064 = nd_4313778192.new_child(label="Python reticulatus", taxon=tax_4313760272, edge_length=0.0125016196671)
+    nd_4313778832 = nd_4313778704.new_child(label="Node4313778832", taxon=None, edge_length=2.98661848623)
+    nd_4313779216 = nd_4313778704.new_child(label="Morelia kinghorni", taxon=tax_4313743056, edge_length=7.18453242432)
+    nd_4313778960 = nd_4313778832.new_child(label="Liasis mackloti", taxon=tax_4313742288, edge_length=4.19791393809)
+    nd_4313778448 = nd_4313778832.new_child(label="Antaresia ramsayi", taxon=tax_4313741712, edge_length=4.19791393809)
+    nd_4313779472 = nd_4313779088.new_child(label="Node4313779472", taxon=None, edge_length=0.207889736001)
+    nd_4313779856 = nd_4313779088.new_child(label="Antaresia stimsoni", taxon=tax_4313741840, edge_length=0.417097561686)
+    nd_4313779600 = nd_4313779472.new_child(label="Python regius", taxon=tax_4313760144, edge_length=0.209207825685)
+    nd_4313779344 = nd_4313779472.new_child(label="Candoia aspera", taxon=tax_4313742032, edge_length=0.209207825685)
+    nd_4313780112 = nd_4313779728.new_child(label="Node4313780112", taxon=None, edge_length=1.24643505521)
+    nd_4313801424 = nd_4313779728.new_child(label="Morelia nauta", taxon=tax_4313743120, edge_length=9.03000401821)
+    nd_4313800784 = nd_4313780112.new_child(label="Node4313800784", taxon=None, edge_length=2.6364754365)
+    nd_4313801168 = nd_4313780112.new_child(label="Node4313801168", taxon=None, edge_length=7.12573328141)
+    nd_4313800912 = nd_4313800784.new_child(label="Python molurus", taxon=tax_4313760080, edge_length=5.1470935265)
+    nd_4313779984 = nd_4313800784.new_child(label="Morelia amethistina", taxon=tax_4313742608, edge_length=5.1470935265)
+    nd_4313801296 = nd_4313801168.new_child(label="Morelia viridisS", taxon=tax_4313759952, edge_length=0.657835681585)
+    nd_4313801104 = nd_4313801168.new_child(label="Liasis olivaceus", taxon=tax_4313742352, edge_length=0.657835681585)
+    nd_4313801808 = nd_4313801552.new_child(label="Apodora papuana", taxon=tax_4313741904, edge_length=0.152425796315)
+    nd_4313801680 = nd_4313801552.new_child(label="Morelia carinata", taxon=tax_4313742800, edge_length=0.152425796315)
+    nd_4313802192 = nd_4313801936.new_child(label="Morelia spilota", taxon=tax_4313743312, edge_length=51.3217384582)
+    nd_4313802064 = nd_4313801936.new_child(label="Morelia oenpelliensis", taxon=tax_4313743248, edge_length=51.3217384582)
+    tree_4313802320 = dendropy.Tree(label="Tree02", taxon_namespace=tree_list.taxon_namespace)
+    tree_list.append(tree_4313802320, reindex_taxa=False)
+    nd_4313802704 = tree_4313802320.seed_node.new_child(label="Node4313802704", taxon=None, edge_length=18.8917197007)
+    nd_4316157840 = tree_4313802320.seed_node.new_child(label="Loxocemus bicolor", taxon=tax_4313742480, edge_length=100.189141925)
+    nd_4313802832 = nd_4313802704.new_child(label="Candoia aspera", taxon=tax_4313742032, edge_length=81.2974222246)
+    nd_4313803024 = nd_4313802704.new_child(label="Node4313803024", taxon=None, edge_length=33.1565984398)
+    nd_4313803152 = nd_4313803024.new_child(label="Node4313803152", taxon=None, edge_length=6.57324583185)
+    nd_4316156752 = nd_4313803024.new_child(label="Node4316156752", taxon=None, edge_length=16.2594519516)
+    nd_4313803280 = nd_4313803152.new_child(label="Node4313803280", taxon=None, edge_length=0.76583222117)
+    nd_4313829328 = nd_4313803152.new_child(label="Node4313829328", taxon=None, edge_length=2.53377266123)
+    nd_4313803408 = nd_4313803280.new_child(label="Node4313803408", taxon=None, edge_length=4.5936111676)
+    nd_4313826384 = nd_4313803280.new_child(label="Node4313826384", taxon=None, edge_length=1.27553605821)
+    nd_4313803536 = nd_4313803408.new_child(label="Node4313803536", taxon=None, edge_length=15.1180336863)
+    nd_4313804432 = nd_4313803408.new_child(label="Node4313804432", taxon=None, edge_length=3.40951166184)
+    nd_4313803664 = nd_4313803536.new_child(label="Morelia amethistina", taxon=tax_4313742608, edge_length=21.0901008779)
+    nd_4313802960 = nd_4313803536.new_child(label="Node4313802960", taxon=None, edge_length=3.38653541663)
+    nd_4313803920 = nd_4313802960.new_child(label="Morelia spilota", taxon=tax_4313743312, edge_length=17.7035654613)
+    nd_4313803792 = nd_4313802960.new_child(label="Node4313803792", taxon=None, edge_length=2.6244717729)
+    nd_4313804112 = nd_4313803792.new_child(label="Morelia oenpelliensis", taxon=tax_4313743248, edge_length=15.0790936884)
+    nd_4313804304 = nd_4313803792.new_child(label="Morelia bredli", taxon=tax_4313742736, edge_length=15.0790936884)
+    nd_4313804560 = nd_4313804432.new_child(label="Node4313804560", taxon=None, edge_length=5.00375936144)
+    nd_4313825616 = nd_4313804432.new_child(label="Node4313825616", taxon=None, edge_length=5.81119736053)
+    nd_4313804688 = nd_4313804560.new_child(label="Morelia carinata", taxon=tax_4313742800, edge_length=27.7948635409)
+    nd_4313804240 = nd_4313804560.new_child(label="Node4313804240", taxon=None, edge_length=8.32618746237)
+    nd_4313825488 = nd_4313804240.new_child(label="Morelia viridisS", taxon=tax_4313759952, edge_length=19.4686760786)
+    nd_4313825360 = nd_4313804240.new_child(label="Morelia viridisN", taxon=tax_4313759888, edge_length=19.4686760786)
+    nd_4313825872 = nd_4313825616.new_child(label="Antaresia maculosa", taxon=tax_4313741328, edge_length=26.9874255418)
+    nd_4313825744 = nd_4313825616.new_child(label="Node4313825744", taxon=None, edge_length=2.25683638168)
+    nd_4313826128 = nd_4313825744.new_child(label="Node4313826128", taxon=None, edge_length=16.6530983052)
+    nd_4313826512 = nd_4313825744.new_child(label="Antaresia perthensis", taxon=tax_4313741584, edge_length=24.7305891602)
+    nd_4313826256 = nd_4313826128.new_child(label="Antaresia childreni", taxon=tax_4313741136, edge_length=8.07749085501)
+    nd_4313826000 = nd_4313826128.new_child(label="Antaresia stimsoni", taxon=tax_4313741840, edge_length=8.07749085501)
+    nd_4313826768 = nd_4313826384.new_child(label="Node4313826768", taxon=None, edge_length=4.33214615343)
+    nd_4313828048 = nd_4313826384.new_child(label="Node4313828048", taxon=None, edge_length=10.9652932592)
+    nd_4313826896 = nd_4313826768.new_child(label="Node4313826896", taxon=None, edge_length=3.37363071467)
+    nd_4313827664 = nd_4313826768.new_child(label="Node4313827664", taxon=None, edge_length=16.3762764593)
+    nd_4313827024 = nd_4313826896.new_child(label="Node4313827024", taxon=None, edge_length=26.1365403684)
+    nd_4313827408 = nd_4313826896.new_child(label="Node4313827408", taxon=None, edge_length=12.1064068345)
+    nd_4313827152 = nd_4313827024.new_child(label="Liasis fuscus", taxon=tax_4313742224, edge_length=5.68389243709)
+    nd_4313826640 = nd_4313827024.new_child(label="Liasis mackloti", taxon=tax_4313742288, edge_length=5.68389243709)
+    nd_4313827536 = nd_4313827408.new_child(label="Apodora papuana", taxon=tax_4313741904, edge_length=19.714025971)
+    nd_4313827280 = nd_4313827408.new_child(label="Liasis olivaceus", taxon=tax_4313742352, edge_length=19.714025971)
+    nd_4313827920 = nd_4313827664.new_child(label="Antaresia melanocephalus", taxon=tax_4313741456, edge_length=18.8177870609)
+    nd_4313827792 = nd_4313827664.new_child(label="Antaresia ramsayi", taxon=tax_4313741712, edge_length=18.8177870609)
+    nd_4313828304 = nd_4313828048.new_child(label="Morelia boeleni", taxon=tax_4313742672, edge_length=28.5609164144)
+    nd_4313828176 = nd_4313828048.new_child(label="Node4313828176", taxon=None, edge_length=11.3916491298)
+    nd_4313828560 = nd_4313828176.new_child(label="Morelia tracyae", taxon=tax_4313759824, edge_length=17.1692672846)
+    nd_4313828432 = nd_4313828176.new_child(label="Node4313828432", taxon=None, edge_length=10.4784522084)
+    nd_4313828816 = nd_4313828432.new_child(label="Node4313828816", taxon=None, edge_length=1.44575855569)
+    nd_4313829200 = nd_4313828432.new_child(label="Morelia clastolepis", taxon=tax_4313742928, edge_length=6.69081507619)
+    nd_4313828944 = nd_4313828816.new_child(label="Morelia kinghorni", taxon=tax_4313743056, edge_length=5.2450565205)
+    nd_4313828688 = nd_4313828816.new_child(label="Morelia nauta", taxon=tax_4313743120, edge_length=5.2450565205)
+    nd_4316155984 = nd_4313829328.new_child(label="Node4316155984", taxon=None, edge_length=23.8923728386)
+    nd_4313802512 = nd_4313829328.new_child(label="Node4313802512", taxon=None, edge_length=22.5856079922)
+    nd_4316156112 = nd_4316155984.new_child(label="Python timoriensis", taxon=tax_4313760464, edge_length=15.1414324532)
+    nd_4316156368 = nd_4316155984.new_child(label="Python reticulatus", taxon=tax_4313760272, edge_length=15.1414324532)
+    nd_4316156496 = nd_4313802512.new_child(label="Bothrochilus boa", taxon=tax_4313741968, edge_length=16.4481972995)
+    nd_4313802448 = nd_4313802512.new_child(label="Liasis albertisii", taxon=tax_4313742160, edge_length=16.4481972995)
+    nd_4316156880 = nd_4316156752.new_child(label="Xenopeltis unicolor", taxon=tax_4313760592, edge_length=31.8813718333)
+    nd_4316156624 = nd_4316156752.new_child(label="Node4316156624", taxon=None, edge_length=5.97313984611)
+    nd_4316157136 = nd_4316156624.new_child(label="Node4316157136", taxon=None, edge_length=8.94343133576)
+    nd_4316157904 = nd_4316156624.new_child(label="Python regius", taxon=tax_4313760144, edge_length=25.9082319872)
+    nd_4316157264 = nd_4316157136.new_child(label="Python curtus", taxon=tax_4313760016, edge_length=16.9648006514)
+    nd_4316157008 = nd_4316157136.new_child(label="Node4316157008", taxon=None, edge_length=4.66373979181)
+    nd_4316157584 = nd_4316157008.new_child(label="Python sebae", taxon=tax_4313760336, edge_length=12.3010608596)
+    nd_4316157456 = nd_4316157008.new_child(label="Python molurus", taxon=tax_4313760080, edge_length=12.3010608596)
+    tree_4316158160 = dendropy.Tree(label="Tree03", taxon_namespace=tree_list.taxon_namespace)
+    tree_list.append(tree_4316158160, reindex_taxa=False)
+    nd_4316158416 = tree_4316158160.seed_node.new_child(label="Candoia aspera", taxon=tax_4313742032, edge_length=109.372663833)
+    nd_4316158608 = tree_4316158160.seed_node.new_child(label="Node4316158608", taxon=None, edge_length=23.0231215792)
+    nd_4316158736 = nd_4316158608.new_child(label="Node4316158736", taxon=None, edge_length=25.5450384687)
+    nd_4316203024 = nd_4316158608.new_child(label="Node4316203024", taxon=None, edge_length=3.40243621372)
+    nd_4316158864 = nd_4316158736.new_child(label="Node4316158864", taxon=None, edge_length=8.2214192007)
+    nd_4316202128 = nd_4316158736.new_child(label="Node4316202128", taxon=None, edge_length=17.448321597)
+    nd_4316158992 = nd_4316158864.new_child(label="Node4316158992", taxon=None, edge_length=6.47536868825)
+    nd_4316201552 = nd_4316158864.new_child(label="Node4316201552", taxon=None, edge_length=22.2791730332)
+    nd_4316159120 = nd_4316158992.new_child(label="Node4316159120", taxon=None, edge_length=2.53965409687)
+    nd_4316179792 = nd_4316158992.new_child(label="Node4316179792", taxon=None, edge_length=7.82653683343)
+    nd_4316159248 = nd_4316159120.new_child(label="Node4316159248", taxon=None, edge_length=2.41802497137)
+    nd_4316178128 = nd_4316159120.new_child(label="Node4316178128", taxon=None, edge_length=5.90175129715)
+    nd_4316159376 = nd_4316159248.new_child(label="Node4316159376", taxon=None, edge_length=6.39175712039)
+    nd_4316177104 = nd_4316159248.new_child(label="Node4316177104", taxon=None, edge_length=9.11329596086)
+    nd_4316159504 = nd_4316159376.new_child(label="Node4316159504", taxon=None, edge_length=4.82772953939)
+    nd_4316176720 = nd_4316159376.new_child(label="Node4316176720", taxon=None, edge_length=17.7955396972)
+    nd_4316159632 = nd_4316159504.new_child(label="Node4316159632", taxon=None, edge_length=18.9362130146)
+    nd_4316176464 = nd_4316159504.new_child(label="Node4316176464", taxon=None, edge_length=8.55401069191)
+    nd_4316159760 = nd_4316159632.new_child(label="Liasis fuscus", taxon=tax_4313742224, edge_length=10.9943371535)
+    nd_4316158544 = nd_4316159632.new_child(label="Liasis mackloti", taxon=tax_4313742288, edge_length=10.9943371535)
+    nd_4316176592 = nd_4316176464.new_child(label="Apodora papuana", taxon=tax_4313741904, edge_length=21.3765394762)
+    nd_4316159888 = nd_4316176464.new_child(label="Liasis olivaceus", taxon=tax_4313742352, edge_length=21.3765394762)
+    nd_4316176976 = nd_4316176720.new_child(label="Antaresia melanocephalus", taxon=tax_4313741456, edge_length=16.9627400103)
+    nd_4316176848 = nd_4316176720.new_child(label="Antaresia ramsayi", taxon=tax_4313741712, edge_length=16.9627400103)
+    nd_4316177360 = nd_4316177104.new_child(label="Morelia boeleni", taxon=tax_4313742672, edge_length=32.036740867)
+    nd_4316177232 = nd_4316177104.new_child(label="Node4316177232", taxon=None, edge_length=14.7791918926)
+    nd_4316177616 = nd_4316177232.new_child(label="Morelia tracyae", taxon=tax_4313759824, edge_length=17.2575489744)
+    nd_4316177488 = nd_4316177232.new_child(label="Node4316177488", taxon=None, edge_length=13.3651095585)
+    nd_4316177872 = nd_4316177488.new_child(label="Node4316177872", taxon=None, edge_length=0.439451186875)
+    nd_4316178256 = nd_4316177488.new_child(label="Morelia clastolepis", taxon=tax_4313742928, edge_length=3.89243941597)
+    nd_4316178000 = nd_4316177872.new_child(label="Morelia kinghorni", taxon=tax_4313743056, edge_length=3.4529882291)
+    nd_4316177744 = nd_4316177872.new_child(label="Morelia nauta", taxon=tax_4313743120, edge_length=3.4529882291)
+    nd_4316178512 = nd_4316178128.new_child(label="Node4316178512", taxon=None, edge_length=11.5242004723)
+    nd_4316179280 = nd_4316178128.new_child(label="Node4316179280", taxon=None, edge_length=13.3915851761)
+    nd_4316178640 = nd_4316178512.new_child(label="Node4316178640", taxon=None, edge_length=1.13984981318)
+    nd_4316179024 = nd_4316178512.new_child(label="Node4316179024", taxon=None, edge_length=13.1543046788)
+    nd_4316178768 = nd_4316178640.new_child(label="Morelia oenpelliensis", taxon=tax_4313743248, edge_length=25.0022602166)
+    nd_4316178384 = nd_4316178640.new_child(label="Morelia amethistina", taxon=tax_4313742608, edge_length=25.0022602166)
+    nd_4316179152 = nd_4316179024.new_child(label="Morelia spilota", taxon=tax_4313743312, edge_length=12.987805351)
+    nd_4316178896 = nd_4316179024.new_child(label="Morelia bredli", taxon=tax_4313742736, edge_length=12.987805351)
+    nd_4316179536 = nd_4316179280.new_child(label="Bothrochilus boa", taxon=tax_4313741968, edge_length=24.274725326)
+    nd_4316179408 = nd_4316179280.new_child(label="Liasis albertisii", taxon=tax_4313742160, edge_length=24.274725326)
+    nd_4316179920 = nd_4316179792.new_child(label="Node4316179920", taxon=None, edge_length=7.8146690322)
+    nd_4316180432 = nd_4316179792.new_child(label="Node4316180432", taxon=None, edge_length=5.10842077756)
+    nd_4316180048 = nd_4316179920.new_child(label="Morelia carinata", taxon=tax_4313742800, edge_length=30.4665100305)
+    nd_4316179664 = nd_4316179920.new_child(label="Node4316179664", taxon=None, edge_length=11.0043198537)
+    nd_4316180304 = nd_4316179664.new_child(label="Morelia viridisS", taxon=tax_4313759952, edge_length=19.4621901768)
+    nd_4316180176 = nd_4316179664.new_child(label="Morelia viridisN", taxon=tax_4313759888, edge_length=19.4621901768)
+    nd_4316201104 = nd_4316180432.new_child(label="Antaresia maculosa", taxon=tax_4313741328, edge_length=33.1727582851)
+    nd_4316158224 = nd_4316180432.new_child(label="Node4316158224", taxon=None, edge_length=4.7141022378)
+    nd_4316201296 = nd_4316158224.new_child(label="Node4316201296", taxon=None, edge_length=19.4308450954)
+    nd_4316201744 = nd_4316158224.new_child(label="Antaresia perthensis", taxon=tax_4313741584, edge_length=28.4586560473)
+    nd_4316201424 = nd_4316201296.new_child(label="Antaresia childreni", taxon=tax_4313741136, edge_length=9.02781095195)
+    nd_4316201616 = nd_4316201296.new_child(label="Antaresia stimsoni", taxon=tax_4313741840, edge_length=9.02781095195)
+    nd_4316202000 = nd_4316201552.new_child(label="Python timoriensis", taxon=tax_4313760464, edge_length=30.3039115511)
+    nd_4316201872 = nd_4316201552.new_child(label="Python reticulatus", taxon=tax_4313760272, edge_length=30.3039115511)
+    nd_4316202384 = nd_4316202128.new_child(label="Node4316202384", taxon=None, edge_length=6.14068810478)
+    nd_4316202896 = nd_4316202128.new_child(label="Python regius", taxon=tax_4313760144, edge_length=43.356182188)
+    nd_4316202512 = nd_4316202384.new_child(label="Python curtus", taxon=tax_4313760016, edge_length=37.2154940833)
+    nd_4316202256 = nd_4316202384.new_child(label="Node4316202256", taxon=None, edge_length=15.2807163378)
+    nd_4316202768 = nd_4316202256.new_child(label="Python sebae", taxon=tax_4313760336, edge_length=21.9347777454)
+    nd_4316202640 = nd_4316202256.new_child(label="Python molurus", taxon=tax_4313760080, edge_length=21.9347777454)
+    nd_4316203280 = nd_4316203024.new_child(label="Loxocemus bicolor", taxon=tax_4313742480, edge_length=82.94710604)
+    nd_4316203152 = nd_4316203024.new_child(label="Xenopeltis unicolor", taxon=tax_4313760592, edge_length=82.94710604)
+    tree_4316203536 = dendropy.Tree(label="Tree04", taxon_namespace=tree_list.taxon_namespace)
+    tree_list.append(tree_4316203536, reindex_taxa=False)
+    nd_4316203792 = tree_4316203536.seed_node.new_child(label="Candoia aspera", taxon=tax_4313742032, edge_length=157.750076773)
+    nd_4316203984 = tree_4316203536.seed_node.new_child(label="Node4316203984", taxon=None, edge_length=44.9789688242)
+    nd_4316204112 = nd_4316203984.new_child(label="Node4316204112", taxon=None, edge_length=19.5811677101)
+    nd_4316252880 = nd_4316203984.new_child(label="Xenopeltis unicolor", taxon=tax_4313760592, edge_length=112.771107949)
+    nd_4316204240 = nd_4316204112.new_child(label="Loxocemus bicolor", taxon=tax_4313742480, edge_length=93.189940239)
+    nd_4316203920 = nd_4316204112.new_child(label="Node4316203920", taxon=None, edge_length=27.291533515)
+    nd_4316204496 = nd_4316203920.new_child(label="Node4316204496", taxon=None, edge_length=10.3875398007)
+    nd_4316251856 = nd_4316203920.new_child(label="Node4316251856", taxon=None, edge_length=19.8895314814)
+    nd_4316204624 = nd_4316204496.new_child(label="Node4316204624", taxon=None, edge_length=9.74044751021)
+    nd_4316251600 = nd_4316204496.new_child(label="Node4316251600", taxon=None, edge_length=22.1202819118)
+    nd_4316204752 = nd_4316204624.new_child(label="Node4316204752", taxon=None, edge_length=2.88538115771)
+    nd_4316227472 = nd_4316204624.new_child(label="Node4316227472", taxon=None, edge_length=2.72676824396)
+    nd_4316204880 = nd_4316204752.new_child(label="Node4316204880", taxon=None, edge_length=1.34047834167)
+    nd_4316227216 = nd_4316204752.new_child(label="Morelia boeleni", taxon=tax_4313742672, edge_length=42.8850382554)
+    nd_4316205008 = nd_4316204880.new_child(label="Node4316205008", taxon=None, edge_length=2.31767871982)
+    nd_4316226768 = nd_4316204880.new_child(label="Node4316226768", taxon=None, edge_length=18.2547475502)
+    nd_4316225680 = nd_4316205008.new_child(label="Node4316225680", taxon=None, edge_length=6.3930928479)
+    nd_4316226448 = nd_4316205008.new_child(label="Node4316226448", taxon=None, edge_length=23.2404397828)
+    nd_4316225808 = nd_4316225680.new_child(label="Node4316225808", taxon=None, edge_length=24.6792964706)
+    nd_4316226192 = nd_4316225680.new_child(label="Node4316226192", taxon=None, edge_length=8.52936801714)
+    nd_4316225936 = nd_4316225808.new_child(label="Liasis fuscus", taxon=tax_4313742224, edge_length=8.15449187544)
+    nd_4316204368 = nd_4316225808.new_child(label="Liasis mackloti", taxon=tax_4313742288, edge_length=8.15449187544)
+    nd_4316226320 = nd_4316226192.new_child(label="Apodora papuana", taxon=tax_4313741904, edge_length=24.3044203289)
+    nd_4316226128 = nd_4316226192.new_child(label="Liasis olivaceus", taxon=tax_4313742352, edge_length=24.3044203289)
+    nd_4316226640 = nd_4316226448.new_child(label="Antaresia melanocephalus", taxon=tax_4313741456, edge_length=15.9864414111)
+    nd_4316226832 = nd_4316226448.new_child(label="Antaresia ramsayi", taxon=tax_4313741712, edge_length=15.9864414111)
+    nd_4316227088 = nd_4316226768.new_child(label="Bothrochilus boa", taxon=tax_4313741968, edge_length=23.2898123636)
+    nd_4316226960 = nd_4316226768.new_child(label="Liasis albertisii", taxon=tax_4313742160, edge_length=23.2898123636)
+    nd_4316227600 = nd_4316227472.new_child(label="Node4316227600", taxon=None, edge_length=14.2175774566)
+    nd_4316229392 = nd_4316227472.new_child(label="Node4316229392", taxon=None, edge_length=3.9347409374)
+    nd_4316227728 = nd_4316227600.new_child(label="Node4316227728", taxon=None, edge_length=12.5474231006)
+    nd_4316228112 = nd_4316227600.new_child(label="Node4316228112", taxon=None, edge_length=1.26678175478)
+    nd_4316227856 = nd_4316227728.new_child(label="Morelia spilota", taxon=tax_4313743312, edge_length=16.278650612)
+    nd_4316227344 = nd_4316227728.new_child(label="Morelia bredli", taxon=tax_4313742736, edge_length=16.278650612)
+    nd_4316228240 = nd_4316228112.new_child(label="Morelia oenpelliensis", taxon=tax_4313743248, edge_length=27.5592919578)
+    nd_4316227984 = nd_4316228112.new_child(label="Node4316227984", taxon=None, edge_length=13.3039152583)
+    nd_4316228496 = nd_4316227984.new_child(label="Morelia tracyae", taxon=tax_4313759824, edge_length=14.2553766995)
+    nd_4316228368 = nd_4316227984.new_child(label="Node4316228368", taxon=None, edge_length=1.66170791236)
+    nd_4316228752 = nd_4316228368.new_child(label="Node4316228752", taxon=None, edge_length=8.89489387836)
+    nd_4316229264 = nd_4316228368.new_child(label="Morelia amethistina", taxon=tax_4313742608, edge_length=12.5936687872)
+    nd_4316228880 = nd_4316228752.new_child(label="Node4316228880", taxon=None, edge_length=0.230110019205)
+    nd_4316229136 = nd_4316228752.new_child(label="Morelia nauta", taxon=tax_4313743120, edge_length=3.69877490882)
+    nd_4316229008 = nd_4316228880.new_child(label="Morelia clastolepis", taxon=tax_4313742928, edge_length=3.46866488962)
+    nd_4316203408 = nd_4316228880.new_child(label="Morelia kinghorni", taxon=tax_4313743056, edge_length=3.46866488962)
+    nd_4316229520 = nd_4316229392.new_child(label="Node4316229520", taxon=None, edge_length=5.88218975316)
+    nd_4316250576 = nd_4316229392.new_child(label="Node4316250576", taxon=None, edge_length=7.11128547149)
+    nd_4316250192 = nd_4316229520.new_child(label="Morelia carinata", taxon=tax_4313742800, edge_length=33.2267204786)
+    nd_4316203600 = nd_4316229520.new_child(label="Node4316203600", taxon=None, edge_length=13.4306199458)
+    nd_4316250448 = nd_4316203600.new_child(label="Morelia viridisS", taxon=tax_4313759952, edge_length=19.7961005329)
+    nd_4316250320 = nd_4316203600.new_child(label="Morelia viridisN", taxon=tax_4313759888, edge_length=19.7961005329)
+    nd_4316250832 = nd_4316250576.new_child(label="Antaresia maculosa", taxon=tax_4313741328, edge_length=31.9976247603)
+    nd_4316250704 = nd_4316250576.new_child(label="Node4316250704", taxon=None, edge_length=4.71436528425)
+    nd_4316251088 = nd_4316250704.new_child(label="Node4316251088", taxon=None, edge_length=15.9285543528)
+    nd_4316251472 = nd_4316250704.new_child(label="Antaresia perthensis", taxon=tax_4313741584, edge_length=27.2832594761)
+    nd_4316251216 = nd_4316251088.new_child(label="Antaresia childreni", taxon=tax_4313741136, edge_length=11.3547051232)
+    nd_4316250960 = nd_4316251088.new_child(label="Antaresia stimsoni", taxon=tax_4313741840, edge_length=11.3547051232)
+    nd_4316251728 = nd_4316251600.new_child(label="Python timoriensis", taxon=tax_4313760464, edge_length=33.3905850116)
+    nd_4316251344 = nd_4316251600.new_child(label="Python reticulatus", taxon=tax_4313760272, edge_length=33.3905850116)
+    nd_4316252112 = nd_4316251856.new_child(label="Node4316252112", taxon=None, edge_length=11.1907198563)
+    nd_4316252624 = nd_4316251856.new_child(label="Python regius", taxon=tax_4313760144, edge_length=46.0088752426)
+    nd_4316252240 = nd_4316252112.new_child(label="Python curtus", taxon=tax_4313760016, edge_length=34.8181553863)
+    nd_4316251984 = nd_4316252112.new_child(label="Node4316251984", taxon=None, edge_length=7.89583224277)
+    nd_4316252496 = nd_4316251984.new_child(label="Python sebae", taxon=tax_4313760336, edge_length=26.9223231435)
+    nd_4316252368 = nd_4316251984.new_child(label="Python molurus", taxon=tax_4313760080, edge_length=26.9223231435)
+    tree_4316252752 = dendropy.Tree(label="Tree05", taxon_namespace=tree_list.taxon_namespace)
+    tree_list.append(tree_4316252752, reindex_taxa=False)
+    nd_4316253264 = tree_4316252752.seed_node.new_child(label="Candoia aspera", taxon=tax_4313742032, edge_length=117.405482731)
+    nd_4316253456 = tree_4316252752.seed_node.new_child(label="Node4316253456", taxon=None, edge_length=26.8517813278)
+    nd_4316253584 = nd_4316253456.new_child(label="Xenopeltis unicolor", taxon=tax_4313760592, edge_length=90.5537014032)
+    nd_4316253392 = nd_4316253456.new_child(label="Node4316253392", taxon=None, edge_length=16.5508683851)
+    nd_4316253776 = nd_4316253392.new_child(label="Node4316253776", taxon=None, edge_length=16.6353899137)
+    nd_4316302224 = nd_4316253392.new_child(label="Loxocemus bicolor", taxon=tax_4313742480, edge_length=74.0028330181)
+    nd_4316253904 = nd_4316253776.new_child(label="Node4316253904", taxon=None, edge_length=6.56321044476)
+    nd_4316301328 = nd_4316253776.new_child(label="Node4316301328", taxon=None, edge_length=18.1852453647)
+    nd_4316254032 = nd_4316253904.new_child(label="Node4316254032", taxon=None, edge_length=12.3993667148)
+    nd_4316301072 = nd_4316253904.new_child(label="Node4316301072", taxon=None, edge_length=23.3069353709)
+    nd_4316254160 = nd_4316254032.new_child(label="Node4316254160", taxon=None, edge_length=1.85501747057)
+    nd_4316276944 = nd_4316254032.new_child(label="Node4316276944", taxon=None, edge_length=2.5597754437)
+    nd_4316274832 = nd_4316254160.new_child(label="Morelia boeleni", taxon=tax_4313742672, edge_length=36.5498484742)
+    nd_4316275024 = nd_4316254160.new_child(label="Node4316275024", taxon=None, edge_length=1.04661210215)
+    nd_4316275152 = nd_4316275024.new_child(label="Node4316275152", taxon=None, edge_length=6.27408700912)
+    nd_4316276432 = nd_4316275024.new_child(label="Node4316276432", taxon=None, edge_length=15.4075337774)
+    nd_4316275280 = nd_4316275152.new_child(label="Node4316275280", taxon=None, edge_length=4.14502379891)
+    nd_4316276048 = nd_4316275152.new_child(label="Node4316276048", taxon=None, edge_length=17.4625828861)
+    nd_4316275408 = nd_4316275280.new_child(label="Node4316275408", taxon=None, edge_length=18.7911640437)
+    nd_4316275792 = nd_4316275280.new_child(label="Node4316275792", taxon=None, edge_length=4.65165886356)
+    nd_4316275536 = nd_4316275408.new_child(label="Liasis fuscus", taxon=tax_4313742224, edge_length=6.29296152027)
+    nd_4316274960 = nd_4316275408.new_child(label="Liasis mackloti", taxon=tax_4313742288, edge_length=6.29296152027)
+    nd_4316275920 = nd_4316275792.new_child(label="Apodora papuana", taxon=tax_4313741904, edge_length=20.4324667004)
+    nd_4316275664 = nd_4316275792.new_child(label="Liasis olivaceus", taxon=tax_4313742352, edge_length=20.4324667004)
+    nd_4316276304 = nd_4316276048.new_child(label="Antaresia melanocephalus", taxon=tax_4313741456, edge_length=11.7665664768)
+    nd_4316276176 = nd_4316276048.new_child(label="Antaresia ramsayi", taxon=tax_4313741712, edge_length=11.7665664768)
+    nd_4316276688 = nd_4316276432.new_child(label="Bothrochilus boa", taxon=tax_4313741968, edge_length=20.0957025946)
+    nd_4316276560 = nd_4316276432.new_child(label="Liasis albertisii", taxon=tax_4313742160, edge_length=20.0957025946)
+    nd_4316277072 = nd_4316276944.new_child(label="Node4316277072", taxon=None, edge_length=10.9999278199)
+    nd_4316299408 = nd_4316276944.new_child(label="Node4316299408", taxon=None, edge_length=4.77611939702)
+    nd_4316277200 = nd_4316277072.new_child(label="Morelia oenpelliensis", taxon=tax_4313743248, edge_length=24.8451626811)
+    nd_4316276816 = nd_4316277072.new_child(label="Node4316276816", taxon=None, edge_length=2.78884378851)
+    nd_4316277392 = nd_4316276816.new_child(label="Node4316277392", taxon=None, edge_length=7.72979171285)
+    nd_4316278352 = nd_4316276816.new_child(label="Node4316278352", taxon=None, edge_length=12.4767981898)
+    nd_4316277520 = nd_4316277392.new_child(label="Morelia tracyae", taxon=tax_4313759824, edge_length=14.3265271797)
+    nd_4316277584 = nd_4316277392.new_child(label="Node4316277584", taxon=None, edge_length=3.38770784397)
+    nd_4316277712 = nd_4316277584.new_child(label="Node4316277712", taxon=None, edge_length=5.65307392619)
+    nd_4316278224 = nd_4316277584.new_child(label="Morelia amethistina", taxon=tax_4313742608, edge_length=10.9388193358)
+    nd_4316277840 = nd_4316277712.new_child(label="Morelia clastolepis", taxon=tax_4313742928, edge_length=5.28574540958)
+    nd_4316253008 = nd_4316277712.new_child(label="Node4316253008", taxon=None, edge_length=2.55413552294)
+    nd_4316278096 = nd_4316253008.new_child(label="Morelia kinghorni", taxon=tax_4313743056, edge_length=2.73160988664)
+    nd_4316277968 = nd_4316253008.new_child(label="Morelia nauta", taxon=tax_4313743120, edge_length=2.73160988664)
+    nd_4316278608 = nd_4316278352.new_child(label="Morelia spilota", taxon=tax_4313743312, edge_length=9.57952070277)
+    nd_4316278480 = nd_4316278352.new_child(label="Morelia bredli", taxon=tax_4313742736, edge_length=9.57952070277)
+    nd_4316299536 = nd_4316299408.new_child(label="Node4316299536", taxon=None, edge_length=6.62946104565)
+    nd_4316300048 = nd_4316299408.new_child(label="Node4316300048", taxon=None, edge_length=3.07031045323)
+    nd_4316299664 = nd_4316299536.new_child(label="Morelia carinata", taxon=tax_4313742800, edge_length=24.4395100584)
+    nd_4316299344 = nd_4316299536.new_child(label="Node4316299344", taxon=None, edge_length=7.74158436759)
+    nd_4316299920 = nd_4316299344.new_child(label="Morelia viridisS", taxon=tax_4313759952, edge_length=16.6979256908)
+    nd_4316299792 = nd_4316299344.new_child(label="Morelia viridisN", taxon=tax_4313759888, edge_length=16.6979256908)
+    nd_4316300304 = nd_4316300048.new_child(label="Antaresia maculosa", taxon=tax_4313741328, edge_length=27.9986606508)
+    nd_4316300176 = nd_4316300048.new_child(label="Node4316300176", taxon=None, edge_length=4.88214307652)
+    nd_4316300560 = nd_4316300176.new_child(label="Node4316300560", taxon=None, edge_length=16.2064963991)
+    nd_4316300944 = nd_4316300176.new_child(label="Antaresia perthensis", taxon=tax_4313741584, edge_length=23.1165175743)
+    nd_4316300688 = nd_4316300560.new_child(label="Antaresia childreni", taxon=tax_4313741136, edge_length=6.91002117518)
+    nd_4316300432 = nd_4316300560.new_child(label="Antaresia stimsoni", taxon=tax_4313741840, edge_length=6.91002117518)
+    nd_4316301200 = nd_4316301072.new_child(label="Python timoriensis", taxon=tax_4313760464, edge_length=27.4972972887)
+    nd_4316300816 = nd_4316301072.new_child(label="Python reticulatus", taxon=tax_4313760272, edge_length=27.4972972887)
+    nd_4316301584 = nd_4316301328.new_child(label="Node4316301584", taxon=None, edge_length=6.08258726043)
+    nd_4316302096 = nd_4316301328.new_child(label="Python regius", taxon=tax_4313760144, edge_length=39.1821977396)
+    nd_4316301712 = nd_4316301584.new_child(label="Python curtus", taxon=tax_4313760016, edge_length=33.0996104792)
+    nd_4316301456 = nd_4316301584.new_child(label="Node4316301456", taxon=None, edge_length=14.293345062)
+    nd_4316301968 = nd_4316301456.new_child(label="Python sebae", taxon=tax_4313760336, edge_length=18.8062654172)
+    nd_4316301840 = nd_4316301456.new_child(label="Python molurus", taxon=tax_4313760080, edge_length=18.8062654172)
+    tree_4316302352 = dendropy.Tree(label="Tree06", taxon_namespace=tree_list.taxon_namespace)
+    tree_list.append(tree_4316302352, reindex_taxa=False)
+    nd_4316302736 = tree_4316302352.seed_node.new_child(label="Candoia aspera", taxon=tax_4313742032, edge_length=126.159749307)
+    nd_4316302928 = tree_4316302352.seed_node.new_child(label="Node4316302928", taxon=None, edge_length=29.5182899091)
+    nd_4316303056 = nd_4316302928.new_child(label="Node4316303056", taxon=None, edge_length=20.4272017262)
+    nd_4316347728 = nd_4316302928.new_child(label="Xenopeltis unicolor", taxon=tax_4313760592, edge_length=96.6414593981)
+    nd_4316303184 = nd_4316303056.new_child(label="Loxocemus bicolor", taxon=tax_4313742480, edge_length=76.2142576718)
+    nd_4316302864 = nd_4316303056.new_child(label="Node4316302864", taxon=None, edge_length=22.3340694246)
+    nd_4316323920 = nd_4316302864.new_child(label="Node4316323920", taxon=None, edge_length=7.33856679933)
+    nd_4316346704 = nd_4316302864.new_child(label="Node4316346704", taxon=None, edge_length=17.9081848887)
+    nd_4316324048 = nd_4316323920.new_child(label="Node4316324048", taxon=None, edge_length=8.24675869634)
+    nd_4316346448 = nd_4316323920.new_child(label="Node4316346448", taxon=None, edge_length=23.556392099)
+    nd_4316324176 = nd_4316324048.new_child(label="Node4316324176", taxon=None, edge_length=1.78294733026)
+    nd_4316326288 = nd_4316324048.new_child(label="Node4316326288", taxon=None, edge_length=3.26747919544)
+    nd_4316324304 = nd_4316324176.new_child(label="Morelia boeleni", taxon=tax_4313742672, edge_length=36.5119154213)
+    nd_4316324496 = nd_4316324176.new_child(label="Node4316324496", taxon=None, edge_length=1.38475127065)
+    nd_4316324624 = nd_4316324496.new_child(label="Node4316324624", taxon=None, edge_length=2.24740276648)
+    nd_4316326032 = nd_4316324496.new_child(label="Node4316326032", taxon=None, edge_length=12.9799500845)
+    nd_4316324752 = nd_4316324624.new_child(label="Node4316324752", taxon=None, edge_length=17.772328432)
+    nd_4316325136 = nd_4316324624.new_child(label="Node4316325136", taxon=None, edge_length=4.4885389798)
+    nd_4316324880 = nd_4316324752.new_child(label="Antaresia melanocephalus", taxon=tax_4313741456, edge_length=15.1074329521)
+    nd_4316324432 = nd_4316324752.new_child(label="Antaresia ramsayi", taxon=tax_4313741712, edge_length=15.1074329521)
+    nd_4316325264 = nd_4316325136.new_child(label="Node4316325264", taxon=None, edge_length=20.8200876951)
+    nd_4316325648 = nd_4316325136.new_child(label="Node4316325648", taxon=None, edge_length=4.37289319177)
+    nd_4316325392 = nd_4316325264.new_child(label="Liasis fuscus", taxon=tax_4313742224, edge_length=7.5711347092)
+    nd_4316325008 = nd_4316325264.new_child(label="Liasis mackloti", taxon=tax_4313742288, edge_length=7.5711347092)
+    nd_4316325776 = nd_4316325648.new_child(label="Apodora papuana", taxon=tax_4313741904, edge_length=24.0183292126)
+    nd_4316325520 = nd_4316325648.new_child(label="Liasis olivaceus", taxon=tax_4313742352, edge_length=24.0183292126)
+    nd_4316326160 = nd_4316326032.new_child(label="Bothrochilus boa", taxon=tax_4313741968, edge_length=22.1472140662)
+    nd_4316325904 = nd_4316326032.new_child(label="Liasis albertisii", taxon=tax_4313742160, edge_length=22.1472140662)
+    nd_4316326416 = nd_4316326288.new_child(label="Node4316326416", taxon=None, edge_length=12.2960670728)
+    nd_4316344784 = nd_4316326288.new_child(label="Node4316344784", taxon=None, edge_length=4.17834235089)
+    nd_4316326544 = nd_4316326416.new_child(label="Node4316326544", taxon=None, edge_length=1.62908011033)
+    nd_4316327184 = nd_4316326416.new_child(label="Node4316327184", taxon=None, edge_length=6.20414166284)
+    nd_4316326672 = nd_4316326544.new_child(label="Node4316326672", taxon=None, edge_length=11.7610480778)
+    nd_4316327056 = nd_4316326544.new_child(label="Morelia oenpelliensis", taxon=tax_4313743248, edge_length=21.1022363729)
+    nd_4316326800 = nd_4316326672.new_child(label="Morelia spilota", taxon=tax_4313743312, edge_length=9.34118829512)
+    nd_4316302480 = nd_4316326672.new_child(label="Morelia bredli", taxon=tax_4313742736, edge_length=9.34118829512)
+    nd_4316327312 = nd_4316327184.new_child(label="Morelia tracyae", taxon=tax_4313759824, edge_length=16.5271748204)
+    nd_4316326928 = nd_4316327184.new_child(label="Node4316326928", taxon=None, edge_length=4.27339647744)
+    nd_4316327568 = nd_4316326928.new_child(label="Node4316327568", taxon=None, edge_length=7.91289243511)
+    nd_4316344656 = nd_4316326928.new_child(label="Morelia amethistina", taxon=tax_4313742608, edge_length=12.253778343)
+    nd_4316327696 = nd_4316327568.new_child(label="Node4316327696", taxon=None, edge_length=1.89524872622)
+    nd_4316344528 = nd_4316327568.new_child(label="Morelia clastolepis", taxon=tax_4313742928, edge_length=4.34088590785)
+    nd_4316327824 = nd_4316327696.new_child(label="Morelia nauta", taxon=tax_4313743120, edge_length=2.44563718163)
+    nd_4316327440 = nd_4316327696.new_child(label="Morelia kinghorni", taxon=tax_4313743056, edge_length=2.44563718163)
+    nd_4316344912 = nd_4316344784.new_child(label="Node4316344912", taxon=None, edge_length=5.03710806168)
+    nd_4316345424 = nd_4316344784.new_child(label="Node4316345424", taxon=None, edge_length=4.09757269601)
+    nd_4316345040 = nd_4316344912.new_child(label="Morelia carinata", taxon=tax_4313742800, edge_length=25.8119331435)
+    nd_4316344400 = nd_4316344912.new_child(label="Node4316344400", taxon=None, edge_length=11.4337686931)
+    nd_4316345296 = nd_4316344400.new_child(label="Morelia viridisS", taxon=tax_4313759952, edge_length=14.3781644505)
+    nd_4316345168 = nd_4316344400.new_child(label="Morelia viridisN", taxon=tax_4313759888, edge_length=14.3781644505)
+    nd_4316345680 = nd_4316345424.new_child(label="Antaresia maculosa", taxon=tax_4313741328, edge_length=26.7514685092)
+    nd_4316345552 = nd_4316345424.new_child(label="Node4316345552", taxon=None, edge_length=2.33638652753)
+    nd_4316345936 = nd_4316345552.new_child(label="Node4316345936", taxon=None, edge_length=17.4436719435)
+    nd_4316346320 = nd_4316345552.new_child(label="Antaresia perthensis", taxon=tax_4313741584, edge_length=24.4150819817)
+    nd_4316346064 = nd_4316345936.new_child(label="Antaresia childreni", taxon=tax_4313741136, edge_length=6.97141003814)
+    nd_4316345808 = nd_4316345936.new_child(label="Antaresia stimsoni", taxon=tax_4313741840, edge_length=6.97141003814)
+    nd_4316346576 = nd_4316346448.new_child(label="Python timoriensis", taxon=tax_4313760464, edge_length=22.9852293488)
+    nd_4316346192 = nd_4316346448.new_child(label="Python reticulatus", taxon=tax_4313760272, edge_length=22.9852293488)
+    nd_4316346960 = nd_4316346704.new_child(label="Node4316346960", taxon=None, edge_length=6.75664224871)
+    nd_4316347472 = nd_4316346704.new_child(label="Python regius", taxon=tax_4313760144, edge_length=35.9720033585)
+    nd_4316347088 = nd_4316346960.new_child(label="Python curtus", taxon=tax_4313760016, edge_length=29.2153611098)
+    nd_4316346832 = nd_4316346960.new_child(label="Node4316346832", taxon=None, edge_length=8.15978945225)
+    nd_4316347344 = nd_4316346832.new_child(label="Python sebae", taxon=tax_4313760336, edge_length=21.0555716576)
+    nd_4316347216 = nd_4316346832.new_child(label="Python molurus", taxon=tax_4313760080, edge_length=21.0555716576)
+    tree_4316347600 = dendropy.Tree(label="Tree07", taxon_namespace=tree_list.taxon_namespace)
+    tree_list.append(tree_4316347600, reindex_taxa=False)
+    nd_4316348112 = tree_4316347600.seed_node.new_child(label="Candoia aspera", taxon=tax_4313742032, edge_length=124.564186516)
+    nd_4316348304 = tree_4316347600.seed_node.new_child(label="Node4316348304", taxon=None, edge_length=36.3676780441)
+    nd_4316368976 = nd_4316348304.new_child(label="Node4316368976", taxon=None, edge_length=11.1789504571)
+    nd_4316397200 = nd_4316348304.new_child(label="Xenopeltis unicolor", taxon=tax_4313760592, edge_length=88.196508472)
+    nd_4316369104 = nd_4316368976.new_child(label="Node4316369104", taxon=None, edge_length=20.3346663059)
+    nd_4316396944 = nd_4316368976.new_child(label="Loxocemus bicolor", taxon=tax_4313742480, edge_length=77.0175580149)
+    nd_4316369232 = nd_4316369104.new_child(label="Node4316369232", taxon=None, edge_length=10.7040090023)
+    nd_4316396048 = nd_4316369104.new_child(label="Node4316396048", taxon=None, edge_length=19.1477140595)
+    nd_4316369360 = nd_4316369232.new_child(label="Node4316369360", taxon=None, edge_length=7.28944403695)
+    nd_4316395664 = nd_4316369232.new_child(label="Node4316395664", taxon=None, edge_length=20.035415736)
+    nd_4316369488 = nd_4316369360.new_child(label="Node4316369488", taxon=None, edge_length=2.76745367097)
+    nd_4316372752 = nd_4316369360.new_child(label="Node4316372752", taxon=None, edge_length=1.09335779327)
+    nd_4316369616 = nd_4316369488.new_child(label="Node4316369616", taxon=None, edge_length=12.5842551962)
+    nd_4316371280 = nd_4316369488.new_child(label="Node4316371280", taxon=None, edge_length=7.66239308607)
+    nd_4316369744 = nd_4316369616.new_child(label="Node4316369744", taxon=None, edge_length=12.0191169658)
+    nd_4316370128 = nd_4316369616.new_child(label="Node4316370128", taxon=None, edge_length=2.25737379321)
+    nd_4316369872 = nd_4316369744.new_child(label="Morelia spilota", taxon=tax_4313743312, edge_length=11.3186128368)
+    nd_4316348240 = nd_4316369744.new_child(label="Morelia bredli", taxon=tax_4313742736, edge_length=11.3186128368)
+    nd_4316370256 = nd_4316370128.new_child(label="Morelia oenpelliensis", taxon=tax_4313743248, edge_length=21.0803560094)
+    nd_4316370000 = nd_4316370128.new_child(label="Node4316370000", taxon=None, edge_length=6.93019779087)
+    nd_4316370512 = nd_4316370000.new_child(label="Morelia tracyae", taxon=tax_4313759824, edge_length=14.1501582185)
+    nd_4316370384 = nd_4316370000.new_child(label="Node4316370384", taxon=None, edge_length=3.68709033509)
+    nd_4316347856 = nd_4316370384.new_child(label="Node4316347856", taxon=None, edge_length=4.34576715349)
+    nd_4316371152 = nd_4316370384.new_child(label="Morelia amethistina", taxon=tax_4313742608, edge_length=10.4630678834)
+    nd_4316370768 = nd_4316347856.new_child(label="Morelia nauta", taxon=tax_4313743120, edge_length=6.11730072991)
+    nd_4316370640 = nd_4316347856.new_child(label="Node4316370640", taxon=None, edge_length=2.07374196069)
+    nd_4316371024 = nd_4316370640.new_child(label="Morelia kinghorni", taxon=tax_4313743056, edge_length=4.04355876922)
+    nd_4316370896 = nd_4316370640.new_child(label="Morelia clastolepis", taxon=tax_4313742928, edge_length=4.04355876922)
+    nd_4316371536 = nd_4316371280.new_child(label="Node4316371536", taxon=None, edge_length=5.79725116231)
+    nd_4316372048 = nd_4316371280.new_child(label="Node4316372048", taxon=None, edge_length=3.71811515781)
+    nd_4316371664 = nd_4316371536.new_child(label="Morelia carinata", taxon=tax_4313742800, edge_length=22.4623407504)
+    nd_4316371408 = nd_4316371536.new_child(label="Node4316371408", taxon=None, edge_length=5.91246822929)
+    nd_4316371920 = nd_4316371408.new_child(label="Morelia viridisS", taxon=tax_4313759952, edge_length=16.5498725211)
+    nd_4316371792 = nd_4316371408.new_child(label="Morelia viridisN", taxon=tax_4313759888, edge_length=16.5498725211)
+    nd_4316372304 = nd_4316372048.new_child(label="Antaresia maculosa", taxon=tax_4313741328, edge_length=24.5414767549)
+    nd_4316372176 = nd_4316372048.new_child(label="Node4316372176", taxon=None, edge_length=5.68085904285)
+    nd_4316372496 = nd_4316372176.new_child(label="Node4316372496", taxon=None, edge_length=11.551508813)
+    nd_4316372944 = nd_4316372176.new_child(label="Antaresia perthensis", taxon=tax_4313741584, edge_length=18.8606177121)
+    nd_4316372624 = nd_4316372496.new_child(label="Antaresia childreni", taxon=tax_4313741136, edge_length=7.30910889905)
+    nd_4316372816 = nd_4316372496.new_child(label="Antaresia stimsoni", taxon=tax_4313741840, edge_length=7.30910889905)
+    nd_4316393744 = nd_4316372752.new_child(label="Morelia boeleni", taxon=tax_4313742672, edge_length=37.5960808765)
+    nd_4316393616 = nd_4316372752.new_child(label="Node4316393616", taxon=None, edge_length=2.73294846613)
+    nd_4316393936 = nd_4316393616.new_child(label="Node4316393936", taxon=None, edge_length=2.22916081797)
+    nd_4316395408 = nd_4316393616.new_child(label="Node4316395408", taxon=None, edge_length=13.2144705479)
+    nd_4316394064 = nd_4316393936.new_child(label="Node4316394064", taxon=None, edge_length=19.1660901297)
+    nd_4316394512 = nd_4316393936.new_child(label="Node4316394512", taxon=None, edge_length=7.55440984409)
+    nd_4316394192 = nd_4316394064.new_child(label="Antaresia melanocephalus", taxon=tax_4313741456, edge_length=13.4678814627)
+    nd_4316394384 = nd_4316394064.new_child(label="Antaresia ramsayi", taxon=tax_4313741712, edge_length=13.4678814627)
+    nd_4316394640 = nd_4316394512.new_child(label="Node4316394640", taxon=None, edge_length=18.9933631628)
+    nd_4316395024 = nd_4316394512.new_child(label="Node4316395024", taxon=None, edge_length=5.77339164759)
+    nd_4316394768 = nd_4316394640.new_child(label="Liasis fuscus", taxon=tax_4313742224, edge_length=6.08619858554)
+    nd_4316394320 = nd_4316394640.new_child(label="Liasis mackloti", taxon=tax_4313742288, edge_length=6.08619858554)
+    nd_4316395152 = nd_4316395024.new_child(label="Apodora papuana", taxon=tax_4313741904, edge_length=19.3061701007)
+    nd_4316394896 = nd_4316395024.new_child(label="Liasis olivaceus", taxon=tax_4313742352, edge_length=19.3061701007)
+    nd_4316395536 = nd_4316395408.new_child(label="Bothrochilus boa", taxon=tax_4313741968, edge_length=21.6486618625)
+    nd_4316395280 = nd_4316395408.new_child(label="Liasis albertisii", taxon=tax_4313742160, edge_length=21.6486618625)
+    nd_4316395920 = nd_4316395664.new_child(label="Python timoriensis", taxon=tax_4313760464, edge_length=25.9434669707)
+    nd_4316395792 = nd_4316395664.new_child(label="Python reticulatus", taxon=tax_4313760272, edge_length=25.9434669707)
+    nd_4316396304 = nd_4316396048.new_child(label="Node4316396304", taxon=None, edge_length=7.07244422644)
+    nd_4316396816 = nd_4316396048.new_child(label="Python regius", taxon=tax_4313760144, edge_length=37.5351776494)
+    nd_4316396432 = nd_4316396304.new_child(label="Python curtus", taxon=tax_4313760016, edge_length=30.462733423)
+    nd_4316396176 = nd_4316396304.new_child(label="Node4316396176", taxon=None, edge_length=14.4432592042)
+    nd_4316396688 = nd_4316396176.new_child(label="Python sebae", taxon=tax_4313760336, edge_length=16.0194742188)
+    nd_4316396560 = nd_4316396176.new_child(label="Python molurus", taxon=tax_4313760080, edge_length=16.0194742188)
+    tree_4316397072 = dendropy.Tree(label="Tree08", taxon_namespace=tree_list.taxon_namespace)
+    tree_list.append(tree_4316397072, reindex_taxa=False)
+    nd_4316418128 = tree_4316397072.seed_node.new_child(label="Candoia aspera", taxon=tax_4313742032, edge_length=95.8502441646)
+    nd_4316418320 = tree_4316397072.seed_node.new_child(label="Node4316418320", taxon=None, edge_length=21.8741644934)
+    nd_4316418448 = nd_4316418320.new_child(label="Xenopeltis unicolor", taxon=tax_4313760592, edge_length=73.9760796713)
+    nd_4316418256 = nd_4316418320.new_child(label="Node4316418256", taxon=None, edge_length=9.52951598189)
+    nd_4316418704 = nd_4316418256.new_child(label="Loxocemus bicolor", taxon=tax_4313742480, edge_length=64.4465636894)
+    nd_4316418576 = nd_4316418256.new_child(label="Node4316418576", taxon=None, edge_length=22.9882151659)
+    nd_4316418960 = nd_4316418576.new_child(label="Node4316418960", taxon=None, edge_length=6.43697452247)
+    nd_4316445776 = nd_4316418576.new_child(label="Node4316445776", taxon=None, edge_length=13.8002436509)
+    nd_4316419088 = nd_4316418960.new_child(label="Node4316419088", taxon=None, edge_length=6.7231540226)
+    nd_4316445392 = nd_4316418960.new_child(label="Node4316445392", taxon=None, edge_length=15.1871703268)
+    nd_4316397392 = nd_4316419088.new_child(label="Node4316397392", taxon=None, edge_length=1.03257897787)
+    nd_4316443088 = nd_4316419088.new_child(label="Node4316443088", taxon=None, edge_length=1.21568464049)
+    nd_4316419216 = nd_4316397392.new_child(label="Node4316419216", taxon=None, edge_length=7.52518318022)
+    nd_4316421136 = nd_4316397392.new_child(label="Node4316421136", taxon=None, edge_length=2.39608223465)
+    nd_4316419344 = nd_4316419216.new_child(label="Node4316419344", taxon=None, edge_length=2.42060556338)
+    nd_4316421008 = nd_4316419216.new_child(label="Morelia oenpelliensis", taxon=tax_4313743248, edge_length=19.7404578203)
+    nd_4316419472 = nd_4316419344.new_child(label="Node4316419472", taxon=None, edge_length=8.29264517113)
+    nd_4316419856 = nd_4316419344.new_child(label="Node4316419856", taxon=None, edge_length=6.70163613113)
+    nd_4316419600 = nd_4316419472.new_child(label="Morelia spilota", taxon=tax_4313743312, edge_length=9.02720708579)
+    nd_4316418832 = nd_4316419472.new_child(label="Morelia bredli", taxon=tax_4313742736, edge_length=9.02720708579)
+    nd_4316419984 = nd_4316419856.new_child(label="Morelia tracyae", taxon=tax_4313759824, edge_length=10.6182161258)
+    nd_4316419728 = nd_4316419856.new_child(label="Node4316419728", taxon=None, edge_length=3.47880840545)
+    nd_4316420240 = nd_4316419728.new_child(label="Node4316420240", taxon=None, edge_length=4.27223311967)
+    nd_4316420752 = nd_4316419728.new_child(label="Morelia amethistina", taxon=tax_4313742608, edge_length=7.13940772034)
+    nd_4316420368 = nd_4316420240.new_child(label="Morelia clastolepis", taxon=tax_4313742928, edge_length=2.86717460067)
+    nd_4316420112 = nd_4316420240.new_child(label="Node4316420112", taxon=None, edge_length=0.371215520464)
+    nd_4316420624 = nd_4316420112.new_child(label="Morelia nauta", taxon=tax_4313743120, edge_length=2.49595908021)
+    nd_4316420496 = nd_4316420112.new_child(label="Morelia kinghorni", taxon=tax_4313743056, edge_length=2.49595908021)
+    nd_4316421264 = nd_4316421136.new_child(label="Node4316421264", taxon=None, edge_length=3.24355281662)
+    nd_4316421776 = nd_4316421136.new_child(label="Node4316421776", taxon=None, edge_length=3.2027013644)
+    nd_4316421392 = nd_4316421264.new_child(label="Morelia carinata", taxon=tax_4313742800, edge_length=21.6260059492)
+    nd_4316420880 = nd_4316421264.new_child(label="Node4316420880", taxon=None, edge_length=9.1533399099)
+    nd_4316421648 = nd_4316420880.new_child(label="Morelia viridisS", taxon=tax_4313759952, edge_length=12.4726660393)
+    nd_4316421520 = nd_4316420880.new_child(label="Morelia viridisN", taxon=tax_4313759888, edge_length=12.4726660393)
+    nd_4316422032 = nd_4316421776.new_child(label="Antaresia maculosa", taxon=tax_4313741328, edge_length=21.6668574015)
+    nd_4316421904 = nd_4316421776.new_child(label="Node4316421904", taxon=None, edge_length=4.33602073619)
+    nd_4316442832 = nd_4316421904.new_child(label="Node4316442832", taxon=None, edge_length=11.5233229214)
+    nd_4316443216 = nd_4316421904.new_child(label="Antaresia perthensis", taxon=tax_4313741584, edge_length=17.3308366653)
+    nd_4316442960 = nd_4316442832.new_child(label="Antaresia childreni", taxon=tax_4313741136, edge_length=5.8075137439)
+    nd_4316442704 = nd_4316442832.new_child(label="Antaresia stimsoni", taxon=tax_4313741840, edge_length=5.8075137439)
+    nd_4316443472 = nd_4316443088.new_child(label="Morelia boeleni", taxon=tax_4313742672, edge_length=27.0825353379)
+    nd_4316443344 = nd_4316443088.new_child(label="Node4316443344", taxon=None, edge_length=1.37229916019)
+    nd_4316443728 = nd_4316443344.new_child(label="Node4316443728", taxon=None, edge_length=2.64946637554)
+    nd_4316445136 = nd_4316443344.new_child(label="Node4316445136", taxon=None, edge_length=11.4050202795)
+    nd_4316443856 = nd_4316443728.new_child(label="Node4316443856", taxon=None, edge_length=13.5545767859)
+    nd_4316444240 = nd_4316443728.new_child(label="Node4316444240", taxon=None, edge_length=4.67390676307)
+    nd_4316443984 = nd_4316443856.new_child(label="Antaresia melanocephalus", taxon=tax_4313741456, edge_length=9.50619301624)
+    nd_4316443600 = nd_4316443856.new_child(label="Antaresia ramsayi", taxon=tax_4313741712, edge_length=9.50619301624)
+    nd_4316444368 = nd_4316444240.new_child(label="Node4316444368", taxon=None, edge_length=12.8995814401)
+    nd_4316444752 = nd_4316444240.new_child(label="Node4316444752", taxon=None, edge_length=1.38849394051)
+    nd_4316444496 = nd_4316444368.new_child(label="Liasis fuscus", taxon=tax_4313742224, edge_length=5.48728159904)
+    nd_4316444112 = nd_4316444368.new_child(label="Liasis mackloti", taxon=tax_4313742288, edge_length=5.48728159904)
+    nd_4316444880 = nd_4316444752.new_child(label="Apodora papuana", taxon=tax_4313741904, edge_length=16.9983690986)
+    nd_4316444624 = nd_4316444752.new_child(label="Liasis olivaceus", taxon=tax_4313742352, edge_length=16.9983690986)
+    nd_4316445264 = nd_4316445136.new_child(label="Bothrochilus boa", taxon=tax_4313741968, edge_length=14.3052158982)
+    nd_4316445008 = nd_4316445136.new_child(label="Liasis albertisii", taxon=tax_4313742160, edge_length=14.3052158982)
+    nd_4316445648 = nd_4316445392.new_child(label="Python timoriensis", taxon=tax_4313760464, edge_length=19.8342036742)
+    nd_4316445520 = nd_4316445392.new_child(label="Python reticulatus", taxon=tax_4313760272, edge_length=19.8342036742)
+    nd_4316446032 = nd_4316445776.new_child(label="Node4316446032", taxon=None, edge_length=3.93234771773)
+    nd_4316446544 = nd_4316445776.new_child(label="Python regius", taxon=tax_4313760144, edge_length=27.6581048725)
+    nd_4316446160 = nd_4316446032.new_child(label="Python curtus", taxon=tax_4313760016, edge_length=23.7257571548)
+    nd_4316445904 = nd_4316446032.new_child(label="Node4316445904", taxon=None, edge_length=7.63118798191)
+    nd_4316446416 = nd_4316445904.new_child(label="Python sebae", taxon=tax_4313760336, edge_length=16.0945691729)
+    nd_4316446288 = nd_4316445904.new_child(label="Python molurus", taxon=tax_4313760080, edge_length=16.0945691729)
+    tree_4316446672 = dendropy.Tree(label="Tree09", taxon_namespace=tree_list.taxon_namespace)
+    tree_list.append(tree_4316446672, reindex_taxa=False)
+    nd_4316467600 = tree_4316446672.seed_node.new_child(label="Candoia aspera", taxon=tax_4313742032, edge_length=126.147943419)
+    nd_4316467344 = tree_4316446672.seed_node.new_child(label="Node4316467344", taxon=None, edge_length=40.5157695372)
+    nd_4316467792 = nd_4316467344.new_child(label="Node4316467792", taxon=None, edge_length=13.6608326978)
+    nd_4316512592 = nd_4316467344.new_child(label="Xenopeltis unicolor", taxon=tax_4313760592, edge_length=85.6321738818)
+    nd_4316467920 = nd_4316467792.new_child(label="Node4316467920", taxon=None, edge_length=17.1918011574)
+    nd_4316512336 = nd_4316467792.new_child(label="Loxocemus bicolor", taxon=tax_4313742480, edge_length=71.971341184)
+    nd_4316468048 = nd_4316467920.new_child(label="Node4316468048", taxon=None, edge_length=5.77299961102)
+    nd_4316490896 = nd_4316467920.new_child(label="Node4316490896", taxon=None, edge_length=21.166651853)
+    nd_4316468176 = nd_4316468048.new_child(label="Node4316468176", taxon=None, edge_length=13.7010200713)
+    nd_4316490576 = nd_4316468048.new_child(label="Node4316490576", taxon=None, edge_length=24.6978541753)
+    nd_4316468304 = nd_4316468176.new_child(label="Node4316468304", taxon=None, edge_length=0.617658990864)
+    nd_4316470480 = nd_4316468176.new_child(label="Node4316470480", taxon=None, edge_length=1.35670146604)
+    nd_4316468432 = nd_4316468304.new_child(label="Morelia boeleni", taxon=tax_4313742672, edge_length=34.6878613533)
+    nd_4316467408 = nd_4316468304.new_child(label="Node4316467408", taxon=None, edge_length=2.23641376685)
+    nd_4316468688 = nd_4316467408.new_child(label="Node4316468688", taxon=None, edge_length=10.5685372807)
+    nd_4316469072 = nd_4316467408.new_child(label="Node4316469072", taxon=None, edge_length=2.11685229318)
+    nd_4316468816 = nd_4316468688.new_child(label="Bothrochilus boa", taxon=tax_4313741968, edge_length=21.8829103058)
+    nd_4316468560 = nd_4316468688.new_child(label="Liasis albertisii", taxon=tax_4313742160, edge_length=21.8829103058)
+    nd_4316469200 = nd_4316469072.new_child(label="Node4316469200", taxon=None, edge_length=17.1757724208)
+    nd_4316469584 = nd_4316469072.new_child(label="Node4316469584", taxon=None, edge_length=3.96372676423)
+    nd_4316469328 = nd_4316469200.new_child(label="Antaresia melanocephalus", taxon=tax_4313741456, edge_length=13.1588228725)
+    nd_4316468944 = nd_4316469200.new_child(label="Antaresia ramsayi", taxon=tax_4313741712, edge_length=13.1588228725)
+    nd_4316469712 = nd_4316469584.new_child(label="Node4316469712", taxon=None, edge_length=19.5683902852)
+    nd_4316470096 = nd_4316469584.new_child(label="Node4316470096", taxon=None, edge_length=4.82785669688)
+    nd_4316469840 = nd_4316469712.new_child(label="Liasis fuscus", taxon=tax_4313742224, edge_length=6.80247824391)
+    nd_4316469456 = nd_4316469712.new_child(label="Liasis mackloti", taxon=tax_4313742288, edge_length=6.80247824391)
+    nd_4316470224 = nd_4316470096.new_child(label="Apodora papuana", taxon=tax_4313741904, edge_length=21.5430118322)
+    nd_4316469968 = nd_4316470096.new_child(label="Liasis olivaceus", taxon=tax_4313742352, edge_length=21.5430118322)
+    nd_4316470608 = nd_4316470480.new_child(label="Node4316470608", taxon=None, edge_length=10.3401216686)
+    nd_4316488848 = nd_4316470480.new_child(label="Node4316488848", taxon=None, edge_length=7.03488295134)
+    nd_4316470736 = nd_4316470608.new_child(label="Node4316470736", taxon=None, edge_length=2.31259127041)
+    nd_4316488592 = nd_4316470608.new_child(label="Node4316488592", taxon=None, edge_length=13.9826784484)
+    nd_4316470864 = nd_4316470736.new_child(label="Node4316470864", taxon=None, edge_length=7.79402846351)
+    nd_4316488336 = nd_4316470736.new_child(label="Morelia oenpelliensis", taxon=tax_4313743248, edge_length=21.2961059391)
+    nd_4316470992 = nd_4316470864.new_child(label="Morelia tracyae", taxon=tax_4313759824, edge_length=13.5020774756)
+    nd_4316470352 = nd_4316470864.new_child(label="Node4316470352", taxon=None, edge_length=3.5072599877)
+    nd_4316471248 = nd_4316470352.new_child(label="Node4316471248", taxon=None, edge_length=6.20487512451)
+    nd_4316488208 = nd_4316470352.new_child(label="Morelia amethistina", taxon=tax_4313742608, edge_length=9.99481748791)
+    nd_4316487824 = nd_4316471248.new_child(label="Morelia nauta", taxon=tax_4313743120, edge_length=3.78994236341)
+    nd_4316471120 = nd_4316471248.new_child(label="Node4316471120", taxon=None, edge_length=1.25204312348)
+    nd_4316488080 = nd_4316471120.new_child(label="Morelia clastolepis", taxon=tax_4313742928, edge_length=2.53789923993)
+    nd_4316487952 = nd_4316471120.new_child(label="Morelia kinghorni", taxon=tax_4313743056, edge_length=2.53789923993)
+    nd_4316488720 = nd_4316488592.new_child(label="Morelia spilota", taxon=tax_4313743312, edge_length=9.62601876119)
+    nd_4316488464 = nd_4316488592.new_child(label="Morelia bredli", taxon=tax_4313742736, edge_length=9.62601876119)
+    nd_4316489104 = nd_4316488848.new_child(label="Node4316489104", taxon=None, edge_length=4.39672345568)
+    nd_4316489616 = nd_4316488848.new_child(label="Node4316489616", taxon=None, edge_length=3.94778865925)
+    nd_4316489232 = nd_4316489104.new_child(label="Morelia carinata", taxon=tax_4313742800, edge_length=22.5172124711)
+    nd_4316488976 = nd_4316489104.new_child(label="Node4316488976", taxon=None, edge_length=11.9061835258)
+    nd_4316489488 = nd_4316488976.new_child(label="Morelia viridisS", taxon=tax_4313759952, edge_length=10.6110289453)
+    nd_4316489360 = nd_4316488976.new_child(label="Morelia viridisN", taxon=tax_4313759888, edge_length=10.6110289453)
+    nd_4316489872 = nd_4316489616.new_child(label="Antaresia maculosa", taxon=tax_4313741328, edge_length=22.9661472676)
+    nd_4316489744 = nd_4316489616.new_child(label="Node4316489744", taxon=None, edge_length=3.7713704432)
+    nd_4316490128 = nd_4316489744.new_child(label="Node4316490128", taxon=None, edge_length=11.9409692012)
+    nd_4316490512 = nd_4316489744.new_child(label="Antaresia perthensis", taxon=tax_4313741584, edge_length=19.1947768244)
+    nd_4316490256 = nd_4316490128.new_child(label="Antaresia childreni", taxon=tax_4313741136, edge_length=7.25380762314)
+    nd_4316490000 = nd_4316490128.new_child(label="Antaresia stimsoni", taxon=tax_4313741840, edge_length=7.25380762314)
+    nd_4316490768 = nd_4316490576.new_child(label="Python timoriensis", taxon=tax_4313760464, edge_length=24.3086862402)
+    nd_4316490640 = nd_4316490576.new_child(label="Python reticulatus", taxon=tax_4313760272, edge_length=24.3086862402)
+    nd_4316491152 = nd_4316490896.new_child(label="Node4316491152", taxon=None, edge_length=5.80983351578)
+    nd_4316491664 = nd_4316490896.new_child(label="Python regius", taxon=tax_4313760144, edge_length=33.6128881735)
+    nd_4316491280 = nd_4316491152.new_child(label="Python curtus", taxon=tax_4313760016, edge_length=27.8030546577)
+    nd_4316491024 = nd_4316491152.new_child(label="Node4316491024", taxon=None, edge_length=10.4395768579)
+    nd_4316491536 = nd_4316491024.new_child(label="Python sebae", taxon=tax_4313760336, edge_length=17.3634777999)
+    nd_4316491408 = nd_4316491024.new_child(label="Python molurus", taxon=tax_4313760080, edge_length=17.3634777999)
+    tree_4316512464 = dendropy.Tree(label="Tree10", taxon_namespace=tree_list.taxon_namespace)
+    tree_list.append(tree_4316512464, reindex_taxa=False)
+    nd_4316512976 = tree_4316512464.seed_node.new_child(label="Candoia aspera", taxon=tax_4313742032, edge_length=146.770054852)
+    nd_4316513168 = tree_4316512464.seed_node.new_child(label="Node4316513168", taxon=None, edge_length=49.9930471528)
+    nd_4316513296 = nd_4316513168.new_child(label="Node4316513296", taxon=None, edge_length=13.4634525107)
+    nd_4316558096 = nd_4316513168.new_child(label="Xenopeltis unicolor", taxon=tax_4313760592, edge_length=96.7770076997)
+    nd_4316513424 = nd_4316513296.new_child(label="Loxocemus bicolor", taxon=tax_4313742480, edge_length=83.313555189)
+    nd_4316513104 = nd_4316513296.new_child(label="Node4316513104", taxon=None, edge_length=30.4776798161)
+    nd_4316513680 = nd_4316513104.new_child(label="Node4316513680", taxon=None, edge_length=8.15039409252)
+    nd_4316536464 = nd_4316513104.new_child(label="Node4316536464", taxon=None, edge_length=14.4274535052)
+    nd_4316513808 = nd_4316513680.new_child(label="Node4316513808", taxon=None, edge_length=9.51023675819)
+    nd_4316512784 = nd_4316513680.new_child(label="Node4316512784", taxon=None, edge_length=25.3895116055)
+    nd_4316513936 = nd_4316513808.new_child(label="Node4316513936", taxon=None, edge_length=1.94845077373)
+    nd_4316534096 = nd_4316513808.new_child(label="Node4316534096", taxon=None, edge_length=1.20705242999)
+    nd_4316514064 = nd_4316513936.new_child(label="Node4316514064", taxon=None, edge_length=3.25905094181)
+    nd_4316515728 = nd_4316513936.new_child(label="Node4316515728", taxon=None, edge_length=10.0348009179)
+    nd_4316514192 = nd_4316514064.new_child(label="Node4316514192", taxon=None, edge_length=4.82232736143)
+    nd_4316514704 = nd_4316514064.new_child(label="Node4316514704", taxon=None, edge_length=4.58004825968)
+    nd_4316514320 = nd_4316514192.new_child(label="Morelia carinata", taxon=tax_4313742800, edge_length=25.1454154452)
+    nd_4316513552 = nd_4316514192.new_child(label="Node4316513552", taxon=None, edge_length=10.5788836702)
+    nd_4316514576 = nd_4316513552.new_child(label="Morelia viridisS", taxon=tax_4313759952, edge_length=14.566531775)
+    nd_4316514448 = nd_4316513552.new_child(label="Morelia viridisN", taxon=tax_4313759888, edge_length=14.566531775)
+    nd_4316514960 = nd_4316514704.new_child(label="Antaresia maculosa", taxon=tax_4313741328, edge_length=25.387694547)
+    nd_4316514832 = nd_4316514704.new_child(label="Node4316514832", taxon=None, edge_length=5.75635440071)
+    nd_4316515216 = nd_4316514832.new_child(label="Node4316515216", taxon=None, edge_length=12.7115868187)
+    nd_4316515600 = nd_4316514832.new_child(label="Antaresia perthensis", taxon=tax_4313741584, edge_length=19.6313401463)
+    nd_4316515344 = nd_4316515216.new_child(label="Antaresia childreni", taxon=tax_4313741136, edge_length=6.91975332756)
+    nd_4316515088 = nd_4316515216.new_child(label="Antaresia stimsoni", taxon=tax_4313741840, edge_length=6.91975332756)
+    nd_4316515856 = nd_4316515728.new_child(label="Node4316515856", taxon=None, edge_length=14.4811675935)
+    nd_4316516240 = nd_4316515728.new_child(label="Node4316516240", taxon=None, edge_length=0.795030531054)
+    nd_4316515984 = nd_4316515856.new_child(label="Morelia spilota", taxon=tax_4313743312, edge_length=8.71082523711)
+    nd_4316515472 = nd_4316515856.new_child(label="Morelia bredli", taxon=tax_4313742736, edge_length=8.71082523711)
+    nd_4316532816 = nd_4316516240.new_child(label="Node4316532816", taxon=None, edge_length=7.95009719313)
+    nd_4316533840 = nd_4316516240.new_child(label="Morelia oenpelliensis", taxon=tax_4313743248, edge_length=22.3969622995)
+    nd_4316532944 = nd_4316532816.new_child(label="Morelia tracyae", taxon=tax_4313759824, edge_length=14.4468651064)
+    nd_4316516112 = nd_4316532816.new_child(label="Node4316516112", taxon=None, edge_length=2.27822479328)
+    nd_4316533200 = nd_4316516112.new_child(label="Node4316533200", taxon=None, edge_length=6.98040063458)
+    nd_4316533648 = nd_4316516112.new_child(label="Morelia amethistina", taxon=tax_4313742608, edge_length=12.1686403131)
+    nd_4316533328 = nd_4316533200.new_child(label="Morelia clastolepis", taxon=tax_4313742928, edge_length=5.18823967855)
+    nd_4316533072 = nd_4316533200.new_child(label="Node4316533072", taxon=None, edge_length=1.71064757594)
+    nd_4316533520 = nd_4316533072.new_child(label="Morelia kinghorni", taxon=tax_4313743056, edge_length=3.47759210261)
+    nd_4316533712 = nd_4316533072.new_child(label="Morelia nauta", taxon=tax_4313743120, edge_length=3.47759210261)
+    nd_4316534224 = nd_4316534096.new_child(label="Morelia boeleni", taxon=tax_4313742672, edge_length=33.9681920922)
+    nd_4316533968 = nd_4316534096.new_child(label="Node4316533968", taxon=None, edge_length=1.28486469944)
+    nd_4316534480 = nd_4316533968.new_child(label="Node4316534480", taxon=None, edge_length=12.4520799939)
+    nd_4316534864 = nd_4316533968.new_child(label="Node4316534864", taxon=None, edge_length=1.68023264943)
+    nd_4316534608 = nd_4316534480.new_child(label="Bothrochilus boa", taxon=tax_4313741968, edge_length=20.2312473989)
+    nd_4316534352 = nd_4316534480.new_child(label="Liasis albertisii", taxon=tax_4313742160, edge_length=20.2312473989)
+    nd_4316534992 = nd_4316534864.new_child(label="Node4316534992", taxon=None, edge_length=19.0383478987)
+    nd_4316535376 = nd_4316534864.new_child(label="Node4316535376", taxon=None, edge_length=4.75943584051)
+    nd_4316535120 = nd_4316534992.new_child(label="Antaresia melanocephalus", taxon=tax_4313741456, edge_length=11.9647468446)
+    nd_4316534736 = nd_4316534992.new_child(label="Antaresia ramsayi", taxon=tax_4313741712, edge_length=11.9647468446)
+    nd_4316535504 = nd_4316535376.new_child(label="Node4316535504", taxon=None, edge_length=17.1180008393)
+    nd_4316535888 = nd_4316535376.new_child(label="Node4316535888", taxon=None, edge_length=2.6817531508)
+    nd_4316535632 = nd_4316535504.new_child(label="Liasis fuscus", taxon=tax_4313742224, edge_length=9.12565806357)
+    nd_4316535248 = nd_4316535504.new_child(label="Liasis mackloti", taxon=tax_4313742288, edge_length=9.12565806357)
+    nd_4316536016 = nd_4316535888.new_child(label="Apodora papuana", taxon=tax_4313741904, edge_length=23.561905752)
+    nd_4316512720 = nd_4316535888.new_child(label="Liasis olivaceus", taxon=tax_4313742352, edge_length=23.561905752)
+    nd_4316536336 = nd_4316512784.new_child(label="Python timoriensis", taxon=tax_4313760464, edge_length=19.2959696748)
+    nd_4316536208 = nd_4316512784.new_child(label="Python reticulatus", taxon=tax_4313760272, edge_length=19.2959696748)
+    nd_4316536656 = nd_4316536464.new_child(label="Node4316536656", taxon=None, edge_length=6.97033769)
+    nd_4316557776 = nd_4316536464.new_child(label="Python regius", taxon=tax_4313760144, edge_length=38.4084218677)
+    nd_4316536784 = nd_4316536656.new_child(label="Python curtus", taxon=tax_4313760016, edge_length=31.4380841777)
+    nd_4316557520 = nd_4316536656.new_child(label="Node4316557520", taxon=None, edge_length=13.5557299793)
+    nd_4316557648 = nd_4316557520.new_child(label="Python sebae", taxon=tax_4313760336, edge_length=17.8823541984)
+    nd_4316557456 = nd_4316557520.new_child(label="Python molurus", taxon=tax_4313760080, edge_length=17.8823541984)
+    tree_4316557904 = dendropy.Tree(label="Tree11", taxon_namespace=tree_list.taxon_namespace)
+    tree_list.append(tree_4316557904, reindex_taxa=False)
+    nd_4316558480 = tree_4316557904.seed_node.new_child(label="Node4316558480", taxon=None, edge_length=71.3865451194)
+    nd_4316607440 = tree_4316557904.seed_node.new_child(label="Candoia aspera", taxon=tax_4313742032, edge_length=211.46817309)
+    nd_4316558608 = nd_4316558480.new_child(label="Xenopeltis unicolor", taxon=tax_4313760592, edge_length=140.081627971)
+    nd_4316558800 = nd_4316558480.new_child(label="Node4316558800", taxon=None, edge_length=26.0234610563)
+    nd_4316558928 = nd_4316558800.new_child(label="Loxocemus bicolor", taxon=tax_4313742480, edge_length=114.058166915)
+    nd_4316558736 = nd_4316558800.new_child(label="Node4316558736", taxon=None, edge_length=40.1699176918)
+    nd_4316559184 = nd_4316558736.new_child(label="Node4316559184", taxon=None, edge_length=12.7558559915)
+    nd_4316606544 = nd_4316558736.new_child(label="Node4316606544", taxon=None, edge_length=17.7722054357)
+    nd_4316559312 = nd_4316559184.new_child(label="Node4316559312", taxon=None, edge_length=12.1691499629)
+    nd_4316585744 = nd_4316559184.new_child(label="Node4316585744", taxon=None, edge_length=28.055060848)
+    nd_4316559440 = nd_4316559312.new_child(label="Node4316559440", taxon=None, edge_length=4.4227398576)
+    nd_4316583696 = nd_4316559312.new_child(label="Node4316583696", taxon=None, edge_length=3.26334313874)
+    nd_4316559568 = nd_4316559440.new_child(label="Node4316559568", taxon=None, edge_length=5.98387013923)
+    nd_4316561232 = nd_4316559440.new_child(label="Node4316561232", taxon=None, edge_length=17.9619739892)
+    nd_4316559696 = nd_4316559568.new_child(label="Node4316559696", taxon=None, edge_length=7.53311752135)
+    nd_4316560208 = nd_4316559568.new_child(label="Node4316560208", taxon=None, edge_length=4.18557870369)
+    nd_4316559824 = nd_4316559696.new_child(label="Morelia carinata", taxon=tax_4313742800, edge_length=31.0235157502)
+    nd_4316559056 = nd_4316559696.new_child(label="Node4316559056", taxon=None, edge_length=12.7654788618)
+    nd_4316560080 = nd_4316559056.new_child(label="Morelia viridisS", taxon=tax_4313759952, edge_length=18.2580368884)
+    nd_4316559952 = nd_4316559056.new_child(label="Morelia viridisN", taxon=tax_4313759888, edge_length=18.2580368884)
+    nd_4316560464 = nd_4316560208.new_child(label="Antaresia maculosa", taxon=tax_4313741328, edge_length=34.3710545678)
+    nd_4316560336 = nd_4316560208.new_child(label="Node4316560336", taxon=None, edge_length=7.81168349566)
+    nd_4316560720 = nd_4316560336.new_child(label="Node4316560720", taxon=None, edge_length=15.350690271)
+    nd_4316561104 = nd_4316560336.new_child(label="Antaresia perthensis", taxon=tax_4313741584, edge_length=26.5593710722)
+    nd_4316560848 = nd_4316560720.new_child(label="Antaresia childreni", taxon=tax_4313741136, edge_length=11.2086808012)
+    nd_4316560592 = nd_4316560720.new_child(label="Antaresia stimsoni", taxon=tax_4313741840, edge_length=11.2086808012)
+    nd_4316561360 = nd_4316561232.new_child(label="Node4316561360", taxon=None, edge_length=1.39777518086)
+    nd_4316583312 = nd_4316561232.new_child(label="Node4316583312", taxon=None, edge_length=13.6008570384)
+    nd_4316582032 = nd_4316561360.new_child(label="Morelia oenpelliensis", taxon=tax_4313743248, edge_length=25.1807542407)
+    nd_4316560976 = nd_4316561360.new_child(label="Node4316560976", taxon=None, edge_length=7.6242060025)
+    nd_4316582288 = nd_4316560976.new_child(label="Morelia tracyae", taxon=tax_4313759824, edge_length=17.5565482382)
+    nd_4316582160 = nd_4316560976.new_child(label="Node4316582160", taxon=None, edge_length=3.73213849687)
+    nd_4316582544 = nd_4316582160.new_child(label="Node4316582544", taxon=None, edge_length=8.62088071739)
+    nd_4316583056 = nd_4316582160.new_child(label="Morelia amethistina", taxon=tax_4313742608, edge_length=13.8244097414)
+    nd_4316582672 = nd_4316582544.new_child(label="Morelia clastolepis", taxon=tax_4313742928, edge_length=5.20352902397)
+    nd_4316582416 = nd_4316582544.new_child(label="Node4316582416", taxon=None, edge_length=2.83199057731)
+    nd_4316582928 = nd_4316582416.new_child(label="Morelia kinghorni", taxon=tax_4313743056, edge_length=2.37153844665)
+    nd_4316582800 = nd_4316582416.new_child(label="Morelia nauta", taxon=tax_4313743120, edge_length=2.37153844665)
+    nd_4316583440 = nd_4316583312.new_child(label="Morelia spilota", taxon=tax_4313743312, edge_length=12.9776723832)
+    nd_4316583184 = nd_4316583312.new_child(label="Morelia bredli", taxon=tax_4313742736, edge_length=12.9776723832)
+    nd_4316583824 = nd_4316583696.new_child(label="Node4316583824", taxon=None, edge_length=3.5026210108)
+    nd_4316585104 = nd_4316583696.new_child(label="Node4316585104", taxon=None, edge_length=0.85376044557)
+    nd_4316583952 = nd_4316583824.new_child(label="Node4316583952", taxon=None, edge_length=23.3327851176)
+    nd_4316584336 = nd_4316583824.new_child(label="Node4316584336", taxon=None, edge_length=12.8697548268)
+    nd_4316584080 = nd_4316583952.new_child(label="Antaresia melanocephalus", taxon=tax_4313741456, edge_length=18.8644940012)
+    nd_4316583568 = nd_4316583952.new_child(label="Antaresia ramsayi", taxon=tax_4313741712, edge_length=18.8644940012)
+    nd_4316584464 = nd_4316584336.new_child(label="Node4316584464", taxon=None, edge_length=22.0000981488)
+    nd_4316558288 = nd_4316584336.new_child(label="Node4316558288", taxon=None, edge_length=5.24037108992)
+    nd_4316584592 = nd_4316584464.new_child(label="Liasis fuscus", taxon=tax_4313742224, edge_length=7.32742614319)
+    nd_4316584208 = nd_4316584464.new_child(label="Liasis mackloti", taxon=tax_4313742288, edge_length=7.32742614319)
+    nd_4316584848 = nd_4316558288.new_child(label="Apodora papuana", taxon=tax_4313741904, edge_length=24.0871532021)
+    nd_4316558224 = nd_4316558288.new_child(label="Liasis olivaceus", taxon=tax_4313742352, edge_length=24.0871532021)
+    nd_4316585232 = nd_4316585104.new_child(label="Node4316585232", taxon=None, edge_length=19.3585494438)
+    nd_4316585616 = nd_4316585104.new_child(label="Morelia boeleni", taxon=tax_4313742672, edge_length=44.846139684)
+    nd_4316585360 = nd_4316585232.new_child(label="Bothrochilus boa", taxon=tax_4313741968, edge_length=25.4875902402)
+    nd_4316584976 = nd_4316585232.new_child(label="Liasis albertisii", taxon=tax_4313742160, edge_length=25.4875902402)
+    nd_4316585872 = nd_4316585744.new_child(label="Python timoriensis", taxon=tax_4313760464, edge_length=33.0773323832)
+    nd_4316585488 = nd_4316585744.new_child(label="Python reticulatus", taxon=tax_4313760272, edge_length=33.0773323832)
+    nd_4316606800 = nd_4316606544.new_child(label="Node4316606800", taxon=None, edge_length=13.6364666177)
+    nd_4316607312 = nd_4316606544.new_child(label="Python regius", taxon=tax_4313760144, edge_length=56.1160437871)
+    nd_4316606928 = nd_4316606800.new_child(label="Python curtus", taxon=tax_4313760016, edge_length=42.4795771694)
+    nd_4316606672 = nd_4316606800.new_child(label="Node4316606672", taxon=None, edge_length=16.6495052056)
+    nd_4316607184 = nd_4316606672.new_child(label="Python sebae", taxon=tax_4313760336, edge_length=25.8300719638)
+    nd_4316607056 = nd_4316606672.new_child(label="Python molurus", taxon=tax_4313760080, edge_length=25.8300719638)
+
+    # set labels of nodes with taxa to taxon label, else oid (for consistent
+    # identification in debugging)
+    # for t in tree_list:
+    #     t.assign_node_labels_from_taxon_or_oid()
+
+    return tree_list
+
+
+class TreeEuclideanDistTest(unittest.TestCase):
+    """Tests treecompare.euclidean_distance() against known pairwise values.
+
+    Four six-taxon trees are parsed from a single Newick stream; each of
+    trees 1-3 differs from tree 0 in a single branch length or in the
+    placement of one taxon, so the expected pairwise distances asserted
+    below are fixed reference values.
+    """
+
+    def runTest(self):
+         # Parse all four trees into one TreeList (shared taxon namespace).
+         tree_list = dendropy.TreeList.get_from_stream(
+            StringIO("""((t5:0.161175,t6:0.161175):0.392293,((t4:0.104381,(t2:0.075411,t1:0.075411):1):0.065840,t3:0.170221):0.383247);
+                        ((t5:2.161175,t6:0.161175):0.392293,((t4:0.104381,(t2:0.075411,t1:0.075411):1):0.065840,t3:0.170221):0.383247);
+                        ((t5:0.161175,t6:0.161175):0.392293,((t2:0.075411,(t4:0.104381,t1:0.075411):1):0.065840,t3:0.170221):0.383247);
+                        ((t5:0.161175,t6:0.161175):0.392293,((t4:0.104381,(t2:0.075411,t1:0.075411):0.028969):0.065840,t3:0.170221):0.383247);
+                        """),
+            schema="newick")
+         # Bipartitions are encoded up front; the distance function
+         # compares trees via their bipartition encodings.
+         for t in tree_list:
+             t.encode_bipartitions()
+         self.assertAlmostEqual(treecompare.euclidean_distance(tree_list[0], tree_list[1]), 2.0)
+         self.assertAlmostEqual(treecompare.euclidean_distance(tree_list[0], tree_list[2]), math.sqrt(2.0))
+         self.assertAlmostEqual(treecompare.euclidean_distance(tree_list[0], tree_list[3]), 0.97103099999999998)
+         self.assertAlmostEqual(treecompare.euclidean_distance(tree_list[1], tree_list[2]), math.sqrt(6.0))
+         self.assertAlmostEqual(treecompare.euclidean_distance(tree_list[1], tree_list[3]), 2.2232636377544162)
+         self.assertAlmostEqual(treecompare.euclidean_distance(tree_list[2], tree_list[3]), 1.000419513484718)
+
+class TreeSymmetricDistTest(unittest.TestCase):
+    """Tests treecompare.symmetric_difference() on two six-taxon trees
+    whose topologies differ in the placement of two cherries."""
+
+    def runTest(self):
+         ref = dendropy.Tree.get_from_stream(StringIO("((t5,t6),((t4,(t2,t1)),t3));"), schema="newick")
+         # Both trees must share a taxon namespace to be comparable.
+         taxon_namespace = ref.taxon_namespace
+         ref.encode_bipartitions()
+         o_tree = dendropy.Tree.get_from_stream(StringIO("((t1,t2),((t4,(t5,t6)),t3));"), schema="newick", taxon_namespace=taxon_namespace)
+         o_tree.encode_bipartitions()
+         # Expected symmetric difference between the two topologies is 2.
+         self.assertEqual(treecompare.symmetric_difference(o_tree, ref), 2)
+
+class TreePatristicDistTest(unittest.TestCase):
+    """Tests patristic distances on a fixed six-leaf tree.
+
+    Expected values are sums of edge lengths along the leaf-to-leaf path,
+    readable directly off the Newick string in setUp() (e.g. a <-> b =
+    1 + 1 = 2; a <-> d = 1 + 1 + 1 + 1 + 2 = 6).
+    """
+
+    def setUp(self):
+        # Fresh tree per test method; both tests use the same topology.
+        self.tree = dendropy.Tree.get_from_string("(((a:1, b:1):1, c:2):1, (d:2, (e:1,f:1):1):1):0;", schema="newick")
+
+    def testPatDistMatrix(self):
+        # Exercises the PatristicDistanceMatrix object, called as
+        # pdm(taxon1, taxon2).
+        pdm = treemeasure.PatristicDistanceMatrix(self.tree)
+        def _chk_distance(pdm, t1, t2, exp_distance):
+            # Look up the Taxon objects by label, then compare the
+            # matrix's distance against the hand-computed expectation.
+            tax1 = self.tree.taxon_namespace.require_taxon(label=t1)
+            tax2 = self.tree.taxon_namespace.require_taxon(label=t2)
+            pd = pdm(tax1, tax2)
+            self.assertEqual(pd, exp_distance, "{}: {} <-> {}: {} instead of {}".format(self.tree, t1, t2, pd, exp_distance))
+        _chk_distance(pdm, "a", "b", 2)
+        _chk_distance(pdm, "a", "c", 4)
+        _chk_distance(pdm, "b", "c", 4)
+        _chk_distance(pdm, "a", "d", 6)
+        _chk_distance(pdm, "f", "d", 4)
+        _chk_distance(pdm, "c", "d", 6)
+
+    def testPatDistFunc(self):
+        # Exercises the standalone treemeasure.patristic_distance()
+        # function on the same leaf pairs as testPatDistMatrix.
+        self.tree.encode_bipartitions()
+        def _chk_distance(t1, t2, exp_distance):
+            tax1 = self.tree.taxon_namespace.get_taxon(label=t1)
+            tax2 = self.tree.taxon_namespace.get_taxon(label=t2)
+            pd = treemeasure.patristic_distance(self.tree, tax1, tax2)
+            self.assertEqual(pd, exp_distance)
+        _chk_distance("a", "b", 2)
+        _chk_distance("a", "c", 4)
+        _chk_distance("b", "c", 4)
+        _chk_distance("a", "d", 6)
+        _chk_distance("f", "d", 4)
+        _chk_distance("c", "d", 6)
+
+class TreeUnaryMetricsTest(unittest.TestCase):
+
+    def testNBar(self):
+        """Checks treemeasure.N_bar() against precomputed values for the
+        11 hard-coded reference trees."""
+        trees = _get_reference_tree_list()
+        # Original data source for the reference trees (kept for provenance):
+        # trees = dendropy.TreeList.get_from_path(
+        #         src=pathmap.tree_source_path("pythonidae.beast.mcmc.trees"),
+        #         schema='nexus')
+        # One expected N-bar value per reference tree, in tree order.
+        expected_values = [
+            7.818181818181818,
+            7.515151515151516,
+            7.666666666666667,
+            8.727272727272727,
+            8.757575757575758,
+            8.636363636363637,
+            8.727272727272727,
+            8.757575757575758,
+            8.727272727272727,
+            8.727272727272727,
+            8.575757575757576,
+            ]
+        for idx, tree in enumerate(trees):
+            observed = treemeasure.N_bar(tree)
+            expected = expected_values[idx]
+            self.assertAlmostEqual(expected, observed)
+
+    def test_colless_tree_imbalance(self):
+        """Checks treemeasure.colless_tree_imbalance() with
+        normalize="max" against precomputed values for the 11
+        reference trees."""
+        trees = _get_reference_tree_list()
+        # Expected values were generated by the earlier (Python 2-era)
+        # per-tree method call preserved below:
+        # for tree in trees:
+        #     print tree.colless_tree_imbalance()
+        expected_values = [
+            0.3024193548387097,
+            0.2540322580645161,
+            0.2762096774193548,
+            0.3548387096774194,
+            0.35685483870967744,
+            0.344758064516129,
+            0.3548387096774194,
+            0.35685483870967744,
+            0.3548387096774194,
+            0.3548387096774194,
+            0.3407258064516129,
+            ]
+        for idx, tree in enumerate(trees):
+            observed = treemeasure.colless_tree_imbalance(tree, normalize="max")
+            expected = expected_values[idx]
+            self.assertAlmostEqual(expected, observed)
+
+    def test_colless_tree_imbalance2(self):
+        # library(apTreeshape)
+        # data(hivtree.treeshape)
+        # print(paste("colless, raw: ", colless(hivtree.treeshape), sep=""))
+        # print(paste("colless, pda: ", colless(hivtree.treeshape, "pda"), sep=""))
+        # print(paste("colless, yule: ", colless(hivtree.treeshape, "yule"), sep=""))
+        # # [1] "colless, raw: 992"
+        # # [1] "colless, pda: 0.369977836654251"
+        # # [1] "colless, yule: 0.993137704712054"
+        tree = dendropy.Tree.get_from_path(
+                src=pathmap.tree_source_path("hiv1.nexus"),
+                schema='nexus')
+        self.assertAlmostEqual(treemeasure.colless_tree_imbalance(tree, normalize=None), 992)
+        self.assertAlmostEqual(treemeasure.colless_tree_imbalance(tree, normalize="pda"), 0.3699778366542512686443)
+        self.assertAlmostEqual(treemeasure.colless_tree_imbalance(tree, normalize="yule"), 0.9931377047120540924041)
+
+    def test_sackin_index(self):
+        # library(apTreeshape)
+        # data(hivtree.treeshape)
+        # print(paste("sackin, raw: ", sackin(hivtree.treeshape), sep=""))
+        # print(paste("sackin, pda: ", sackin(hivtree.treeshape, "pda"), sep=""))
+        # print(paste("sackin, yule: ", sackin(hivtree.treeshape, "yule"), sep=""))
+        # # [1] "sackin, raw: 2028"
+        # # [1] "sackin, pda: 0.756365980579457"
+        # # [1] "sackin, yule: 0.822783440343329"
+        tree = dendropy.Tree.get_from_path(
+                src=pathmap.tree_source_path("hiv1.nexus"),
+                schema='nexus')
+        self.assertAlmostEqual(treemeasure.sackin_index(tree, normalize=None), 2028)
+        self.assertAlmostEqual(treemeasure.sackin_index(tree, normalize="pda"), 0.756365980579457)
+        self.assertAlmostEqual(treemeasure.sackin_index(tree, normalize="yule"), 0.822783440343329)
+
+    def test_b1(self):
+        trees = _get_reference_tree_list()
+        # for tree in trees:
+        #     print tree.B1()
+        expected_values = [
+            18.686544011544008,
+            16.862301587301587,
+            18.012301587301586,
+            15.803210678210679,
+            15.803210678210679,
+            16.219877344877347,
+            15.80321067821068,
+            15.80321067821068,
+            15.803210678210679,
+            15.80321067821068,
+            16.10321067821068,
+            ]
+        for idx, tree in enumerate(trees):
+            observed = treemeasure.B1(tree)
+            expected = expected_values[idx]
+            self.assertAlmostEqual(expected, observed)
+
+    def test_treeness(self):
+        trees = _get_reference_tree_list()
+        # for tree in trees:
+        #     print tree.treeness()
+        expected_values = [
+            0.82043976304486,
+            0.30678033634423607,
+            0.2686940663128338,
+            0.2674702980152253,
+            0.2731856127080352,
+            0.26942308963183575,
+            0.2764640737121644,
+            0.26096444220828763,
+            0.2846852453916621,
+            0.2791363657987356,
+            0.28304948441090816,
+            ]
+        for idx, tree in enumerate(trees):
+            observed = treemeasure.treeness(tree)
+            expected = expected_values[idx]
+            self.assertAlmostEqual(expected, observed)
+
+    def test_gamma2(self):
+        trees = _get_reference_tree_list()
+        # for tree in trees:
+        #     print tree.pybus_harvey_gamma()
+        expected_values = [
+            6.690070011342222,
+            -2.1016546214332665,
+            -2.2071830302961493,
+            -0.9868763184862083,
+            -1.1223514055125514,
+            -1.0914035287339103,
+            -0.9432772103480326,
+            -0.9855794349340775,
+            -0.7566110136514949,
+            -0.4693672063234924,
+            0.08314644690264045,
+            ]
+        for idx, tree in enumerate(trees):
+            observed = treemeasure.pybus_harvey_gamma(tree)
+            expected = expected_values[idx]
+            self.assertAlmostEqual(expected, observed)
+
+    def testPHGamma(self):
+        newick_str = "((t5:0.161175,t6:0.161175):0.392293,((t4:0.104381,(t2:0.075411,t1:0.075411):0.028969):0.065840,t3:0.170221):0.383247);"
+        tree = dendropy.Tree.get_from_stream(StringIO(newick_str), schema="newick")
+        g = treemeasure.pybus_harvey_gamma(tree)
+        self.assertAlmostEqual(g, 0.546276, 4)
+
class TreeCompareTests(dendropytest.ExtendedTestCase):
    """
    Pairwise tree-to-tree distance measures from ``treecompare`` checked
    against precomputed values for every (i, j) pair of reference trees.
    """

    def setUp(self):
        # Two independently-built copies of the reference trees sharing a
        # single TaxonNamespace, so trees are comparable across the lists.
        tns = dendropy.TaxonNamespace()
        self.tree_list1 = _get_reference_tree_list(taxon_namespace=tns)
        self.tree_list2 = _get_reference_tree_list(taxon_namespace=tns)

    # def testNonMutatingDistinctTaxonNamespaceSameStructComparisons(self):
    #     tl1_ts = self.tree_list1.taxon_namespace
    #     tl2_ts = self.tree_list2.taxon_namespace
    #     self.assertIsNot(tl1_ts, tl2_ts)
    #     for i, t1 in enumerate(self.tree_list1):
    #         t2 = self.tree_list2[i]
    #         t1_ts = t1.taxon_namespace
    #         t2_ts = t2.taxon_namespace
    #         self.assertIsNot(t1_ts, t2_ts)
    #         self.assertEqual(t1.symmetric_difference(t2), 0)
    #         self.assertAlmostEqual(t1.euclidean_distance(t2), 0)
    #         self.assertAlmostEqual(t1.robinson_foulds_distance(t2), 0)
    #         self.assertIs(t1.taxon_namespace, t1_ts)
    #         self.assertIs(t2.taxon_namespace, t2_ts)
    #     self.assertIs(self.tree_list1.taxon_namespace, tl1_ts)
    #     self.assertIs(self.tree_list2.taxon_namespace, tl2_ts)

    def testSymmetricDifferences(self):
        """Symmetric (unweighted RF) difference for every pair (i, j), i < j."""
        # Keys are (i, j) index pairs into the reference tree lists.
        expected = {
            (0,1):60, (0,2):60, (0,3):60, (0,4):60, (0,5):60, (0,6):60, (0,7):60, (0,8):60,
            (0,9):60, (0,10):60, (1,2):14, (1,3):24, (1,4):22, (1,5):20, (1,6):24, (1,7):22, (1,8):24,
            (1,9):22, (1,10):22, (2,3):18, (2,4):16, (2,5):16, (2,6):18, (2,7):16, (2,8):18, (2,9):16, (2,10):16,
            (3,4):4, (3,5):4, (3,6):0, (3,7):4, (3,8):0, (3,9):2, (3,10):4, (4,5):2, (4,6):4, (4,7):0, (4,8):4,
            (4,9):2, (4,10):4, (5,6):4, (5,7):2, (5,8):4, (5,9):2, (5,10):4, (6,7):4, (6,8):0, (6,9):2, (6,10):4,
            (7,8):4, (7,9):2, (7,10):4, (8,9):2, (8,10):4, (9,10):2,
        }
        # Tree i from list 1 is compared against tree j = i+j+1 from list 2.
        for i, t1 in enumerate(self.tree_list1[:-1]):
            for j, t2 in enumerate(self.tree_list2[i+1:]):
                v = treecompare.symmetric_difference(t1, t2)
                self.assertEqual(expected[(i, i+j+1)], v)
#                print "(%d,%d):%d," % (i, i+j+1, v),
#                if (i * i+j+1) % 6 == 0:
#                    print

    def testEuclideanDistances(self):
        """Euclidean (branch-length) distance for every pair (i, j), i < j."""
        expected = {
            (0,1):442.518379997, (0,2):458.269219125, (0,3):492.707662859, (0,4):457.731995932, (0,5):463.419798784, (0,6):462.181969494,
            (0,7):439.865064545, (0,8):462.3054297, (0,9):479.06569226, (0,10):544.720324057, (1,2):105.534825723, (1,3):168.86739068, (1,4):119.287056085, (1,5):127.221894919, (1,6):125.918517173,
            (1,7):102.290062347, (1,8):130.5296198, (1,9):154.336066685, (1,10):247.555999428, (2,3):89.1098950842, (2,4):45.5124918081,
            (2,5):52.2607244547, (2,6):53.0477320261, (2,7):62.1391636266, (2,8):59.898883066, (2,9):79.3921379438, (2,10):172.187021923,
            (3,4):73.4046806483, (3,5):61.7211889655, (3,6):63.308525227,
            (3,7):113.043429355, (3,8):64.9098905352, (3,9):43.9926843558, (3,10):91.395044825, (4,5):22.881252195, (4,6):24.686671743,
            (4,7):47.14854215, (4,8):30.4425119229, (4,9):58.4893274048, (4,10):158.948156946, (5,6):24.7029660833, (5,7):56.9022982438, (5,8):25.0745838358, (5,9):45.9638357231, (5,10):146.364107049,
            (6,7):56.1301333366, (6,8):20.3469798051, (6,9):43.429825221, (6,10):145.712937469, (7,8):58.1647873304, (7,9):89.4537113125, (7,10):197.098347126, (8,9):40.5187846693, (8,10):145.393476072,
            (9,10):111.210401924,
        }
        for i, t1 in enumerate(self.tree_list1[:-1]):
            for j, t2 in enumerate(self.tree_list2[i+1:]):
                v = treecompare.euclidean_distance(t1, t2)
                self.assertAlmostEqual(expected[(i, i+j+1)], v)
#                print "(%d,%d):%s," % (i, i+j+1, v),
#                if (i * i+j+1) % 6 == 0:
#                    print

    def testRobinsonFouldsDistances(self):
        """Weighted Robinson-Foulds distance for every pair (i, j), i < j."""
        expected = {
            (0,1):1849.2928245, (0,2):2058.49072588, (0,3):2196.0995614, (0,4):1953.16064964, (0,5):1984.76411566, (0,6):1943.24487014,
            (0,7):1723.09194669, (0,8):1920.18504491, (0,9):1998.04696628, (0,10):2406.42091465, (1,2):508.212960297, (1,3):702.092000773, (1,4):579.45550447, (1,5):577.047914047, (1,6):596.881857714,
            (1,7):535.123132276, (1,8):611.28408319, (1,9):632.852687475, (1,10):857.759045631, (2,3):364.804588356, (2,4):283.907134148,
            (2,5):305.534136399, (2,6):318.128572842, (2,7):424.71989186, (2,8):351.751319705, (2,9):358.03680072, (2,10):531.731219604,
            (3,4):315.556017395, (3,5):271.016494089, (3,6):314.906668504,
            (3,7):517.444273417, (3,8):343.014958112, (3,9):266.498405531, (3,10):278.870282525, (4,5):133.642994808, (4,6):134.649689854,
            (4,7):260.010627711, (4,8):148.901405649, (4,9):187.954728978, (4,10):477.315325085, (5,6):150.970483084, (5,7):277.342311245, (5,8):144.704886539, (5,9):159.326519241, (5,10):449.629738145,
            (6,7):256.511541887, (6,8):119.487158128, (6,9):182.878241583, (6,10):493.201642403, (7,8):237.16728985, (7,9):296.353239488, (7,10):709.696300851, (8,9):171.021015022, (8,10):522.572965967,
            (9,10):435.439226227,
        }
        for i, t1 in enumerate(self.tree_list1[:-1]):
            for j, t2 in enumerate(self.tree_list2[i+1:]):
                v = treecompare.robinson_foulds_distance(t1, t2)
                self.assertAlmostEqual(expected[(i, i+j+1)], v)
#                print "(%d,%d):%s," % (i, i+j+1, v),
#                if (i * i+j+1) % 6 == 0:
#                    print
+
class FrequencyOfSplitsTest(unittest.TestCase):
    """Bipartition (split) frequency queries on a sample tree collection."""

    def setUp(self):
        # Load the sample trees once per test.
        source = pathmap.tree_source_path('pythonidae.random.bd0301.tre')
        self.trees = dendropy.TreeList.get_from_path(src=source, schema='nexus')

    def testCount1(self):
        """Frequency of a known bipartition matches the expected proportion."""
        leaf_labels = ['Python regius', 'Apodora papuana']
        observed = self.trees.frequency_of_bipartition(labels=leaf_labels)
        self.assertAlmostEqual(observed, 0.04)

    def testRaisesIndexError(self):
        """Querying with an unknown taxon label raises IndexError."""
        leaf_labels = ['Bad Taxon', 'Apodora papuana']
        with self.assertRaises(IndexError):
            self.trees.frequency_of_bipartition(labels=leaf_labels)
+
+
if __name__ == "__main__":
    # Run this module's tests when invoked directly as a script.
    unittest.main()
diff --git a/dendropy/test/test_tree_from_splits.py b/dendropy/test/test_tree_from_splits.py
new file mode 100644
index 0000000..e2919b5
--- /dev/null
+++ b/dendropy/test/test_tree_from_splits.py
@@ -0,0 +1,55 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+NEXUS data read/write parse/format tests.
+"""
+
+import unittest
+from dendropy.test.support import pathmap
+from dendropy.utility.messaging import get_logger
+from dendropy.calculate import treecompare
+import dendropy
+
+_LOG = get_logger(__name__)
+
class TreeFromBipartitionsTest(unittest.TestCase):
    """Round-trip check: a tree's bipartition encoding rebuilds an equivalent tree."""

    def testTrees(self):
        # (filename, rooting directive, expected rootedness) cases; the third
        # element is informational only and not used by the assertions.
        cases = [
                ("dendropy-test-trees-n33-unrooted-x100a.nexus", "force-unrooted", False),
                ("dendropy-test-trees-multifurcating-unrooted.nexus", "force-unrooted", False),
                ("pythonidae.beast.summary.tre", "force-rooted", True),
                ("primates.beast.mcct.medianh.tre", "force-rooted", True),
                ]
        for filename, rooting_directive, _ in cases:
            original = dendropy.Tree.get_from_path(
                    pathmap.tree_source_path(filename),
                    "nexus",
                    rooting=rooting_directive)
            encoding = original.encode_bipartitions()
            rebuilt = dendropy.Tree.from_bipartition_encoding(
                    encoding,
                    taxon_namespace=original.taxon_namespace,
                    is_rooted=original.is_rooted)
            _LOG.debug("--\n       File: {} ({})".format(filename, original.is_rooted))
            _LOG.debug("     Original: {}".format(original.as_string("newick")))
            _LOG.debug("Reconstructed: {}".format(rebuilt.as_string("newick")))
            # Zero symmetric (RF) difference means topologically identical.
            self.assertEqual(treecompare.symmetric_difference(original, rebuilt), 0)
+
if __name__ == "__main__":
    # Run this module's tests when invoked directly as a script.
    unittest.main()
diff --git a/dendropy/test/test_tree_operations_and_manipulations.py b/dendropy/test/test_tree_operations_and_manipulations.py
new file mode 100644
index 0000000..2f38c8e
--- /dev/null
+++ b/dendropy/test/test_tree_operations_and_manipulations.py
@@ -0,0 +1,842 @@
+
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests native tree structuring routines.
+"""
+
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+import sys
+if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
+    from dendropy.utility.filesys import pre_py34_open as open
+import unittest
+from dendropy.test.support import curated_test_tree
+from dendropy.test.support import pathmap
+from dendropy.test.support import dendropytest
+from dendropy.utility import messaging
+from dendropy.test.support.dendropytest import ExtendedTestCase
+from dendropy.test.support.mockrandom import MockRandom
+import dendropy
+from dendropy.calculate import treecompare
+import re
+
+_LOG = messaging.get_logger(__name__)
+
class ScaleTest(unittest.TestCase):
    """Edge-length scaling via ``Tree.scale_edges``."""

    def testScaleEdgesNoLens(self):
        """Scaling trees that have no branch lengths leaves them unchanged."""
        newick_list = ['(5,((4,3),2),1);',
            '(5,(4,3,2),1);',
            '(5,((4,3),2),1);',
            '(5,(4,3),2,1);',
            '(5,((4,3),2),1);',
            '(5,4,3,2,1);']
        trees = dendropy.TreeList.get_from_stream(
                        StringIO("\n".join(newick_list)),
                        schema="newick")
        for position, tree in enumerate(trees):
            tree.scale_edges(2.0)
            self.assertEqual(newick_list[position], "%s;" % tree._as_newick_string())

    def testScaleEdgesRealTest(self):
        """Scaling by 2 doubles branch lengths; rescaling by 0.5 restores them."""
        sources = ['(5:3,((4:1,3:1):1.5,2:3),1:0);',
            '(5:7.5,4:1,3:-2,2:4,1:.1);']
        doubled = ['(5:6.0,((4:2.0,3:2.0):3.0,2:6.0),1:0.0);',
                    '(5:15.0,4:2.0,3:-4.0,2:8.0,1:0.2);']
        restored = ['(5:3.0,((4:1.0,3:1.0):1.5,2:3.0),1:0.0);',
            '(5:7.5,4:1.0,3:-2.0,2:4.0,1:0.1);']
        trees = dendropy.TreeList.get_from_stream(
                        StringIO("\n".join(sources)),
                        schema="newick")
        for position, tree in enumerate(trees):
            tree.scale_edges(2)
            self.assertEqual(doubled[position], "%s;" % tree._as_newick_string())
        for position, tree in enumerate(trees):
            tree.scale_edges(.5)
            self.assertEqual(restored[position], "%s;" % tree._as_newick_string())
+
class RandomlyRotateTest(unittest.TestCase):
    """
    ``Tree.randomly_rotate`` must permute child order (changing the rendered
    string) while preserving the root and the tree's topology.
    """

    def runTest(self):
        # A fixed large tree, parsed twice: 'ref' stays pristine while
        # 'changing' is repeatedly rotated and compared back against it.
        n = '(Basichlsac,(Lamprothma,Mougeotisp),(((Haplomitr2,Petalaphy),((Angiopteri,(((Azollacaro,((Dennstasam,(Oleandrapi,Polypodapp)),Dicksonant)),Vittarifle),Botrychbit)),(Isoetesmel,((((Agathismac,Agathisova),Pseudotsu),(((Libocedrus,Juniperusc),Callitris),Athrotaxi)),((Liriodchi,Nelumbo),Sagittari))))),Thuidium));'
        trees = dendropy.TreeList.get_from_stream(StringIO(n+n), schema="newick")
        ref = trees[0]
        changing = trees[1]
        # MockRandom supplies a deterministic pseudo-random sequence, so this
        # test is reproducible.
        rng = MockRandom()
        ref.encode_bipartitions()
        changing.encode_bipartitions()
        orig_root = changing.seed_node
        for i in range(50):
            changing.randomly_rotate(rng=rng)
            # Rotation should change the rendering but never the root ...
            self.assertNotEqual(str(changing), n)
            self.assertEqual(orig_root, changing.seed_node)
            changing._debug_check_tree(logger_obj=_LOG, check_bipartitions=True)
            # ... nor the topology (zero symmetric difference from 'ref').
            if treecompare.symmetric_difference(ref, changing) != 0:
                self.fail("\n%s\n!=\n%s" % (str(ref), str(changing)))
+
class RandomlyReorientTest(unittest.TestCase):
    """
    ``Tree.randomly_reorient`` must change the rendered string while
    preserving the tree's topology (zero symmetric difference).
    """

    def runTest(self):
        # A fixed large tree; 'k' is parsed first only to establish the
        # shared taxon namespace used by both 'ref' and 'changing'.
        n = '(Basichlsac,(Lamprothma,Mougeotisp),(((Haplomitr2,Petalaphy),((Angiopteri,(((Azollacaro,((Dennstasam,(Oleandrapi,Polypodapp)),Dicksonant)),Vittarifle),Botrychbit)),(Isoetesmel,((((Agathismac,Agathisova),Pseudotsu),(((Libocedrus,Juniperusc),Callitris),Athrotaxi)),((Liriodchi,Nelumbo),Sagittari))))),Thuidium));'
        k = dendropy.TreeList.get_from_stream(StringIO(n), schema="newick")[0]
        trees = dendropy.TreeList.get_from_stream(StringIO(n+n), schema="newick", taxon_namespace=k.taxon_namespace)
        ref = trees[0]
        changing = trees[1]
        # MockRandom supplies a deterministic pseudo-random sequence.
        rng = MockRandom()
        for i in range(50):
            changing.randomly_reorient(rng=rng, update_bipartitions=True)
            self.assertNotEqual(str(changing), n)
            changing._debug_check_tree(logger_obj=_LOG, check_bipartitions=True)
            d = treecompare.symmetric_difference(ref, changing, is_bipartitions_updated=False)
            if d != 0:
                self.fail("\n{}\n!=\n{}\nRF={}".format(str(ref), str(changing), d))
+
class CollapseConflictingTest(unittest.TestCase):
    """
    ``Node.collapse_conflicting``: collapsing the internal edges of a tree
    that conflict with a target bipartition must yield the expected
    (pre-collapsed) topology.
    """

    def runTest(self):
        # Trees are read as (input, expected-after-collapse) pairs over a
        # fixed 5-taxon namespace.
        taxon_namespace = dendropy.TaxonNamespace([str(i+1) for i in range(5)])
        tree_list = dendropy.TreeList.get_from_stream(
            StringIO("""
            (5,((4,3),2),1);
            (5,(4,3,2),1);
            (5,((4,3),2),1);
            (5,(4,3),2,1);
            (5,((4,3),2),1);
            (5,4,3,2,1);
            """),
            schema="newick",
            taxon_namespace=taxon_namespace)
        # Each case: collapse the edges of the first tree that conflict with
        # the bipartition described by `bitmask`, then expect the second tree.
        self._check_collapse(tree_list[0], tree_list[1], 0xA)
        self._check_collapse(tree_list[2], tree_list[3], 0x3)
        self._check_collapse(tree_list[4], tree_list[5], 0x5)

    def _check_collapse(self, tree, expected_tree, bitmask):
        """
        Collapse edges of `tree` conflicting with the bipartition given by
        `bitmask`, then assert topological identity with `expected_tree`.
        """
        tree.encode_bipartitions()
        tree_leafset_bitmask = tree.seed_node.edge.bipartition._leafset_bitmask
        # NOTE(review): the original first case passed the misspelled keyword
        # `compule_bitmasks=True`; `compile_bipartition=True`, as used by the
        # other cases, is the intended argument.
        bipartition_to_target = dendropy.Bipartition(
                bitmask=bitmask,
                tree_leafset_bitmask=tree_leafset_bitmask,
                compile_bipartition=True)
        # A compiled bipartition has its lowest relevant bit resolved.
        assert bipartition_to_target._lowest_relevant_bit is not None
        tree.seed_node.collapse_conflicting(bipartition_to_target)
        tree.encode_bipartitions()
        expected_tree.encode_bipartitions()
        self.assertEqual(treecompare.symmetric_difference(tree, expected_tree), 0)
+
class PruneTest(unittest.TestCase):
    """
    ``Tree.prune_taxa`` / ``Tree.retain_taxa`` checked against trees pruned
    externally (PAUP*-pruned reference files).
    """

    def check(self,
            title,
            src_prefix,
            to_retain=False):
        """
        Prune (or, if `to_retain` is True, retain) the taxa listed in the
        `src_prefix` taxon-index file from each input tree, and assert
        topological identity with the corresponding pre-pruned reference tree.
        """
        input_ds = dendropy.DataSet.get_from_path(
                src=pathmap.tree_source_path(src_prefix + ".pre-pruned.nex"),
                schema='nexus')
        tns1 = dendropy.TaxonNamespace()
        input_ds.attach_taxon_namespace(tns1)
        input_taxa = input_ds.taxon_namespaces[0]
        # Reference trees are bound to the same taxon namespace as the inputs
        # so the two data sets are directly comparable.
        output_ds = dendropy.DataSet.get_from_path(
                src=pathmap.tree_source_path(src_prefix + ".paup-pruned.nex"),
                schema='nexus',
                taxon_namespace=input_taxa)
        tns2 = dendropy.TaxonNamespace()
        output_ds.attach_taxon_namespace(tns2)
        # Each row of the taxon file lists the indexes of the taxa to prune
        # (or retain) for the corresponding tree list.
        if to_retain:
            taxf = open(pathmap.tree_source_path(src_prefix + ".retained_taxa.txt"), "r")
        else:
            taxf = open(pathmap.tree_source_path(src_prefix + ".pruned_taxa.txt"), "r")
        rows = taxf.readlines()
        taxon_idxs_list = [ [int(i) for i in row.split()] for row in rows ]
        for set_idx, src_trees in enumerate(input_ds.tree_lists):
            # (Redundant rebinding: `src_trees` is already bound by enumerate.)
            src_trees = input_ds.tree_lists[set_idx]
            ref_trees = output_ds.tree_lists[set_idx]
            taxon_idxs = taxon_idxs_list[set_idx]
            sub_taxa = [src_trees.taxon_namespace[i] for i in taxon_idxs]
            for tree_idx, src_tree in enumerate(src_trees):
                _LOG.debug("%s Set %d/%d, Tree %d/%d" % (title, set_idx+1, len(input_ds.tree_lists), tree_idx+1, len(src_trees)))
                ref_tree = ref_trees[tree_idx]
                if to_retain:
                    src_tree.retain_taxa(sub_taxa)
                else:
                    src_tree.prune_taxa(sub_taxa)
                # tree_dist = paup.symmetric_difference(src_tree, ref_tree)
                self.assertEqual(treecompare.symmetric_difference(src_tree, ref_tree), 0)
        taxf.close()

    def testPruneTaxaUnrooted(self):
        self.check("Unrooted", "prune_unrooted", False)

    def testPruneTaxaRooted(self):
        self.check("Rooted", "prune_rooted", False)

    def testRetainTaxaUnrooted(self):
        self.check("Unrooted", "prune_unrooted", True)

    def testRetainTaxaRooted(self):
        self.check("Rooted", "prune_rooted", True)
+
class TruncateTree(unittest.TestCase):
    """
    ``Tree.truncate_from_root``: truncating an ultrametric tree at a given
    depth must yield a valid tree whose leaves all lie at exactly that depth.
    """

    def setUp(self):
        self.trees = dendropy.TreeList.get_from_path(pathmap.tree_source_path("pythonidae.reference-trees.nexus"), "nexus")

    def check_ultrametric_tree(self, tree, dist):
        """Assert `tree` is valid and every leaf is exactly `dist` from the root."""
        self.assertTrue(tree._debug_tree_is_valid())
        tree.calc_node_root_distances()
        for nd in tree.leaf_node_iter():
            # The cached distance must agree with a fresh computation, and
            # both must equal the truncation depth.
            self.assertAlmostEqual(nd.root_distance, nd.distance_from_root())
            self.assertAlmostEqual(dist, nd.root_distance)

    def test_truncate_ultrametric(self):
        for tree in self.trees:
            # Called for its side effect of populating per-node root
            # distances; the returned list was previously bound to an unused
            # local (`dists`), now dropped.
            tree.calc_node_root_distances()
            min_dist, max_dist = tree.minmax_leaf_distance_from_root()
            # Truncate at several fractions of the maximum root-to-leaf depth.
            trunc_dists = [(max_dist * f) for f in (0.25, 0.5, 0.75, 0.90)]
            for td in trunc_dists:
                working = dendropy.Tree(tree)
                working.truncate_from_root(td)
                # Truncation can leave unlabeled leaves; give them synthetic
                # taxa so the tree remains well-formed for validation.
                for idx, leaf in enumerate(working.leaf_node_iter()):
                    if leaf.label is None and leaf.taxon is None:
                        leaf.taxon = dendropy.Taxon(label="t%s" % (idx+1))
                self.check_ultrametric_tree(working, td)
+
+class TestTreeLadderization(unittest.TestCase):
+
+    def setUp(self):
+        self.tree_str = "[&R] ((A, (B, (C, (D, E)))),(F, (G, H)));"
+
+    def clean_newick_str(self, s):
+        """
+        Strips out everything but the core tree statement characters from a
+        NEWICK string.
+        """
+        return re.sub(r'[^()A-H,]', '', s)
+
+    def testLadderizeLeft(self):
+        tree = dendropy.Tree.get_from_string(self.tree_str, "newick")
+        tree.ladderize(ascending=True)
+        self.assertEqual(self.clean_newick_str(tree.as_string("newick")),
+                self.clean_newick_str("[&R] ((F,(G,H)),(A,(B,(C,(D,E)))));"))
+
+    def testLadderizeRight(self):
+        tree = dendropy.Tree.get_from_string(self.tree_str, "newick")
+        tree.ladderize(ascending=False)
+        self.assertEqual(self.clean_newick_str(tree.as_string("newick")),
+               self.clean_newick_str("[&R] (((((D,E),C),B),A),((G,H),F));"))
+
class TreeMidpointRootingTest(ExtendedTestCase):
    """
    ``Tree.reroot_at_midpoint`` checked against reference trees that were
    midpoint-rooted externally: topology and per-edge branch lengths must
    match.
    """

    def testMidpointRooting(self):
        # Randomly-rooted inputs and their midpoint-rooted counterparts share
        # one taxon namespace so the trees are directly comparable.
        taxa = dendropy.TaxonNamespace()
        test_trees = dendropy.TreeList.get_from_path(pathmap.tree_source_path('pythonidae.random.bd0301.randomly-rooted.tre'),
                "nexus",
                taxon_namespace=taxa,
                rooting="force-rooted")
        expected_trees = dendropy.TreeList.get_from_path(pathmap.tree_source_path('pythonidae.random.bd0301.midpoint-rooted.tre'),
                "nexus",
                taxon_namespace=taxa,
                rooting="force-rooted")
        for idx, test_tree in enumerate(test_trees):
            expected_tree = expected_trees[idx]
            test_tree.reroot_at_midpoint(update_bipartitions=True)
            # Topological identity first ...
            self.assertEqual(treecompare.symmetric_difference(test_tree, expected_tree), 0)
            # ... then branch lengths, edge by edge (skipping the root edge).
            for bipartition in test_tree.bipartition_encoding:
                if test_tree.bipartition_edge_map[bipartition].head_node is test_tree.seed_node:
                    continue
                # self.assertAlmostEqual(bipartition.edge.length, expected_tree.split_bitmask_edge_map[bipartition.split_bitmask].length, 3)
                self.assertAlmostEqual(test_tree.bipartition_edge_map[bipartition].length,
                        expected_tree.bipartition_edge_map[bipartition].length,
                        3)
+
+class TreeRerootingTests(dendropytest.ExtendedTestCase):
+    #                  a
+    #                 / \
+    #                /   \
+    #               /     \
+    #              /       \
+    #             /         \
+    #            /           \
+    #           /             c
+    #          b             / \
+    #         / \           /   \
+    #        /   e         /     f
+    #       /   / \       /     / \
+    #      /   /   \     g     /   h
+    #     /   /     \   / \   /   / \
+    #    i    j     k  l  m  n   o   p
+
+    # change in orientation == exchange in branch length
+    # removing of outdegreee one
+    # infomation is as follows
+    # 'reseed' : { 'x1' : ('z1', 'z2' ..), 'x2': ('z1',) }
+    #    - reseed: the new root
+    #    - x1: the edge which has its length changed
+    #    - z1, z2, z3 ... : the edges for which lengths, when summed, give the new length for x1
+    reseeded_with_unifurcations_suppressed_edge_length_updates = {
+            'a': {},
+            'b': {'c': ('c', 'b'), },
+            'c': {'b': ('c', 'b'), },
+            'e': {'b': ('e'), 'c': ('b', 'c'), },
+            'f': {'c': ('f'), 'b': ('c', 'b'), },
+            'g': {'c': ('g'), 'b': ('c', 'b'), },
+            'h': {'f': ('h'), 'c': ('f'), 'b': ('c', 'b'), },
+            'i': {'b': ('i'), 'c': ('b', 'c'), },
+            'j': {'e': ('j'), 'b': ('e'), 'c':('b','c'), },
+            'k': {'e': ('k'), 'b': ('e'), 'c':('b','c'), },
+            'l': {'g': ('l'), 'c': ('g'), 'b':('c','b'), },
+            'm': {'g': ('m'), 'c': ('g'), 'b':('c','b'), },
+            'n': {'f': ('n'), 'c': ('f'), 'b':('c','b'), },
+            'o': {'h': ('o'), 'f': ('h'), 'c': ('f'), 'b':('c','b'), },
+            'p': {'h': ('p'), 'f': ('h'), 'c': ('f'), 'b':('c','b'), },
+    }
+    reseeded_with_unifurcations_preserved_edge_length_updates = {
+            'a': {},
+            'b': {'a':('b'), },
+            'c': {'a':('c'), },
+            'e': {'b': ('e'), 'a': ('b'), 'c': ('c'),},
+            'f': {'c': ('f'), 'a': ('c'), },
+            'g': {'c': ('g'), 'a': ('c'), },
+            'h': {'f': ('h'), 'a': ('c'), 'c':('f'), },
+            'i': {'b': ('i'), 'a': ('b'), },
+            'j': {'e': ('j'), 'a': ('b'), 'b': ('e'), },
+            'k': {'e': ('k'), 'a': ('b'), 'b': ('e'), },
+            'l': {'g': ('l'), 'c': ('g'), 'a':('c'), },
+            'm': {'g': ('m'), 'c': ('g'), 'a':('c'), },
+            'n': {'f': ('n'), 'c': ('f'), 'a':('c'), },
+            'o': {'h': ('o'), 'f': ('h'), 'c': ('f'), 'a':('c'), },
+            'p': {'h': ('p'), 'f': ('h'), 'c': ('f'), 'a':('c'), },
+    }
+
+    reseeded_with_unifurcations_suppressed_preorder_labels = {
+            'a': ['a', 'b', 'i', 'e', 'j', 'k', 'c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p'],
+            'b': ['b', 'i', 'e', 'j', 'k', 'c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p'],
+            'c': ['c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p', 'b', 'i', 'e', 'j', 'k'],
+            'e': ['e', 'j', 'k', 'b', 'i', 'c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p'],
+            'f': ['f', 'n', 'h', 'o', 'p', 'c', 'g', 'l', 'm', 'b', 'i', 'e', 'j', 'k'],
+            'g': ['g', 'l', 'm', 'c', 'f', 'n', 'h', 'o', 'p', 'b', 'i', 'e', 'j', 'k'],
+            'h': ['h', 'o', 'p', 'f', 'n', 'c', 'g', 'l', 'm', 'b', 'i', 'e', 'j', 'k'],
+            'i': ['i', 'e', 'j', 'k', 'c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p'],
+            'j': ['j', 'k', 'b', 'i', 'c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p'],
+            'k': ['k', 'j', 'b', 'i', 'c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p'],
+            'l': ['l', 'm', 'c', 'f', 'n', 'h', 'o', 'p', 'b', 'i', 'e', 'j', 'k'],
+            'm': ['m', 'l', 'c', 'f', 'n', 'h', 'o', 'p', 'b', 'i', 'e', 'j', 'k'],
+            'n': ['n', 'h', 'o', 'p', 'c', 'g', 'l', 'm', 'b', 'i', 'e', 'j', 'k'],
+            'o': ['o', 'p', 'f', 'n', 'c', 'g', 'l', 'm', 'b', 'i', 'e', 'j', 'k'],
+            'p': ['p', 'o', 'f', 'n', 'c', 'g', 'l', 'm', 'b', 'i', 'e', 'j', 'k']
+    }
    # Maps the label of the node used as the new seed (root) to the full
    # preorder traversal label sequence expected after ``reseed_at()`` with
    # ``suppress_unifurcations=False``: the old seed node 'a' is retained
    # (as a unifurcation) and so still appears in every traversal.
    reseeded_with_unifurcations_preserved_preorder_labels = {
            'a': ['a', 'b', 'i', 'e', 'j', 'k', 'c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p'],
            'b': ['b', 'i', 'e', 'j', 'k', 'a', 'c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p'],
            'c': ['c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p', 'a', 'b', 'i', 'e', 'j', 'k'],
            'e': ['e', 'j', 'k', 'b', 'i', 'a', 'c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p'],
            'f': ['f', 'n', 'h', 'o', 'p', 'c', 'g', 'l', 'm', 'a', 'b', 'i', 'e', 'j', 'k'],
            'g': ['g', 'l', 'm', 'c', 'f', 'n', 'h', 'o', 'p', 'a', 'b', 'i', 'e', 'j', 'k'],
            'h': ['h', 'o', 'p', 'f', 'n', 'c', 'g', 'l', 'm', 'a', 'b', 'i', 'e', 'j', 'k'],
            'i': ['i', 'b', 'e', 'j', 'k', 'a', 'c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p'],
            'j': ['j', 'e', 'k', 'b', 'i', 'a', 'c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p'],
            'k': ['k', 'e', 'j', 'b', 'i', 'a', 'c', 'g', 'l', 'm', 'f', 'n', 'h', 'o', 'p'],
            'l': ['l', 'g', 'm', 'c', 'f', 'n', 'h', 'o', 'p', 'a', 'b', 'i', 'e', 'j', 'k'],
            'm': ['m', 'g', 'l', 'c', 'f', 'n', 'h', 'o', 'p', 'a', 'b', 'i', 'e', 'j', 'k'],
            'n': ['n', 'f', 'h', 'o', 'p', 'c', 'g', 'l', 'm', 'a', 'b', 'i', 'e', 'j', 'k'],
            'o': ['o', 'h', 'p', 'f', 'n', 'c', 'g', 'l', 'm', 'a', 'b', 'i', 'e', 'j', 'k'],
            'p': ['p', 'h', 'o', 'f', 'n', 'c', 'g', 'l', 'm', 'a', 'b', 'i', 'e', 'j', 'k']
    }
    # Maps new-seed label -> {parent label: tuple of expected child labels}
    # describing the parent-child structure expected after ``reseed_at()``
    # with ``suppress_unifurcations=True``; the old seed 'a' is absent from
    # the inner maps whenever suppression removes it.
    reseeded_with_unifurcations_suppressed_relations = {
        'a': {'a': ('b', 'c'),
            'b': ('i', 'e'),
            'c': ('g', 'f'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p')},
        'b': {'b': ('i', 'e', 'c'),
            'c': ('g', 'f'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p')},
        'c': {'b': ('i', 'e'),
            'c': ('g', 'f', 'b'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p')},
        'e': {'b': ('i', 'c'),
            'c': ('g', 'f'),
            'e': ('j', 'k', 'b'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p')},
        'f': {'b': ('i', 'e'),
            'c': ('g', 'b'),
            'e': ('j', 'k'),
            'f': ('n', 'h', 'c'),
            'g': ('l', 'm'),
            'h': ('o', 'p')},
        'g': {'b': ('i', 'e'),
            'c': ('f', 'b'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'g': ('l', 'm', 'c'),
            'h': ('o', 'p')},
        'h': {'b': ('i', 'e'),
            'c': ('g', 'b'),
            'e': ('j', 'k'),
            'f': ('n', 'c'),
            'g': ('l', 'm'),
            'h': ('o', 'p', 'f')},
        'i': {'c': ('g', 'f'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p'),
            'i': ('e', 'c')},
        'j': {'b': ('i', 'c'),
            'c': ('g', 'f'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p'),
            'j': ('k', 'b')},
        'k': {'b': ('i', 'c'),
            'c': ('g', 'f'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p'),
            'k': ('j', 'b')},
        'l': {'b': ('i', 'e'),
            'c': ('f', 'b'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'h': ('o', 'p'),
            'l': ('m', 'c')},
        'm': {'b': ('i', 'e'),
            'c': ('f', 'b'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'h': ('o', 'p'),
            'm': ('l', 'c')},
        'n': {'b': ('i', 'e'),
            'c': ('g', 'b'),
            'e': ('j', 'k'),
            'g': ('l', 'm'),
            'h': ('o', 'p'),
            'n': ('h', 'c')},
        'o': {'b': ('i', 'e'),
            'c': ('g', 'b'),
            'e': ('j', 'k'),
            'f': ('n', 'c'),
            'g': ('l', 'm'),
            'o': ('p', 'f')},
        'p': {'b': ('i', 'e'),
            'c': ('g', 'b'),
            'e': ('j', 'k'),
            'f': ('n', 'c'),
            'g': ('l', 'm'),
            'p': ('o', 'f')}}
    # Maps new-seed label -> {parent label: tuple of expected child labels}
    # describing the parent-child structure expected after ``reseed_at()``
    # with ``suppress_unifurcations=False``; single-child tuples (e.g.
    # ('c',)) mark the unifurcations retained through the old seed 'a' and
    # through leaf-seeded nodes.
    reseeded_with_unifurcations_preserved_relations = {
        'a': {'a': ('b', 'c'),
            'b': ('i', 'e'),
            'c': ('g', 'f'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p')},
        'b': {'a': ('c',),
            'b': ('i', 'e', 'a'),
            'c': ('g', 'f'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p')},
        'c': {'a': ('b',),
            'b': ('i', 'e'),
            'c': ('g', 'f', 'a'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p')},
        'e': {'a': ('c',),
            'b': ('i', 'a'),
            'c': ('g', 'f'),
            'e': ('j', 'k', 'b'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p')},
        'f': {'a': ('b',),
            'b': ('i', 'e'),
            'c': ('g', 'a'),
            'e': ('j', 'k'),
            'f': ('n', 'h', 'c'),
            'g': ('l', 'm'),
            'h': ('o', 'p')},
        'g': {'a': ('b',),
            'b': ('i', 'e'),
            'c': ('f', 'a'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'g': ('l', 'm', 'c'),
            'h': ('o', 'p')},
        'h': {'a': ('b',),
            'b': ('i', 'e'),
            'c': ('g', 'a'),
            'e': ('j', 'k'),
            'f': ('n', 'c'),
            'g': ('l', 'm'),
            'h': ('o', 'p', 'f')},
        'i': {'a': ('c',),
            'b': ('e', 'a'),
            'c': ('g', 'f'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p'),
            'i': ('b',)},
        'j': {'a': ('c',),
            'b': ('i', 'a'),
            'c': ('g', 'f'),
            'e': ('k', 'b'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p'),
            'j': ('e',)},
        'k': {'a': ('c',),
            'b': ('i', 'a'),
            'c': ('g', 'f'),
            'e': ('j', 'b'),
            'f': ('n', 'h'),
            'g': ('l', 'm'),
            'h': ('o', 'p'),
            'k': ('e',)},
        'l': {'a': ('b',),
            'b': ('i', 'e'),
            'c': ('f', 'a'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'g': ('m', 'c'),
            'h': ('o', 'p'),
            'l': ('g',)},
        'm': {'a': ('b',),
            'b': ('i', 'e'),
            'c': ('f', 'a'),
            'e': ('j', 'k'),
            'f': ('n', 'h'),
            'g': ('l', 'c'),
            'h': ('o', 'p'),
            'm': ('g',)},
        'n': {'a': ('b',),
            'b': ('i', 'e'),
            'c': ('g', 'a'),
            'e': ('j', 'k'),
            'f': ('h', 'c'),
            'g': ('l', 'm'),
            'h': ('o', 'p'),
            'n': ('f',)},
        'o': {'a': ('b',),
            'b': ('i', 'e'),
            'c': ('g', 'a'),
            'e': ('j', 'k'),
            'f': ('n', 'c'),
            'g': ('l', 'm'),
            'h': ('p', 'f'),
            'o': ('h',)},
        'p': {'a': ('b',),
            'b': ('i', 'e'),
            'c': ('g', 'a'),
            'e': ('j', 'k'),
            'f': ('n', 'c'),
            'g': ('l', 'm'),
            'h': ('o', 'f'),
            'p': ('h',)}}
+
+    def test_reseeding(self):
+        # import pprint
+        curated_tree_gen = curated_test_tree.CuratedTestTree()
+        # x1 = {}
+        # x2 = {}
+        # for reseed_at_label in curated_tree_gen.internal_labels:
+        # to do: test (1) update bipartitions, (2) edge length
+        ref_tree, all_nodes, leaf_nodes, internal_nodes = curated_tree_gen.get_tree(
+                suppress_internal_node_taxa=True,
+                suppress_leaf_node_taxa=True)
+        ref_edge_lengths = {}
+        for nd in ref_tree:
+            ref_edge_lengths[nd.label] = nd.edge.length
+        for is_rooted in (True, False):
+            for suppress_unifurcations, expected_preorder_labels, expected_relations, edge_length_updates in (
+                    (True, self.reseeded_with_unifurcations_suppressed_preorder_labels, self.reseeded_with_unifurcations_suppressed_relations, self.reseeded_with_unifurcations_suppressed_edge_length_updates),
+                    (False, self.reseeded_with_unifurcations_preserved_preorder_labels, self.reseeded_with_unifurcations_preserved_relations, self.reseeded_with_unifurcations_preserved_edge_length_updates),
+                    ):
+                for reseed_at_label in curated_tree_gen.all_labels:
+
+                    if reseed_at_label == "a":
+                        continue
+
+                    # identify
+                    # print("\n### is_rooted = {}, suppress_unifurcations = {}, reseed_at = '{}'".format(is_rooted, suppress_unifurcations, reseed_at_label))
+
+                    # get tree
+                    tree, all_nodes, leaf_nodes, internal_nodes = curated_tree_gen.get_tree(
+                            suppress_internal_node_taxa=False,
+                            suppress_leaf_node_taxa=False)
+
+                    # label nodes
+                    for nd in tree:
+                        nd.label = nd.taxon.label
+                    # set rooting
+                    tree.is_rooted = is_rooted
+
+                    # calc bipartitions
+                    tree.encode_bipartitions(suppress_unifurcations=False, collapse_unrooted_basal_bifurcation=False)
+
+                    # save old root
+                    old_root = tree.seed_node
+
+                    # find new root
+                    new_root = tree.find_node_with_label(reseed_at_label)
+
+                    # reroot it
+                    tree.reseed_at(
+                            new_root,
+                            suppress_unifurcations=suppress_unifurcations,
+                            collapse_unrooted_basal_bifurcation=False,
+                            update_bipartitions=True)
+
+                    # check that new root is as expected
+                    self.assertEqual(tree.seed_node.label, reseed_at_label)
+
+                    # check old root integrity
+                    if old_root is not new_root:
+                        old_root_still_in_tree = False
+                        for nd in tree:
+                            if nd is old_root:
+                                old_root_still_in_tree = True
+                                break
+                        if old_root_still_in_tree:
+                            self.assertIsNot(old_root._parent_node, None)
+                            self.assertIs(old_root._edge._head_node, old_root)
+                            self.assertIs(old_root._edge.tail_node, old_root._parent_node)
+                            found_parent = None
+                            for node in all_nodes:
+                                if old_root in node._child_nodes:
+                                    self.assertIs(found_parent, None)
+                                    found_parent = node
+                            self.assertIsNot(found_parent, None)
+                            self.assertIs(found_parent, old_root._parent_node)
+                        else:
+                            self.assertEqual(len(old_root._child_nodes), 1)
+
+                    # check new root integrity
+                    self.assertIs(new_root._parent_node, None)
+                    self.assertIs(new_root._edge._head_node, new_root)
+                    self.assertIs(new_root._edge.tail_node, None)
+                    for node in all_nodes:
+                        self.assertNotIn(new_root, node._child_nodes)
+
+                    # check that rooting state is as expected
+                    self.assertEqual(tree.is_rooted, is_rooted)
+
+                    # check for structural integrity, including bipartitions
+                    if is_rooted and suppress_unifurcations:
+                        tree._debug_check_tree(
+                                logger_obj=_LOG,
+                                check_bipartitions=True,
+                                unique_bipartition_edge_mapping=True)
+                    else:
+                        tree._debug_check_tree(
+                                logger_obj=_LOG,
+                                check_bipartitions=True,
+                                unique_bipartition_edge_mapping=False)
+
+                    # check that traversal is as expected
+                    preorder_labels = [nd.label for nd in tree]
+                    self.assertEqual(preorder_labels, expected_preorder_labels[reseed_at_label])
+
+                    # check that parent-child relationships are as expected
+                    relations = {}
+                    for nd in tree:
+                        child_labels = tuple(ch.label for ch in nd._child_nodes)
+                        if child_labels:
+                            relations[nd.label] = child_labels
+                    self.assertEqual(relations, expected_relations[reseed_at_label])
+
+                    # check that edge lengths are correct
+                    updated_edge_lengths = edge_length_updates[reseed_at_label]
+                    for nd in tree:
+                        if nd.label == reseed_at_label:
+                            self.assertEqual(nd.edge.length, ref_edge_lengths['a'])
+                        elif nd.label not in updated_edge_lengths:
+                            self.assertEqual(nd.edge.length, ref_edge_lengths[nd.label], "New seed: {}, Current node: {}".format(reseed_at_label, nd.label))
+                        else:
+                            new_length = sum([ref_edge_lengths[clabel] for clabel in updated_edge_lengths[nd.label]])
+                            self.assertEqual(nd.edge.length, new_length, "New seed: {}, Current node: {}".format(reseed_at_label, nd.label))
+
class ResolvePolytomiesTestCase(dendropytest.ExtendedTestCase):
    # Tests Tree.resolve_polytomies(): after resolution, trees must be
    # fully bifurcating, structurally sound, and collapsible back to the
    # original topology.

    def verify_resolve_polytomies(self, tree_string, rng):
        """
        Parses ``tree_string``, resolves its polytomies (using ``rng``
        for randomization if given), then checks that the result is fully
        bifurcating, passes structural/bipartition integrity checks,
        differs topologically from the original tree, and collapses back
        to the original topology once the unweighted edges inserted by
        resolution are removed.
        """
        tree = dendropy.Tree.get_from_string(tree_string, "newick")
        # sanity-check that the rooting token in the string was honored
        if "&U" in tree_string:
            assert not tree.is_rooted
        else:
            assert tree.is_rooted
        # give every original edge a non-zero length so that edges
        # inserted by the resolution (presumably unweighted) can be
        # collapsed away below without touching original edges
        for nd in tree:
            nd.edge.length = 100
        tree.resolve_polytomies(rng=rng)
        tree.encode_bipartitions()
        tree._debug_check_tree(
                check_bipartitions=True,
                unique_bipartition_edge_mapping=True)
        # every internal node must now be bifurcating, except the seed
        # node of an unrooted tree, which is expected to be trifurcating
        for nd in tree:
            if nd is tree.seed_node and not tree.is_rooted:
                self.assertEqual(len(nd._child_nodes), 3)
            elif len(nd._child_nodes) > 0:
                self.assertEqual(len(nd._child_nodes), 2)
        # re-parse the original in the same taxon namespace so symmetric
        # difference is comparable
        tree2 = dendropy.Tree.get_from_string(tree_string, "newick", taxon_namespace=tree.taxon_namespace)
        # resolution must have changed the topology ...
        self.assertNotEqual(treecompare.symmetric_difference(tree, tree2), 0)
        tree.collapse_unweighted_edges()
        # ... and collapsing the inserted edges must restore it
        self.assertEqual(treecompare.symmetric_difference(tree, tree2), 0)

    def test_resolve_polytomies_at_root(self):
        """
        Verifies resolution of a root polytomy, both rooted and unrooted,
        with the deterministic MockRandom advanced to many different
        starting states (and with no rng at all).
        """
        for tree_string in (
                "(a,b,c,d)e;",
                ):
            for rooting in ("[&R]", "[&U]"):
                tree_string2 = rooting + " " +  tree_string
                # cycle through rng period
                self.verify_resolve_polytomies(tree_string2, None)
                for x in range(1001):
                    rng = MockRandom()
                    # burn ``x`` draws so each iteration starts the rng
                    # at a different point in its cycle
                    for i in range(x):
                        rng.uniform(0, 1)
                    self.verify_resolve_polytomies(tree_string2, rng)

    def test_resolve_polytomies(self):
        """
        Verifies resolution of many large multifurcating trees, rooted
        and unrooted, with and without a (deterministic) rng.
        """
        for tree_string in (
                "((((Homo:0.21,Bogus1:0.23,Pongo:0.21)N1:0.28,Bogus2:0.49,Macaca:0.49)N2:0.13,Bogus3:0.62,Ateles:0.62)N3:0.38,Galago:1.00)N4:0.0;",
                "(((t52,t62,(t2,t58,(t32,(t55,t28,t39,t17,t4,t44,t25)internal6,t26,t9,t48,(t41,t45)internal7)internal5,t56)internal4,t54,((t18,t14,t34)internal9,(t49,t22,t50,t27,t16,t40,t6,t19)internal10,t13,(t51,t35,t61,t53,t43)internal11)internal8,((t42,t5,t7,t33,t30,t21,t47,t38)internal13,t23,t1,t11,t46)internal12,(t63,t3,t37,t59)internal14)internal3,t57,t64,t31,(t12,(t60,t24,t10,t15,t20)internal16,t36)internal15,t8)internal2,t29)internal1;",
                "((t13,t37,t19,((t21,t44)internal4,(t61,t46,t4,t8,t63)internal5,t23,t28)internal3,t52,t64,(t39,t40,t24)internal6,(t54,t62,t15,t55,t51)internal7)internal2,(t3,(t53,t33,(t47,t9,t25)internal10,t45,t18,t27,t17)internal9,(t10,t22)internal11,(t59,t20,t12,t57,t56,t38,t7,t11)internal12,((t1,t31,t43,t36,t34,t14,t49)internal14,t2,t41,t50,((t30,t32,t58,t6)internal16,t60,t16)internal15,t26,t48,t35)internal13,t42,t29,t5)internal8)internal1;",
                "((t7,t13,(t42,t51,t20,t26,(t21,t18,t16)internal4)internal3,(t6,t48,(t23,t33,t34,t15,t2,t25)internal6,(t64,t45,t49,t3,t55,t31,(t19,t47,t38,t35)internal8,t14)internal7,t9,(t5,(t62,t50)internal10,t54,t32,(t40,t8,t58,t60,t10,t30)internal11)internal9)internal5)internal2,t28,(t4,t57,t52,t43,t46,t39)internal12,t63,t11,(t44,(t12,t22,t36,t29,t24,t1,t17,t56)internal14,(t41,t59,t53,t61)internal15,t37)internal13,t27)internal1;",
                "(t10,t42,(t54,t12,t40,(t30,(t55,t3,t22,t56)internal4,t39)internal3,t51,((t41,(t28,t52,t24,t14,t49,t38,t36,(t35,t34,t13,t9,t59,t58)internal8)internal7,t5,t45,t17,(t23,(t11,t53,t57,t19,t26)internal10)internal9,t63,(t62,t29,t18,t20,t27,t43)internal11)internal6,t4,(t25,t7,(((t47,t61,t21,t64)internal15,t44,t46,t15,t6,t37,t48)internal14,t50,t60)internal13)internal12)internal5,t31)internal2,t1,t32,t33,(t2,t8)internal16,t16)internal1;",
                "(t60,(((t29,t35,t5,t59,t4,t9,(t6,t25,t37,t44,t54)internal5)internal4,(t53,t23,t28,t48,((t16,t46,t26,t10,t52)internal8,t30,t17,t51,t40)internal7,t31,t3)internal6,t14,t7,t2,t22,(t45,t56,t20,t36,t43,t47)internal9,t8)internal3,t18,(t39,(t61,t27,t21,t58,t24)internal11,t50,(t64,t38,(t32,t11,t49,t63)internal13)internal12,t13,((t57,t34,t15,t55,t19)internal15,t1,t42)internal14,t33)internal10,(t62,t12,t41)internal16)internal2)internal1;",
                "((t12,t16,(t51,t24,t27,(t38,t44,t52,t6,t9,t53,t20,t18)internal4,(t31,t47,t56,t60)internal5)internal3,t34,t42)internal2,t5,t62,((t61,t64,t59,t15,t48,t2,t35)internal7,t57,(t14,t28,t40,t22,t58,t7,(t25,t4)internal9,((t8,t29,t21)internal11,t17,t1,t46,(t33,t50,t11)internal12,t13,t45,t37)internal10)internal8,(t26,t10,(t3,t30,t32,(t19,t36,t39,t43)internal15,t23,t54,t55)internal14)internal13,t41,t49,t63)internal6)internal1;",
                "(((t31,t19,t12,(t55,t25)internal4,t11,t60,(t46,t8,t56)internal5)internal3,(t52,t18)internal6,(((t24,t16,t7)internal9,t57,t40,(t2,t9,t50,t37,t43,t20,t15,t22)internal10,t42,(t47,t28,t58,t10)internal11)internal8,t21,(t14,(t51,t53,t26,t35,t49)internal13,t33,t62,t34,t54,t17)internal12,t5,t61,t4)internal7,t3,t36,t45,t63)internal2,(t27,t32,t30,t48)internal14,(t13,t64,(t23,t59,t41,t38,t44,t1)internal16,t29,t39,t6)internal15)internal1;",
                "(((t8,t40,t25,t36,t37,t11)internal3,t29,t31,t51,t3,((t61,t43,t63,t50,t23,t52,t24,t30)internal5,t39,t44,t58,((t15,t64,t9,t28,(t5,t34,t38,t22,(t33,t12,t35,t42,t10,t2,t27,t45)internal9)internal8,t7,t21,t60)internal7,t41,(t48,t17,t14)internal10)internal6,t13,t18)internal4)internal2,((t47,t16,t49,(t53,t26)internal13,(t19,t20,(t55,t54)internal15,t32,t4)internal14,t59,t57,t6)internal12,t46,t56,t1)internal11,t62)internal1;",
                "(t33,t39,t18,t35,((t19,t62,t55,(t41,t14,(t1,t36,t16,t38,t25,t59,t34)internal5)internal4,t61,t50)internal3,(((t2,t48,t22)internal8,t28,t37,t47,(t60,t30)internal9,t27)internal7,t12,(t31,t21,(t3,t5,t45,t53)internal11,(t23,t54,t20,t4,t64,t56,(t58,t13,t26,t11,t57,t44,t42,t46)internal13)internal12,t40)internal10)internal6,t7,(t52,(t43,t24)internal15,(t49,t29,t63,t32)internal16,t9,t6,t8,t15)internal14,t51)internal2,t10,t17)internal1;",
                "(t35,((t60,t47,t58,t26,t9)internal3,t3,((t13,t25)internal5,t1,((t6,t32,(t53,t7,t64,t34,t18,t23,t30,t33)internal8,t55,(t48,t20,t12,t4,t38,t28,t36)internal9)internal7,t8,((t57,t40,t52,t31,t43,t39)internal11,t59,(t37,t16,t27,(t44,t41,t10)internal13,t50)internal12,t24,t63)internal10,(t5,t56,t61,t29)internal14,t21,t49)internal6)internal4,((t17,t42,t62,t15)internal16,(t19,t2,t51,(t22,t14,(t45,t54,t46)internal19)internal18)internal17,t11)internal15)internal2)internal1;",
                ):
            for rooting in ("[&R]", "[&U]"):
                tree_string2 = rooting + " " +  tree_string
                for rng in (MockRandom(), None):
                    self.verify_resolve_polytomies(tree_string2, rng)
+
class TreeRestructuring(dendropytest.ExtendedTestCase):
    """
    Placeholder suite for tree-restructuring operations: every case is
    currently registered as incomplete rather than implemented.
    """

    def _pending(self):
        # Shared placeholder body: fail_incomplete_tests() reports the
        # calling test as incomplete; its (falsy) result is asserted.
        outcome = self.fail_incomplete_tests()
        self.assertFalse(outcome)

    def test_collapse_basal_bifurcation(self):
        self._pending()

    def test_reseed_at(self):
        self._pending()

    def test_to_outgroup_position(self):
        self._pending()

    def test_reroot_at_node(self):
        self._pending()

    def test_reroot_at_edge(self):
        self._pending()

    def test_reroot_at_midpoint(self):
        self._pending()

    def test_suppress_unifurcations(self):
        self._pending()

    def test_collapse_unweighted_edges(self):
        self._pending()

    def test_resolve_polytomies(self):
        self._pending()

    def test_prune_subtree(self):
        self._pending()

    def test_prune_leaves_without_taxa(self):
        self._pending()

    def test_prune_taxa(self):
        self._pending()

    def test_prune_nodes(self):
        self._pending()

    def test_prune_taxa_with_labels(self):
        self._pending()

    def test_retain_taxa(self):
        self._pending()

    def test_retain_taxa_with_labels(self):
        self._pending()

    def test_randomly_reorient_tree(self):
        self._pending()

    def test_randomly_rotate(self):
        self._pending()

    def test_ladderize(self):
        self._pending()

    def test_truncate_from_root(self):
        self._pending()

    def test_scale_edges(self):
        self._pending()

    def test_set_edge_lengths_from_node_ages(self):
        self._pending()
+
if __name__ == "__main__":
    # Allow this test module to be run directly as a script.
    unittest.main()
diff --git a/dendropy/test/test_tree_reconciliation_and_discordance.py b/dendropy/test/test_tree_reconciliation_and_discordance.py
new file mode 100644
index 0000000..738f5bb
--- /dev/null
+++ b/dendropy/test/test_tree_reconciliation_and_discordance.py
@@ -0,0 +1,159 @@
+#! /usr/bin/env python
+
+############################################################################
+##  Part of the DendroPy library for phylogenetic computing.
+##
+##  Copyright 2008 Jeet Sukumaran and Mark T. Holder.
+##
+##  This program is free software; you can redistribute it and/or modify
+##  it under the terms of the GNU General Public License as published by
+##  the Free Software Foundation; either version 3 of the License, or
+##  (at your option) any later version.
+##
+##  This program is distributed in the hope that it will be useful,
+##  but WITHOUT ANY WARRANTY; without even the implied warranty of
+##  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+##  GNU General Public License for more details.
+##
+##  You should have received a copy of the GNU General Public License along
+##  with this program. If not, see <http://www.gnu.org/licenses/>.
+##
+############################################################################
+
+"""
+Tests reconciliation calculations.
+"""
+
+import os
+import unittest
+import dendropy
+from dendropy.test.support import pathmap
+from dendropy.utility.messaging import get_logger
+_LOG = get_logger(__name__)
+
+from dendropy.model import reconcile
+
class ContainingTreeDeepCoalescenceSmall(unittest.TestCase):
    # Deep-coalescence counts for gene trees embedded in the containing
    # (species) tree of the "deepcoal1.nex" test data set.

    def setUp(self):
        """
        Loads the containing tree and embedded gene trees from the test
        data set, and builds a mapping from each gene-tree taxon to the
        population taxon whose label is the upper-cased first character
        of the gene taxon's label.
        """
        dataset = dendropy.DataSet.get_from_path(pathmap.tree_source_path(filename="deepcoal1.nex"), "nexus")
        self.species_tree = dataset.get_tree_list(label="ContainingTree")[0]
        self.gene_trees = dataset.get_tree_list(label="EmbeddedTrees")
        # freeze the species taxon namespace before mapping -- presumably
        # so require_taxon() below resolves existing taxa rather than
        # creating new ones (TODO confirm)
        self.species_tree.taxon_namespace.is_mutable = False
        self.gene_taxon_to_population_taxon_map = dendropy.TaxonNamespaceMapping(
                domain_taxon_namespace=self.gene_trees.taxon_namespace,
                range_taxon_namespace=self.species_tree.taxon_namespace,
                mapping_fn=lambda t: self.species_tree.taxon_namespace.require_taxon(label=t.label[0].upper()))
        # expected deep-coalescence count for each gene tree, in order,
        # under the original branch lengths
        self.expected_under_original_brlens = [4, 6, 4, 2, 4, 3, 3, 4, 5, 4]

    def testFixedEdgesDeepCoalCount(self):
        """
        Verifies the deep-coalescence count of each gene tree when the
        containing tree's edge lengths are left unmodified
        (``fit_containing_edge_lengths=False``).
        """
        results = []
        for idx, gt in enumerate(self.gene_trees):
            ct = reconcile.ContainingTree(containing_tree=self.species_tree,
                    contained_taxon_namespace=self.gene_trees.taxon_namespace,
                    contained_to_containing_taxon_map=self.gene_taxon_to_population_taxon_map,
                    contained_trees=[gt],
                    fit_containing_edge_lengths=False,
                    )
            dc = ct.num_deep_coalescences()
            results.append(dc)

            ## FOR DEBUGGING
            # mesqf = pathmap.named_output_stream("ContainingTreeDeepCoalescence_Small_FixedEdges_t%02d_dc%02d.nex" % (idx+1, dc), False)
            # with mesqf:
            #     ct.write_as_mesquite(mesqf)

        self.assertEqual(results, self.expected_under_original_brlens)

    def testFittedEdgesDeepCoalCount(self):
        """
        Smoke test: constructs each ContainingTree with
        ``fit_containing_edge_lengths=True`` and computes the
        deep-coalescence count.

        NOTE(review): no assertion is made on the computed counts -- this
        only verifies the computation runs without error; confirm whether
        fitted-edge expected values should be added.
        """
        for idx, gt in enumerate(self.gene_trees):
            gt.encode_bipartitions()
            ct = reconcile.ContainingTree(containing_tree=self.species_tree,
                    contained_taxon_namespace=self.gene_trees.taxon_namespace,
                    contained_to_containing_taxon_map=self.gene_taxon_to_population_taxon_map,
                    contained_trees=[gt],
                    fit_containing_edge_lengths=True,
                    )
            dc = ct.num_deep_coalescences()

            ## FOR DEBUGGING
            # mesqf = pathmap.named_output_stream("ContainingTreeDeepCoalescence_Small_FittedEdges_t%02d_dc%02d.nex" % (idx+1, dc), False)
            # with mesqf:
            #     ct.write_as_mesquite(mesqf)
+
class DeepCoalTest(unittest.TestCase):
    """Deep-coalescence counts between gene trees and species trees."""

    def testFittedDeepCoalCounting(self):
        """
        Checks ``reconcile.reconciliation_discordance()`` for every
        pairing of 3 gene trees with 15 species trees against
        precomputed deep-coalescence counts.
        """
        taxa = dendropy.TaxonNamespace()

        gene_trees = dendropy.TreeList.get_from_string("""
            [&R] (A,(B,(C,D))); [&R] ((A,C),(B,D)); [&R] (C,(A,(B,D)));
            """, "newick", taxon_namespace=taxa)

        species_trees = dendropy.TreeList.get_from_string("""
            [&R] (A,(B,(C,D)));
            [&R] (A,(C,(B,D)));
            [&R] (A,(D,(C,B)));
            [&R] (B,(A,(C,D)));
            [&R] (B,(C,(A,D)));
            [&R] (B,(D,(C,A)));
            [&R] (C,(A,(B,D)));
            [&R] (C,(B,(A,D)));
            [&R] (C,(D,(B,A)));
            [&R] (D,(A,(B,C)));
            [&R] (D,(B,(A,C)));
            [&R] (D,(C,(B,A)));
            [&R] ((A,B),(C,D));
            [&R] ((A,C),(B,D));
            [&R] ((A,D),(C,B));
            """, "NEWICK", taxon_namespace=taxa)

        # expected results, for each gene tree / species tree pairing, with
        # cycling through species trees for each gene tree
        expected_deep_coalescences = [ 0, 1, 1, 1, 2, 2, 3, 3, 3, 3, 3, 3, 1, 2, 2,
                                            2, 1, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 0, 2,
                                            2, 1, 2, 3, 3, 3, 0, 1, 1, 3, 3, 3, 2, 1, 2 ]
        # unittest assertions (not bare ``assert``) so the checks survive
        # ``python -O`` and report useful failure messages
        self.assertEqual(len(expected_deep_coalescences), len(gene_trees) * len(species_trees))

        for t in gene_trees + species_trees:
            t.update_bipartitions()
        idx = 0
        _LOG.info("Species\t\tGene\t\tDC\t\tExp.DC\t\tDiff")
        for gt in gene_trees:
            gt.update_bipartitions()
            for st in species_trees:
                st.update_bipartitions()
                dc = reconcile.reconciliation_discordance(gt, st)
                _LOG.info("%s\t\t%s\t\t%s\t\t%s\t\t%s"
                    % (st._as_newick_string(),
                       gt._as_newick_string(),
                       dc,
                       expected_deep_coalescences[idx],
                       dc - expected_deep_coalescences[idx]))
                self.assertEqual(dc, expected_deep_coalescences[idx])
                idx += 1

    def testGroupedDeepCoalCounting(self):
        """
        Checks ``reconcile.monophyletic_partition_discordance()`` for a
        set of trees whose taxa are partitioned by the first character
        of their labels, against precomputed scores.
        """
        src_trees = [
                    "((a1,a2)x,b1)y;",
                    "((a1, (a2, a3), b1), (b2,(b3,b4)));",
                    "(((((a1, a2),a3), b1), b2), (b3, ((b4,b5),b6)));",
                    "((b1, (b2, b3), a1), (a2,(a3, a4)));",
                    "(((((b1, b2),b3), a1), a2), (a3, ((a4,a5),a6)));",
                    "((a1,a2),(b1,b2),(c1,c2));",
                    "((a1,a2),(b1,b2,c3),(c1,c2));",
                    "(((a1,a2),(b1,b2),c1),c2);",
                    ]
        scores = [ 0, 1, 2, 1, 2, 0, 1, 1 ]
        for src_tree, expected in zip(src_trees, scores):
            tree = dendropy.Tree.get_from_string(src_tree, "NEWICK")
            groups = dendropy.TaxonNamespacePartition(tree.taxon_namespace,
                membership_fn=lambda x: x.label[0])
            dc = reconcile.monophyletic_partition_discordance(tree, groups)
            # BUG FIX: this assertion was previously dedented outside the
            # loop, so only the final tree/score pair was ever checked.
            self.assertEqual(dc, expected,
                "deep coalescences by groups: expecting %d, but found %d" % (expected, dc))
+
if __name__ == "__main__":
    # Allow this test module to be run directly as a script.
    unittest.main()
diff --git a/dendropy/test/test_tree_split_compatibility.py b/dendropy/test/test_tree_split_compatibility.py
new file mode 100644
index 0000000..b117f40
--- /dev/null
+++ b/dendropy/test/test_tree_split_compatibility.py
@@ -0,0 +1,167 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+import unittest
+import dendropy
+import json
+from dendropy.test.support import pathmap
+from dendropy.test.support import dendropytest
+
def generate_bipartitions(
        trees,
        generation_mode="from-trees",
        is_rooted=None):
    """
    Builds a sorted, de-duplicated list of (locked) Bipartition objects:
    either every possible bipartition over the taxon namespace
    ("all"), or only those realized on the given trees ("from-trees").
    """
    if is_rooted is None:
        is_rooted = trees[0].is_rooted
    if generation_mode == "all":
        full_mask = trees.taxon_namespace.all_taxa_bitmask()
        candidates = [
                dendropy.Bipartition(
                    leafset_bitmask=mask,
                    tree_leafset_bitmask=full_mask,
                    is_rooted=is_rooted,
                    compile_bipartition=True)
                for mask in range(1, full_mask)]
    elif generation_mode == "from-trees":
        candidates = []
        for tree in trees:
            candidates.extend(tree.encode_bipartitions())
    else:
        raise ValueError(generation_mode)
    # De-duplicate: rooted bipartitions are keyed by leafset bitmask,
    # unrooted ones by (normalized) split bitmask.
    if is_rooted:
        keyed = {b.leafset_bitmask: b for b in candidates}
    else:
        keyed = {b.split_bitmask: b for b in candidates}
    result = []
    for key in sorted(keyed):
        bipartition = keyed[key]
        bipartition.is_mutable = False
        result.append(bipartition)
    return result
+
class TestSplitCompatibility(dendropytest.ExtendedTestCase):
    """
    Verifies that Bipartition.is_compatible_with() is symmetric and that
    Tree.is_compatible_with_bipartition() agrees with the pairwise
    per-bipartition results.
    """

    def test_compatibility(self):
        """
        For each candidate bipartition and each tree: partitions the
        tree's own bipartitions into compatible/incompatible sets via the
        pairwise test, then checks the whole-tree compatibility result
        reports "compatible" exactly when the incompatible set is empty.
        """
        # (tree file, how candidate bipartitions are generated)
        regimes = (
            ("dendropy-test-trees-n12-x2.nexus", "all"),
            ("dendropy-test-trees-n33-unrooted-x100a.nexus", "from-trees"),
            ("dendropy-test-trees-n10-rooted-treeshapes.nexus", "all"),
        )
        for trees_filename_idx, (trees_filename, bipartition_generation_mode) in enumerate(regimes):
            trees_filepath = pathmap.tree_source_path(trees_filename)
            trees = dendropy.TreeList.get_from_path(
                    trees_filepath,
                    "nexus",)
            bipartitions = generate_bipartitions(trees, bipartition_generation_mode, is_rooted=trees[0].is_rooted)
            # for bipartition1_idx, bipartition1 in enumerate(bipartitions):
            for bipartition1_idx, bipartition1 in enumerate(bipartitions):
                for tree_idx, tree in enumerate(trees):
                    compatible_bipartitions = set()
                    incompatible_bipartitions = set()
                    bipartition_encoding = tree.encode_bipartitions()
                    for biparition2_idx, bipartition2 in enumerate(bipartition_encoding):
                        # Pairwise compatibility must be symmetric.
                        if bipartition2.is_compatible_with(bipartition1):
                            self.assertTrue(bipartition1.is_compatible_with(bipartition2))
                            compatible_bipartitions.add(bipartition2)
                        else:
                            self.assertFalse(bipartition1.is_compatible_with(bipartition2))
                            incompatible_bipartitions.add(bipartition2)
                    is_compatible = tree.is_compatible_with_bipartition(bipartition1)
                    # Each tree bipartition landed in exactly one of the two sets.
                    self.assertEqual(len(compatible_bipartitions) + len(incompatible_bipartitions), len(bipartition_encoding))
                    if is_compatible:
                        self.assertEqual(len(incompatible_bipartitions), 0,
                                "Tree {} of '{}': bipartition {} (leafset = {}, index = {}) found compatible with tree, but is incompatible with following bipartitions on tree: {}".
                                format(
                                    tree_idx,
                                    trees_filename,
                                    bipartition1.split_as_bitstring(),
                                    bipartition1.leafset_as_bitstring(),
                                    bipartition1_idx,
                                    [b.split_as_bitstring() for b in incompatible_bipartitions],
                                    ))
                        self.assertEqual(len(compatible_bipartitions), len(bipartition_encoding))
                    else:
                        self.assertTrue(len(incompatible_bipartitions) > 0,
                                "Tree {} of '{}': bipartition {} (leafset = {}, index = {}) found incompatible with tree, but is compatible with all bipartitions on tree: {}".
                                format(
                                    tree_idx,
                                    trees_filename,
                                    bipartition1.split_as_bitstring(),
                                    bipartition1.leafset_as_bitstring(),
                                    bipartition1_idx,
                                    [b.split_as_bitstring() for b in compatible_bipartitions],
                                    ))



    # def test_compatibility(self):
    #     regime_source_names = (
    #             ("dendropy-test-trees-n10-rooted-treeshapes.nexus", "dendropy-test-trees-n10-rooted-treeshapes.split-compatibilities-all.json", ),
    #     )
    #     for tree_filename, ref_filename in regime_source_names:

    #         with open(pathmap.tree_source_path(ref_filename), "r") as src:
    #             regimed = json.load(src)

    #         bipartitions = {}
    #         for bidx in regimed["bipartitions"]:
    #             bdesc = regimed["bipartitions"][bidx]
    #             b = dendropy.Bipartition(
    #                     leafset_bitmask=bdesc["leafset_bitmask"],
    #                     tree_leafset_bitmask=bdesc["tree_leafset_bitmask"],
    #                     is_rooted=bdesc["is_rooted"])
    #             bipartitions[int(bidx)] = b
    #         bipartition_indexes = sorted(bipartitions.keys())

    #         tree_data = {}
    #         for tree_idx in regimed["trees"]:
    #             i = int(tree_idx)
    #             tree_data[i] = {}
    #             tree_data[i]["compatible_bipartitions"] = set([int(j) for j in regimed["trees"][tree_idx]["compatible_bipartitions"]])
    #             tree_data[i]["incompatible_bipartitions"] = set([int(j) for j in regimed["trees"][tree_idx]["incompatible_bipartitions"]])

    #         trees = dendropy.TreeList.get_from_path(
    #                 pathmap.tree_source_path(tree_filename),
    #                 "nexus")
    #         for tree_idx, tree in enumerate(trees):
    #             tree_bipartitions = tree.encode_bipartitions()
    #             for bipartition_idx in bipartition_indexes:
    #                 is_compatible = tree.is_compatible_with_bipartition(bipartition)

    #                 bipartition = bipartitions[bipartition_idx]
    #                 if bipartition_idx in tree_data[tree_idx]["compatible_bipartitions"]:
    #                     expected_is_compatible = True
    #                 elif bipartition_idx in tree_data[tree_idx]["incompatible_bipartitions"]:
    #                     expected_is_compatible = False
    #                 else:
    #                     raise Exception("Bipartition {} ('{}') not reported for tree {}".format(bipartition_idx, bipartition.split_as_bitstring(), tree_idx))
    #                 self.assertIs(is_compatible, expected_is_compatible,
    #                         "Bipartition {} ('{}') for tree {} in '{}': expected '{}' for compatibility but instead observed '{}'".format(
    #                             bipartition_idx,
    #                             bipartition.split_as_bitstring(),
    #                             tree_idx,
    #                             tree_filename,
    #                             expected_is_compatible,
    #                             is_compatible,
    #                             ))

if __name__ == "__main__":
    unittest.main()
diff --git a/dendropy/test/test_tree_summarization_and_consensus.py b/dendropy/test/test_tree_summarization_and_consensus.py
new file mode 100644
index 0000000..83ac93a
--- /dev/null
+++ b/dendropy/test/test_tree_summarization_and_consensus.py
@@ -0,0 +1,384 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Tests of summarization.
+"""
+
+import collections
+import unittest
+import dendropy
+import random
+import itertools
+from dendropy.calculate import treecompare
+from dendropy.test.support import pathmap
+from dendropy.calculate import statistics
+from dendropy.test.support import dendropytest
+
class TestConsensusTree(unittest.TestCase):
    """
    Compares DendroPy's majority-rule consensus tree against the
    consensus tree produced by MrBayes for the same post-burn-in sample.
    """

    def setUp(self):
        # Pool the post-burn-in trees (offset 25) from all four MrBayes runs.
        self.tree_list = dendropy.TreeList()
        for run_number in range(1, 5):
            source_path = pathmap.tree_source_path('pythonidae.mb.run%d.t' % run_number)
            self.tree_list.read_from_path(source_path,
                    'nexus',
                    collection_offset=0,
                    tree_offset=25)
        self.mb_con_tree = dendropy.Tree.get_from_path(
                pathmap.tree_source_path("pythonidae.mb.con"),
                schema="nexus",
                taxon_namespace=self.tree_list.taxon_namespace)
        self.mb_con_tree.encode_bipartitions()

    def testConsensus(self):
        con_tree = self.tree_list.consensus(
                min_freq=0.50,
                is_bipartitions_updated=False,
                support_label_decimals=2)
        con_tree.encode_bipartitions()
        # Topologies must match exactly, bipartition for bipartition ...
        self.assertEqual(treecompare.symmetric_difference(self.mb_con_tree, con_tree), 0)
        self.assertEqual(len(con_tree.bipartition_encoding), len(self.mb_con_tree.bipartition_encoding))
        # ... and support values on shared bipartitions must agree to
        # two decimal places with the MrBayes-reported values.
        for bipartition in self.mb_con_tree.bipartition_encoding:
            mb_edge = self.mb_con_tree.bipartition_edge_map[bipartition]
            con_edge = con_tree.bipartition_edge_map[bipartition]
            if mb_edge.head_node.label and con_edge.head_node.label:
                mb_support = float(mb_edge.head_node.label)
                con_support = round(float(con_edge.head_node.label), 2)
                self.assertAlmostEqual(mb_support, con_support, 2)
+
class TestBasicCredibilityScoring(unittest.TestCase):
    """
    Checks TreeArray split-support credibility scores (log-product and
    sum of split supports) against independent SplitDistribution
    calculations and known reference values for a fixed rooted sample.
    """

    def get_trees(self):
        # Fresh, independently-read copy of the sample so the verification
        # SplitDistribution shares no state with self.trees.
        trees = dendropy.TreeList.get_from_path(
                pathmap.tree_source_path("issue_mth_2009-02-03.rooted.nexus"),
                "nexus")
        return trees

    def setUp(self):
        self.trees = self.get_trees()

    def test_product_of_credibilities(self):
        """
        Per-tree log-product-of-split-supports scores must agree with
        SplitDistribution; known best tree is index 70 with known score.
        """
        ta = self.trees.as_tree_array(is_rooted_trees=True)
        sd = self.get_trees().split_distribution(is_bipartitions_updated=False) # for independent verification
        scores, max_idx = ta.calculate_log_product_of_split_supports()
        self.assertEqual(len(scores), len(self.trees))
        for score, tree in zip(scores, self.trees):
            self.assertAlmostEqual(score, sd.log_product_of_split_support_on_tree(tree))
        self.assertEqual(max_idx, 70)
        self.assertAlmostEqual(scores[max_idx], -33.888380488585284)
        t0 = self.trees[70]
        t1 = ta.maximum_product_of_split_support_tree()
        self.assertEqual(treecompare.symmetric_difference(t0, t1), 0)

    def test_sum_of_credibilities(self):
        """
        Per-tree sum-of-split-supports scores must agree with
        SplitDistribution; known best tree is index 73 with known score.
        """
        ta = self.trees.as_tree_array(is_rooted_trees=True)
        sd = self.get_trees().split_distribution(is_bipartitions_updated=False) # for independent verification
        scores, max_idx = ta.calculate_sum_of_split_supports()
        self.assertEqual(len(scores), len(self.trees))
        for score, tree in zip(scores, self.trees):
            self.assertAlmostEqual(score, sd.sum_of_split_support_on_tree(tree))
        self.assertEqual(max_idx, 73)
        self.assertAlmostEqual(scores[max_idx], 30.89)
        t0 = self.trees[73]
        t1 = ta.maximum_sum_of_split_support_tree()
        self.assertEqual(treecompare.symmetric_difference(t0, t1), 0)

    def test_split_distribution_max_sum_of_credibilities(self):
        """
        Direct SplitDistribution check of the known best
        sum-of-split-supports score for tree index 73.
        """
        sd = self.trees.split_distribution(is_bipartitions_updated=False)
        t0 = self.trees[73]
        score = sd.sum_of_split_support_on_tree(t0)
        self.assertAlmostEqual(score, 30.89)

        # # Best tree: bootrep74 (tree number 74)
        # # Highest Sum Clade Credibility: 30.89
        # # Best tree: bootrep74 (tree number 74)
        # # Highest Sum Clade Credibility: 31.185714285714287

        # scores, max_idx = ta.calculate_log_product_of_split_supports()
        # self.assertEqual(len(scores), len(self.trees))
        # self.assertEqual(max_idx, 70)
        # t1 = ta.maximum_product_of_split_support_tree()
        # self.assertEqual(treecompare.symmetric_difference(t0, t1), 0)
        # t1, t2 = tsum.calculate_tree_clade_credibilities(trees=trees)
        # for t in trees:
        #     self.assertTrue(hasattr(t, "log_product_of_split_support"))
        #     self.assertTrue(hasattr(t, "sum_of_split_posteriors"))
        # self.assertEqual(trees.index(t1), 70)
        # self.assertAlmostEqual(t1.log_product_of_split_support, -33.888380488585284)
        # # self.assertAlmostEqual(t1.sum_of_split_posteriors, 85.83000000000001)

    # def test_from_trees_noburnin_max_sum_cc(self):
        # trees = dendropy.TreeList.get_from_path(
        #         pathmap.tree_source_path("issue_mth_2009-02-03.rooted.nexus"),
        #         "nexus")
        # tsum = treesum.TreeSummarizer()
        # t1, t2 = tsum.calculate_tree_clade_credibilities(trees=trees)
        # for t in trees:
        #     self.assertTrue(hasattr(t, "log_product_of_split_support"))
        #     self.assertTrue(hasattr(t, "sum_of_split_posteriors"))
        # self.assertEqual(trees.index(t2), 73)
        # # self.assertAlmostEqual(t2.log_product_of_split_support, -38.45253940270466)
        # self.assertAlmostEqual(t2.sum_of_split_posteriors, 89.89000000000001)

    # def test_from_trees_with_burnin_max_product_cc(self):
        # trees = dendropy.TreeList.get_from_path(
        #         pathmap.tree_source_path("issue_mth_2009-02-03.rooted.nexus"),
        #         "nexus")
        # burnin = 30
        # tsum = treesum.TreeSummarizer()
        # t1, t2 = tsum.calculate_tree_clade_credibilities(
        #         trees=trees,
        #         burnin=burnin)
        # for t in trees[burnin:]:
        #     self.assertTrue(hasattr(t, "log_product_of_split_support"))
        #     self.assertTrue(hasattr(t, "sum_of_split_posteriors"))

        # # Best tree: bootrep71 (tree number 71)
        # # Highest Log Clade Credibility: -33.95771606695942
        # self.assertEqual(trees.index(t1), 70)
        # self.assertAlmostEqual(t1.log_product_of_split_support, -33.95771606695942)
        # # self.assertAlmostEqual(t1.sum_of_split_posteriors, 85.98571428571427)

    # def test_from_trees_with_burnin_max_sum_cc(self):
        # trees = dendropy.TreeList.get_from_path(
        #         pathmap.tree_source_path("issue_mth_2009-02-03.rooted.nexus"),
        #         "nexus")
        # burnin = 30
        # tsum = treesum.TreeSummarizer()
        # t1, t2 = tsum.calculate_tree_clade_credibilities(
        #         trees=trees,
        #         burnin=burnin)
        # for t in trees[burnin:]:
        #     self.assertTrue(hasattr(t, "log_product_of_split_support"))
        #     self.assertTrue(hasattr(t, "sum_of_split_posteriors"))

        # # Best tree: bootrep74 (tree number 74)
        # # Highest Sum Clade Credibility: 30.89
        # # Best tree: bootrep74 (tree number 74)
        # # Highest Sum Clade Credibility: 31.185714285714287
        # self.assertTrue(trees.index(t2), 73)
        # # self.assertAlmostEqual(t2.log_product_of_split_support, -37.912350577390605)
        # self.assertAlmostEqual(t2.sum_of_split_posteriors, 31.185714285714287)
+
class TestTreeEdgeSummarization(unittest.TestCase):
    """
    Tests summarization of node ages from a posterior sample of trees
    onto a fixed target (MCCT) topology.
    """

    def setUp(self):
        self.support_trees_path = pathmap.tree_source_path("primates.beast.mcmc.trees")
        self.target_tree_path = pathmap.tree_source_path("primates.beast.mcct.noedgelens.tree")
        self.expected_tree_path = pathmap.tree_source_path("primates.beast.mcct.medianh.tre")
        # Number of leading samples discarded as burn-in.
        self.burnin = 40

    def testMeanNodeAgeSummarizationOnMCCT(self):
        """
        Sets edge lengths on the MCCT target from median node ages of
        the post-burn-in support trees, then compares the resulting node
        ages against a pre-computed reference tree.
        """
        tree_array = dendropy.TreeArray(ignore_node_ages=False)
        tree_array.read_from_path(
                self.support_trees_path,
                "nexus",
                # collection_offset=0,
                tree_offset=self.burnin,
                )
        target_tree = dendropy.Tree.get_from_path(
                self.target_tree_path,
                schema="nexus",
                taxon_namespace=tree_array.taxon_namespace,
                )
        tree_array.summarize_splits_on_tree(
                tree=target_tree,
                is_bipartitions_updated=False,
                set_edge_lengths="median-age",
                )
        expected_tree = dendropy.Tree.get_from_path(
                self.expected_tree_path,
                "nexus",
                taxon_namespace=tree_array.taxon_namespace)
        expected_tree.encode_bipartitions()
        expected_tree.calc_node_ages()
        # Same topology, and (almost) equal head-node age per bipartition.
        self.assertEqual(expected_tree.bipartition_encoding, target_tree.bipartition_encoding)
        for exp_bipartition in expected_tree.bipartition_encoding:
            exp_edge = expected_tree.bipartition_edge_map[exp_bipartition]
            obs_edge = target_tree.bipartition_edge_map[exp_bipartition]
            self.assertAlmostEqual(obs_edge.head_node.age, exp_edge.head_node.age)
+
class TestTopologyCounter(dendropytest.ExtendedTestCase):
    """
    Tests TreeArray topology-frequency counting across combinations of
    tree weighting, burn-in offset, rooting, and multifurcation.
    """

    def get_regime(self,
            is_rooted,
            is_multifurcating,
            is_weighted,
            tree_offset=0,
            taxon_namespace=None,
            num_trees=500):
        """
        Builds a random sample of ``num_trees`` newick tree strings drawn
        from a fixed pool of source topologies, tracking for each source
        topology its expected (optionally weighted) frequency among trees
        drawn at or after ``tree_offset``.

        Returns a tuple: (source trees, dict mapping topology key
        [frozenset of bipartitions] -> expected frequency, newick text of
        the sample).
        """
        if taxon_namespace is None:
            taxon_namespace = dendropy.TaxonNamespace()
        if is_multifurcating:
            if is_rooted:
                tree_filename = "dendropy-test-trees-multifurcating-rooted.nexus"
            else:
                tree_filename = "dendropy-test-trees-multifurcating-unrooted.nexus"
        else:
            if is_rooted:
                tree_filename = "dendropy-test-trees-n10-rooted-treeshapes.nexus"
            else:
                tree_filename = "dendropy-test-trees-n14-unrooted-treeshapes.nexus"
        source_trees = dendropy.TreeList.get_from_path(
                pathmap.tree_source_path(tree_filename),
                "nexus",
                taxon_namespace=taxon_namespace)
        for tree in source_trees:
            tree.encode_bipartitions()
            # Hashable topology identity: the set of its bipartitions.
            tree.key = frozenset(tree.bipartition_encoding)
            tree.total_weighted_count = 0.0
            tree.actual_count = 0
        # if is_weighted:
        #     weights = []
        #     for tree in source_trees:
        #         w = random.uniform(0.1, 10)
        #         tree.weight = w
        #         weights.append(w)
        # else:
        #     weights = [1.0 for i in len(source_trees)]
        test_tree_strings = []
        total_weight = 0.0
        while len(test_tree_strings) < num_trees:
            tree = random.choice(source_trees)
            # Only draws at or past the burn-in offset count toward the
            # expected frequencies.
            if len(test_tree_strings) >= tree_offset:
                tree.actual_count += 1
            if is_weighted:
                weight = random.choice([0.25, 1.0, 2.8, 5.6, 11.0,])
                tree.weight = weight
                if len(test_tree_strings) >= tree_offset:
                    tree.total_weighted_count += weight
                    total_weight += weight
            else:
                tree.weight = None
                if len(test_tree_strings) >= tree_offset:
                    tree.total_weighted_count += 1.0
                    total_weight += 1.0
            # Randomize edge lengths so only topology identity matters.
            for nd in tree:
                nd.edge.length = random.uniform(0, 100)
            test_tree_strings.append(tree.as_string(
                schema="newick",
                store_tree_weights=is_weighted,
                suppress_edge_lengths=False,
                suppress_internal_node_labels=True,
                suppress_internal_taxon_labels=True,
                ))
        test_trees_string = "\n".join(test_tree_strings)
        bipartition_encoding_freqs = {}
        source_trees.total_weight = total_weight
        for tree in source_trees:
            tree.frequency = float(tree.total_weighted_count) / total_weight
            bipartition_encoding_freqs[tree.key] = tree.frequency
        return source_trees, bipartition_encoding_freqs, test_trees_string

    def testVariants(self):
        """
        Exercises all combinations of weighting/multifurcation/rooting
        (with a burn-in offset of 100) and checks the topology
        frequencies reported by the TreeArray against expectation.
        """
        for tree_offset, is_weighted, is_multifurcating, is_rooted in itertools.product( (100,), (False, True, ), (False, True, ), (False, True, ),  ):
        # for tree_offset, is_weighted, is_multifurcating, is_rooted in itertools.product( (0, 100), (True,), (False,), (False,),  ):
            # print("is_rooted: {is_rooted}, is_multifurcating: {is_multifurcating}, is_weighted: {is_weighted}, tree_offset: {tree_offset}".format(
            #     is_rooted=is_rooted,
            #     is_multifurcating=is_multifurcating,
            #     is_weighted=is_weighted,
            #     tree_offset=tree_offset))
            source_trees, bipartition_encoding_freqs, test_trees_string = self.get_regime(
                    is_rooted=is_rooted,
                    is_multifurcating=is_multifurcating,
                    is_weighted=is_weighted,
                    tree_offset=tree_offset)
            ta = dendropy.TreeArray(
                    is_rooted_trees=is_rooted,
                    use_tree_weights=is_weighted,
                    taxon_namespace=source_trees.taxon_namespace,
                    )
            ta.read_from_string(
                    test_trees_string,
                    "newick",
                    tree_offset=tree_offset,
                    store_tree_weights=is_weighted)
            be_to_tree = {}
            for tree in source_trees:
                be_to_tree[tree.key] = tree
            topologies = ta.topologies()
            for tree in topologies:
                b = frozenset(tree.encode_bipartitions())
                # stree = be_to_tree[b]
                # print("{} ({}): {}".format(
                #     calculated_topology_freqs[tree],
                #     ta._split_distribution.calc_normalization_weight(),
                #     (   bipartition_encoding_freqs[b],
                #         stree.actual_count,
                #         stree.total_weighted_count,
                #         source_trees.total_weight,
                #         stree.frequency,
                #         stree.total_weighted_count / source_trees.total_weight,
                #     )))
                self.assertAlmostEqual(
                        tree.frequency,
                        bipartition_encoding_freqs[b])

            calculated_bipartition_encoding_freqs = ta.bipartition_encoding_frequencies()
            for tree in source_trees:
                # if tree.key not in calculated_bipartition_encoding_freqs:
                #     print(tree.actual_count)
                #     print(tree.total_weighted_count)
                #     print(tree.frequency)
                # f1 = bipartition_encoding_freqs[tree.key]
                # f2 = calculated_bipartition_encoding_freqs[tree.key]
                # self.assertAlmostEqual(f1,f2)
                if tree.actual_count == 0:
                    # Topologies never sampled post-burn-in may be absent
                    # entirely, or present with (near-)zero frequency.
                    if tree.key in calculated_bipartition_encoding_freqs:
                        self.assertAlmostEqual(calculated_bipartition_encoding_freqs[tree.key], 0)
                else:
                    # self.assertIn(tree.key, calculated_bipartition_encoding_freqs)
                    f1 = bipartition_encoding_freqs[tree.key]
                    f2 = calculated_bipartition_encoding_freqs[tree.key]
                    self.assertAlmostEqual(f1,f2)

    def testSimple(self):
        """
        Checks topology frequencies on a small hand-built sample of five
        topologies with known multiplicities (8:5:4:2:1).
        """
        self.taxon_namespace = dendropy.TaxonNamespace()
        tree1_str = "[&U] (A,(B,(C,(D,E))));"
        tree2_str = "[&U] (B,(C,(D,(A,E))));"
        tree3_str = "[&U] (D,(A,(B,(C,E))));"
        tree4_str = "[&U] (C,(D,(A,(B,E))));"
        tree5_str = "[&U] (A,(E,(B,(C,D))));"
        all_tree_strs = [tree1_str, tree2_str, tree3_str, tree4_str, tree5_str]
        weights = [8, 5, 4, 2, 1]
        test_tree_strs = []
        for idx, tree_str in enumerate(all_tree_strs):
            test_tree_strs.extend([tree_str] * weights[idx])
        test_trees = dendropy.TreeList.get_from_string(
                "\n".join(test_tree_strs),
                'newick',
                taxon_namespace=self.taxon_namespace)
        # expected_freq_values = [float(i)/sum(weights) for i in weights]
        expected_trees = dendropy.TreeList.get_from_string(
                "\n".join(all_tree_strs),
                'newick',
                taxon_namespace=self.taxon_namespace)
        expected_freqs = {}
        for idx, tree in enumerate(expected_trees):
            b = frozenset(tree.encode_bipartitions())
            expected_freqs[b] = float(weights[idx])/sum(weights)
        ta = test_trees.as_tree_array()
        topologies = ta.topologies()
        self.assertEqual(len(topologies), len(expected_freqs))
        for tree in topologies:
            b = frozenset(tree.encode_bipartitions())
            self.assertAlmostEqual(tree.frequency, expected_freqs[b])

if __name__ == "__main__":
    unittest.main()
diff --git a/dendropy/utility/__init__.py b/dendropy/utility/__init__.py
new file mode 100644
index 0000000..345d6ea
--- /dev/null
+++ b/dendropy/utility/__init__.py
@@ -0,0 +1,23 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
import random
import os

# Package-wide shared pseudo-random number generator instance.
GLOBAL_RNG = random.Random()
+
diff --git a/dendropy/utility/bibtex.py b/dendropy/utility/bibtex.py
new file mode 100644
index 0000000..71dd7aa
--- /dev/null
+++ b/dendropy/utility/bibtex.py
@@ -0,0 +1,326 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+BibTeX interface.
+"""
+
+import re
+import textwrap
+import sys
+from dendropy.utility.container import OrderedCaselessDict
+
# Default order in which fields are emitted when writing a BibTeX entry.
BIBTEX_FIELDS = [
    'author',
    'year',
    'title',
    'journal',
    'volume',
    'number',
    'editor',
    'booktitle',
    'series',
    'chapter',
    'pages',
    'publisher',
    'institution',
    'address',
    'annote',
    'edition',
    'howpublished',
    'key',
    'month',
    'organization',
    'school',
    'note',
    'abstract',
    'keywords',
    'localfile',
    ]
+
+def _clean_parsed_text(text):
+    """
+    Strips outer quotes, curly braces, etc.; remove multiple
+    consecutive whitespaces, etc.
+    """
+    if text.startswith('{') and text.endswith('}'):
+        text = text[1:-1]
+    elif text.startswith('"') and text.endswith('"'):
+        text = text[1:-1]
+    text = re.sub("[\s]+", " ", text).strip()
+    return text
+
+def _format_bibtex_value(text, col_start=1, wrap=True, width=78):
+    """
+    Formats text of a BibTeX field.
+    """
+    ftext = re.sub("[\s]+", " ", text).strip()
+    col_indent = " " * col_start
+    if not ftext[0].isdigit():
+        if wrap:
+            initial_indent = '{'
+            subsequent_indent = ' '
+        else:
+            ftext = '{' + ftext
+        ftext = ftext + '}'
+    else:
+        initial_indent = ''
+        subsequent_indent = ''
+    if wrap:
+        wrapped = textwrap.wrap(ftext,
+                                width=width,
+                                initial_indent=initial_indent,
+                                subsequent_indent=subsequent_indent)
+        for index, line in enumerate(wrapped[1:]):
+            wrapped[index+1] = (" " * col_start) + wrapped[index+1]
+        return '\n'.join(wrapped)
+    else:
+        return ftext
+
class BibTexEntry(object):
    """
    Tracks a single BibTeX entry.
    """
    # Splits "@type{citekey, body}" into (type, citekey, body).
    decompose_pattern = re.compile(r'^@(\w*)\s*{\s*([\w|\:|\-]*),(.*)}')
    # works, but misses last field
    field_pattern = re.compile(r'\s*([\w|\-]*?)\s*=\s*(.*?),(?=\s*[\w|\-]*\s*\=)')
    # get the last field
    last_field_pattern = re.compile(r'\s*([\w|\-]*?)\s*=\s*(.*?)\s*[,]*\s*$')
+
    def __init__(self, citation=None):
        """
        Sets up internal dictionary of BibTeX fields, and initializes
        if argument is given.

        ``citation`` may be another BibTexEntry (copied), a dict of
        field-name -> value pairs, or raw BibTeX text to be parsed.
        NOTE(review): the default ``citation=None`` falls through to
        ``parse_text(None)`` -- presumably handled there; confirm.
        """
        self.bibtype = None
        self.citekey = None
        if isinstance(citation, BibTexEntry):
            # Copy constructor: duplicate the source entry's field dict.
            self._entry_dict = OrderedCaselessDict(citation._entry_dict)
        elif isinstance(citation, dict):
            self._entry_dict = OrderedCaselessDict()
            # Normalize field names to lower case on ingestion.
            for k, v in citation.items():
                self._entry_dict[k.lower()] = v
            self.bibtype = self._entry_dict.get("bibtype", None)
            self.citekey = self._entry_dict.get("citekey", None)
        else:
            self._entry_dict = OrderedCaselessDict()
            self.parse_text(citation)
+
    def __getattr__(self, name):
        """
        Allows bibtex fields (and any additional ones) to be treated
        like object attributes.

        Unset standard BibTeX fields resolve to an empty string rather
        than raising AttributeError; other unknown names raise.
        """
        entry_dict = self._get_entry_dict()
        if name == '_entry_dict' or name == '_BibTexEntry_entry_dict':
            return entry_dict
        elif name == '__dict__':
            return object.__getattribute__(self, '__dict__')
        elif name == 'bibtype' and hasattr(self, 'bibtype'):
            # NOTE(review): __getattr__ is only invoked when normal
            # attribute lookup fails; since __init__ stores 'bibtype' and
            # 'citekey' in the instance __dict__, these two branches look
            # unreachable after construction -- confirm.
            return object.__getattribute__(self, '__dict__')['bibtype']
        elif name == 'citekey' and hasattr(self, 'citekey'):
            return object.__getattribute__(self, '__dict__')['citekey']
        elif name in entry_dict:
            return entry_dict[name]
        elif name in BIBTEX_FIELDS:
            # Standard fields default to an empty string when unset.
            return ""
        else:
            raise AttributeError(name)
+
+    def __setattr__(self, name, value):
+        """
+        Allows bibtex fields (and any additional ones) to be treated
+        like object attributes.
+        """
+        entry_dict = self._get_entry_dict()
+        if name == '_entry_dict' or name == '_BibTexEntry_entry_dict':
+            entry_dict = value
+        elif name == 'bibtype' or name == 'citekey':
+            object.__setattr__(self, name, value)
+        else:
+            self._entry_dict[name] = value
+
+    def __delattr__(self, name):
+        """
+        Allows bibtex fields (and any additional ones) to be treated
+        like object attributes.
+        """
+        entry_dict = self._get_entry_dict()
+        if name == '_entry_dict' or name == '_BibTexEntry_entry_dict':
+            object.__delattr__(self, '_entry_dict')
+        elif name in entry_dict:
+            del(entry_dict[name])
+        elif name in BIBTEX_FIELDS:
+            pass
+        elif name in object.__getattribute__(self, '__dict__'):
+            object.__delattr__(name)
+        else:
+            raise AttributeError(name)
+
+    def __str__(self):
+        """
+        String representation of self.
+        """
+        return self.as_bibtex()
+
+    def __repr__(self):
+        """
+        Internal representation of self.
+        """
+        repr_dict = {}
+        repr_dict['bibtype'] = self.bibtype
+        repr_dict['citekey'] = self.citekey
+        repr_dict.update(self.fields_as_dict())
+        return repr_dict
+
+    def _get_entry_dict(self):
+        """
+        Returns the internal field dictionary, creating it first if
+        neccessary.
+        """
+        if not hasattr(self, '_entry_dict'):
+            object.__setattr__(self, '_entry_dict', {})
+        return object.__getattribute__(self, '_entry_dict')
+
+    def _get_fields(self):
+        """
+        Returns list of populated fields in order (does not include
+        bibtype and citekey).
+        """
+        fields = []
+        for field in BIBTEX_FIELDS:
+            if field in self._entry_dict:
+                fields.append(field)
+        for key in self._entry_dict:
+            if key not in fields:
+                fields.append(key)
+        return fields
+
+    fields = property(_get_fields)
+
+    def parse_text(self, text):
+        """
+        Parses a BibTeX text entry.
+        """
+        text = text.replace("\n", "")
+        self.bibtype = None
+        self.citekey = None
+        text = text.strip()
+        decompose_match = self.decompose_pattern.match(text)
+        try:
+            self.bibtype = decompose_match.group(1)
+        except AttributeError as exception:
+            raise ValueError("Failed to parse bibtype: {}".format(text))
+        try:
+            self.citekey = decompose_match.group(2)
+        except AttributeError as exception:
+            raise ValueError("Failed to parse citekey: {}".format(text))
+        remaining = decompose_match.group(3)
+        field_match = self.field_pattern.match(remaining)
+        while field_match:
+            field_match = self.field_pattern.match(remaining)
+            if field_match:
+                field_name = field_match.group(1).lower()
+                field_value = _clean_parsed_text(field_match.group(2))
+                self._entry_dict[field_name] = field_value
+                remaining = remaining.replace(field_match.group(), '')
+        if remaining:
+            last_field_match = self.last_field_pattern.match(remaining)
+        if last_field_match:
+            field_name = last_field_match.group(1).lower()
+            field_value = _clean_parsed_text(last_field_match.group(2))
+            self._entry_dict[field_name] = field_value
+
+    def fields_as_dict(self):
+        """
+        Returns the fields (i.e., all public attributes except for
+        bibtype and citekey as a dictionary).
+        """
+        return dict(self._entry_dict)
+
+    def as_bibtex(self, wrap_width=78):
+        """
+        Composes entry in BibTex format.
+        """
+        entry = []
+        sep = "  =  "
+        entry.append('@{}{{},'.format((self.bibtype, self.citekey)))
+        fields = self.fields
+#         maxlen = max([len(field) for field in fields])
+        maxlen = max([len(field) for field in BIBTEX_FIELDS])
+        for field in fields:
+            if field != 'url':
+                wrap = True
+            else:
+                wrap = False
+            field_header = field.ljust(maxlen)
+            field_value = _format_bibtex_value(self._entry_dict[field],
+                                      wrap=wrap,
+                                      width = wrap_width - maxlen - len(sep) + 2,
+                                      col_start = maxlen + len(sep) + 2 )
+            entry.append("  {}{}{},".format((field_header, sep, field_value)))
+        entry.append('}')
+        return '\n'.join(entry)
+
+    def as_compact_bibtex(self):
+        """
+        Composes entry in BibTex format.
+        """
+        entry = []
+        entry.append('@{}{{{},'.format((self.bibtype, self.citekey)))
+        fields = self.fields
+        for field in fields:
+            field_value = _format_bibtex_value(self._entry_dict[field],
+                                      wrap=False,
+                                      width=None,
+                                      col_start=1)
+            entry.append("{}={},".format((field, field_value)))
+        entry.append('}')
+        return ''.join(entry)
+
class BibTexDb(object):
    """
    A BibTeX database: an ordered collection of `BibTexEntry` objects.
    """

    def __init__(self, bibfilepath=None):
        """
        Initializes an empty database, optionally loading entries from
        the BibTeX file at ``bibfilepath``.
        """
        self.entries = []
        if bibfilepath:
            self.add_from_file(bibfilepath)

    def add_from_file(self, filepath):
        """
        Reads and loads a BibTeX database file.
        """
        # context manager guarantees the handle is closed even if
        # downstream parsing raises (the original leaked it on error)
        with open(filepath, 'r') as bibfile:
            self.add_from_text(bibfile.read())

    def add_from_text(self, text):
        """
        Parses ``text`` and appends a `BibTexEntry` for each BibTeX
        record found.
        """
        text = text.replace('\n', '')
        # matches a complete "@type{...}" record, allowing one level of
        # nested braces inside field values
        entry_pattern = re.compile(r'@\w*{([^{}]+{[^{}]*({[^{}]*}[^{}]*)*})+}')
        for match in entry_pattern.finditer(text):
            self.entries.append(BibTexEntry(match.group()))
diff --git a/dendropy/utility/bitprocessing.py b/dendropy/utility/bitprocessing.py
new file mode 100644
index 0000000..72db68c
--- /dev/null
+++ b/dendropy/utility/bitprocessing.py
@@ -0,0 +1,95 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Various bitwise utilities.
+"""
+
+import sys
+
if sys.hexversion >= 0x03010000:
    def bit_length(n):
        """
        Return the number of bits necessary to represent an integer in
        binary, excluding the sign and leading zeros; equivalently, one
        past the index of the highest set bit. Objects without a
        ``bit_length`` method are reported as zero-width.
        """
        try:
            return n.bit_length()
        except AttributeError:
            return 0
else:
    def bit_length(n):
        """
        Return the number of bits necessary to represent an integer in
        binary, excluding the sign and leading zeros. Pure-Python
        fallback for interpreters predating ``int.bit_length``.
        """
        # bin(-37) -> '-0b100101'; strip the sign/prefix characters and
        # count what remains: len('100101') == 6
        return len(bin(n).lstrip('-0b'))
+
def int_as_bitstring(n, length=None, symbol0=None, symbol1=None, reverse=False):
    """
    Renders the integer ``n`` as a bitstring.

    Parameters
    ----------
    n : int
        Value to render.
    length : int, optional
        Minimum width of the result, zero-padded on the left; defaults
        to the bit length of ``n``.
    symbol0 : str, optional
        Replacement character(s) for '0' digits.
    symbol1 : str, optional
        Replacement character(s) for '1' digits.
    reverse : bool
        If True, the string is returned least-significant bit first.

    Returns
    -------
    str
    """
    width = bit_length(n) if length is None else length
    bits = bin(n)[2:].rjust(width, "0")
    # substitutions are applied '0' first, then '1', matching the
    # original ordering
    if symbol0 is not None:
        bits = bits.replace("0", symbol0)
    if symbol1 is not None:
        bits = bits.replace("1", symbol1)
    return bits[::-1] if reverse else bits
+
def num_set_bits(n):
    """
    Returns the number of bits set to 1 in the binary representation of
    ``n`` (the population count; the sign is ignored).
    """
    return format(n, "b").count("1")
+
def least_significant_set_bit(n):
    """
    Returns the value of the least-significant set bit of the integer
    'n' (0 if no bit is set).
    """
    # two's-complement identity: n & -n isolates the lowest set bit,
    # equivalent to (n & (n - 1)) ^ n
    return n & -n
+
def indexes_of_set_bits(s, fill_bitmask=-1, one_based=False, ordination_in_mask=False):
    """
    Returns a list of the indexes of the set bits of ``s`` (see
    `iter_set_bit_indexes` for the meaning of the arguments).
    """
    # materialize the generator directly instead of via a redundant
    # identity comprehension over iter(...)
    return list(iter_set_bit_indexes(s, fill_bitmask, one_based, ordination_in_mask))
+
def iter_set_bit_indexes(s, fill_bitmask=-1, one_based=False, ordination_in_mask=False):
    """
    Yields the index of each bit that is set in ``s & fill_bitmask``.

    If ``one_based`` is True, the 0x01 bit is reported as index 1
    rather than 0. If ``ordination_in_mask`` is True, indexes count
    only the 1-bits of ``fill_bitmask`` to the right of the bit (i.e.,
    positions within the subset of taxa present in the mask) rather
    than all positions to the right.
    """
    index = 1 if one_based else 0
    masked = s & fill_bitmask
    probe = 1
    count_every_position = not ordination_in_mask
    while probe <= masked:
        if masked & probe:
            yield index
        # advance the reported index either for every position, or only
        # for positions present in the mask
        if count_every_position or (fill_bitmask & probe):
            index += 1
        probe <<= 1
diff --git a/dendropy/utility/cli.py b/dendropy/utility/cli.py
new file mode 100644
index 0000000..1cb0ec7
--- /dev/null
+++ b/dendropy/utility/cli.py
@@ -0,0 +1,240 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Support for CLI operations.
+"""
+
+import re
+import argparse
+import os
+import sys
# Python 2/3 compatibility shim: ``raw_input`` was renamed ``input`` in
# Python 3; ``input_str`` always reads a line from stdin as a string.
if sys.hexversion < 0x03000000:
    input_str = raw_input
else:
    input_str = input
+import textwrap
+
+import dendropy
+
def confirm_overwrite(filepath,
                      replace_without_asking=False,
                      file_desc="Output",
                      out=sys.stdout):
    """
    Returns True if it is OK to (over)write ``filepath``.

    Writing to the null device is always allowed, as is writing to a
    path that does not yet exist. Otherwise, the user is prompted on
    stdin unless ``replace_without_asking`` is set, in which case
    permission is granted unconditionally.
    """
    if os.path.realpath(filepath) == os.devnull:
        return True
    if not os.path.exists(filepath):
        return True
    if replace_without_asking:
        answer = 'y'
    else:
        out.write('%s file already exists: "%s"\n' % (file_desc, filepath))
        answer = input_str("Overwrite (y/N)? ")
    return answer.lower().startswith("y")
+
def show_splash(
        prog_name,
        prog_subtitle,
        prog_version,
        prog_author,
        prog_copyright,
        include_citation=True,
        include_copyright=False,
        additional_citations=None,
        width=76,
        dest=sys.stderr,
        ):
    """
    Writes a boxed "splash" banner for a program to ``dest``.

    The banner shows the program name, subtitle, version, author, and
    the DendroPy version in use; optionally followed by the copyright
    text and/or a citation block built by
    `compose_citation_for_program`. ``width`` is the target width of
    the box (the box grows if any line is longer).
    """
    wrap_width = width - 2
    dendropy_description = dendropy.name()
    lines = []
    # a leading '^' marks a line to be centered when rendered below;
    # the literal token "{sub_bar1}" marks a horizontal divider
    lines.append("^"+prog_name)
    lines.append("^"+prog_subtitle)
    lines.append("^Version {}".format(prog_version))
    lines.append("^By {}".format(prog_author))
    lines.append("^Using: {}".format(dendropy_description))
    if include_copyright:
        copyright_lines = []
        copyright_lines.append("{sub_bar1}")
        copyright_text = textwrap.wrap(
                prog_copyright,
                width=wrap_width,
                )
        copyright_lines.extend(copyright_text)
        lines.extend(copyright_lines)
    if include_citation:
        lines.append("{sub_bar1}")
        lines.append("^Citation")
        lines.append("^~~~~~~~~")
        lines.extend(compose_citation_for_program(
            prog_name=prog_name,
            prog_version=prog_version,
            additional_citations=additional_citations,
            dendropy_description=dendropy_description,
            width=wrap_width-2,
            ))
    # size the box to the longest content line
    max_splash_text_width = max(len(i) for i in lines)
    top_bar = "/{}\\".format("=" * (max_splash_text_width + 2))
    bottom_bar = "\\{}/".format("=" * (max_splash_text_width + 2))
    sub_bar1 = "-" * (max_splash_text_width + 2)
    dest.write(top_bar + "\n")
    for line in lines:
        if line == "{sub_bar1}":
            dest.write("+{}+\n".format(sub_bar1))
        else:
            # '^' centers the line; everything else is left-aligned
            if line.startswith("^"):
                line = line[1:]
                align_char = "^"
            else:
                align_char = "<"
            dest.write("| {:{align_char}{width}} |\n".format(line, align_char=align_char, width=max_splash_text_width))
    dest.write(bottom_bar + "\n")
+
def compose_citation_for_program(
        prog_name,
        prog_version,
        additional_citations=None,
        dendropy_description=None,
        width=70,
        include_preamble=True,
        include_epilog=True):
    """
    Builds a list of text lines citing ``prog_name`` and the DendroPy
    library it runs on.

    Parameters
    ----------
    prog_name : str
        Name of the program being cited.
    prog_version : str
        Version of the program.
    additional_citations : iterable of str, optional
        Extra citation entries to append, each wrapped and indented.
    dendropy_description : str, optional
        Description of the DendroPy installation; defaults to
        ``dendropy.description()``.
    width : int
        Column width used for all wrapped text.
    include_preamble : bool
        Passed through to ``dendropy.citation_info``.
    include_epilog : bool
        If True, a reproducibility note is appended.

    Returns
    -------
    list of str
    """
    if dendropy_description is None:
        dendropy_description = dendropy.description()
    citation_lines = []
    citation_lines.extend(dendropy.citation_info(include_preamble=include_preamble, width=width))
    if additional_citations:
        for additional_citation in additional_citations:
            citation_lines.append("")
            c = textwrap.wrap(
                    additional_citation,
                    width=width,
                    initial_indent="  ",
                    subsequent_indent="    ",
                    )
            citation_lines.extend(c)
    if include_epilog:
        citation_lines.append("")
        extra = (
                "Note that, in the interests of scientific reproducibility, you "
                "should describe in the text of your publications not only the "
                "specific version of the {prog_name} program, but also the "
                "DendroPy library used in your analysis. "
                "For your information, you are running {dendropy_desc}."
                ).format( prog_name=prog_name,
                        prog_version=prog_version,
                        dendropy_desc=dendropy_description,
                        python_version=sys.version)
        # bug fix: the epilog previously ignored ``width`` and wrapped at
        # textwrap's default of 70 columns regardless of the caller's value
        citation_lines.extend(textwrap.wrap(extra, width=width))
    return citation_lines
+
# from http://stackoverflow.com/a/22157136/268330
class CustomFormatter(argparse.HelpFormatter):
    """
    An ``argparse`` help formatter with two extensions:

    (1) Help text that begins with the token '<pre>' is treated as
        pre-formatted: it is split on its own newlines and otherwise
        left untouched instead of being re-wrapped.

    (2) `format_definition_list_help` composes help text for a
        term/definition list (optional preamble, indented definition
        bodies, optional coda) in a form that cooperates with (1).
    """

    def _split_lines(self, text, width):
        """Split help ``text`` into lines, honoring the '<pre>' escape."""
        if text.startswith('<pre>'):
            # pre-formatted: drop the marker, keep the author's line breaks
            return text[5:].splitlines()
        return argparse.HelpFormatter._split_lines(self, text, width)

    @staticmethod
    def format_definition_list_help(
            preamble,
            definitions,
            coda=None):
        """
        Returns '<pre>'-marked help text: ``preamble``, then each
        (term, definition) of ``definitions`` as a bulleted term with
        its definition wrapped and indented beneath it, then ``coda``.
        Preamble/coda already starting with '<pre>' are passed through
        unwrapped.
        """
        parts = []
        if not preamble:
            parts.append("<pre>")
        elif preamble.startswith("<pre>"):
            parts.append(preamble)
        else:
            collapsed = re.sub(r'\s+', ' ', preamble)
            parts.append("<pre>" + textwrap.fill(collapsed, width=54))
        for term, term_definition in definitions:
            parts.append("- {}".format(term))
            parts.append(textwrap.fill(
                    re.sub(r'\s+', ' ', term_definition),
                    width=54,
                    initial_indent="      ",
                    subsequent_indent="      ",
                    replace_whitespace=True,
                    drop_whitespace=True,
                    ))
        if coda:
            if coda.startswith("<pre>"):
                parts.append(coda[5:])
            else:
                parts.append(textwrap.fill(re.sub(r'\s+', ' ', coda), width=54))
        return "\n".join(parts)
diff --git a/dendropy/utility/constants.py b/dendropy/utility/constants.py
new file mode 100644
index 0000000..0c5d7f9
--- /dev/null
+++ b/dendropy/utility/constants.py
@@ -0,0 +1,22 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
import decimal

# Intended as a value infinitesimally greater than one half (the Decimal
# context's next_plus() of 0.5, coerced back to float).
# NOTE(review): float() of the 28-digit Decimal 0.5000...01 rounds back to
# exactly 0.5 in double precision, so this constant equals 0.5 — confirm
# whether call sites rely on it being strictly greater than 0.5.
GREATER_THAN_HALF = float(decimal.Decimal(0.5).next_plus())
# Default numeric tolerance used when checking trees for ultrametricity.
DEFAULT_ULTRAMETRICITY_PRECISION=1e-5
diff --git a/dendropy/utility/container.py b/dendropy/utility/container.py
new file mode 100644
index 0000000..aa80c17
--- /dev/null
+++ b/dendropy/utility/container.py
@@ -0,0 +1,912 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Various data structures.
+"""
+
+import collections
+import copy
+import sys
+
+###############################################################################
+## OrderedSet
+
class OrderedSet(object):

    """
    Ordered collection of unique objects with O(1) look-ups, addition, and
    deletion.

    Items are stored both in a list (which preserves insertion order) and
    in a set (which gives O(1) membership tests); the two structures are
    maintained in parallel, so items must be hashable.
    """

    def __init__(self, *args):
        # _item_list preserves insertion order; _item_set mirrors it for
        # O(1) membership tests
        self._item_list = []
        self._item_set = set()
        if len(args) > 1:
            raise TypeError("OrderedSet expected at most 1 arguments, got 2")
        elif len(args) == 1:
            # initialize from a single iterable, skipping duplicates
            for a in args[0]:
                if a not in self._item_set:
                    self._item_set.add(a)
                    self._item_list.append(a)

    def __copy__(self, memo=None):
        """
        Returns a shallow copy: a new `OrderedSet` over the same items.
        """
        o = OrderedSet(self._item_list)
        return o

    def __deepcopy__(self, memo=None):
        """
        Returns a deep copy, registering ``self`` and its internal
        containers in ``memo`` so shared references are preserved.
        """
        other = self.__class__()
        memo[id(self)] = other
        memo[id(self._item_set)] = other._item_set
        memo[id(self._item_list)] = other._item_list
        for item in self._item_list:
            c = copy.deepcopy(item, memo)
            memo[id(item)] = c
            other._item_set.add(c)
            other._item_list.append(c)
        # also deep-copy any extra attributes set on (subclass) instances
        for k in self.__dict__:
            if k in other.__dict__:
                continue
            other.__dict__[k] = copy.deepcopy(self.__dict__[k], memo)
            memo[id(self.__dict__[k])] = other.__dict__[k]
        return other

    def __len__(self):
        """
        Returns the number of items.
        """
        return len(self._item_list)

    def __getitem__(self, index):
        """
        Returns the value at position ``index``: the subscript is a
        positional *index*, not the value itself.
        """
        return self._item_list[index]

    def __setitem__(self, index, value):
        """
        Replaces the value at position ``index`` with ``value``; the
        subscript is a positional *index*, not the value itself.
        """
        item = self._item_list[index]
        self._item_set.remove(item)
        self._item_set.add(value)
        self._item_list[index] = value

    def __delitem__(self, index):
        """
        Deletes the value at position ``index``; the subscript is a
        positional *index*, not the value itself.
        """
        self._item_set.remove(self._item_list[index])
        del self._item_list[index]

    def discard(self, key):
        """
        Removes the value ``key`` from ``self`` if present; no error is
        raised if it is absent.
        """
        if key in self._item_set:
            self._item_set.remove(key)
            self._item_list.remove(key)

    def remove(self, key):
        """
        Removes the value ``key`` from ``self``; raises KeyError if it
        is absent.
        """
        self._item_set.remove(key)
        self._item_list.remove(key)

    def __iter__(self):
        """
        Returns iterator over values in ``self``, in insertion order.
        """
        return iter(self._item_list)

    def next(self):
        """
        Returns an iterator over the values in ``self``.
        NOTE(review): despite the name, this returns a fresh iterator
        rather than the next item; callers should prefer ``iter(self)``.
        """
        return self.__iter__()

    def __reversed__(self):
        """
        Returns an `OrderedSet` with the values in reversed order.
        """
        return OrderedSet(reversed(self._item_list))

    def __add__(self, other):
        """
        Returns an `OrderedSet` of the values in ``self`` followed by
        those values of ``other`` not already present.
        """
        v = self._item_list + other._item_list
        return OrderedSet(v)

    def index(self, value):
        """
        Returns the position of ``value`` in ``self``.
        """
        return self._item_list.index(value)

    def __contains__(self, value):
        """
        Returns `True` if ``value`` is in ``self`` or `False` otherwise.
        """
        return value in self._item_set

    def add(self, value):
        """
        Appends ``value`` to ``self`` if not already present. Returns
        ``value`` if it was added, or `None` if it was already a member.
        """
        if value not in self._item_set:
            self._item_set.add(value)
            self._item_list.append(value)
            return value
        else:
            return None

    def update(self, other):
        """
        Appends each value of ``other`` that is not already in ``self``,
        preserving the order of ``other``.
        """
        for i in other:
            if i not in self._item_set:
                self._item_set.add(i)
                self._item_list.append(i)

    def __str__(self):
        return "[{}]".format(", ".join([str(i) for i in self._item_list]))

    def __repr__(self):
        return "{}([{}])".format(self.__class__.__name__,
            ", ".join([str(i) for i in self._item_list]))

    def __hash__(self):
        # identity-based hash: two OrderedSets that compare equal (==)
        # may hash differently; hashing by contents (below) would break
        # if items were added or removed after insertion into a dict/set
        return id(self)
    #     return hash( (t for t in self._item_list) )

    def __eq__(self, o):
        # equality is by ordered contents, not identity
        return self._item_list == o._item_list

    def __lt__(self, o):
        # lexicographic comparison of the ordered contents
        return self._item_list < o._item_list

    def pop(self, last=True):
        """
        Removes and returns one value: the last by default, or the first
        if ``last`` is False. Raises KeyError if ``self`` is empty.
        """
        if not self._item_set:
            raise KeyError('OrderedSet is empty')
        if last:
            key = self._item_list[-1]
        else:
            key = self._item_list[0]
        self.discard(key)
        return key

    def clear(self):
        """
        Removes all values from ``self``.
        """
        self._item_set = set()
        self._item_list = []
+
+###############################################################################
+## NormalizedBitmaskDict
+
class NormalizedBitmaskDict(collections.OrderedDict):
    """
    An ordered dictionary whose keys are integer bitmasks, normalized so
    that a bitmask and its complement map to the same entry.

    ``fill_bitmask`` must be provided before elements can be added to or
    removed from the dictionary. All keys are normalized such that the
    least-significant relevant bit is '0': if that bit of a key is '0'
    the key is stored as-is, otherwise its complement (XOR with
    ``fill_bitmask``) is stored instead.
    """

    @staticmethod
    def least_significant_set_bit(s):
        """
        Returns least-significant bit in integer 's' that is set.
        """
        m = s & (s - 1)     # clears the lowest set bit ...
        return m ^ s        # ... so the XOR with 's' isolates it

    # This implements the least-significant-bit-is-0 normalization
    # convention (the complementary convention would force that bit to 1).
    @staticmethod
    def normalize(key, fill_bitmask, lowest_relevant_bit):
        """
        Returns the normalized form of ``key``: ``key`` masked by
        ``fill_bitmask``, complemented first if its lowest relevant bit
        is set.
        """
        if key & lowest_relevant_bit:
            return (~key) & fill_bitmask             # force least-significant bit to 0
        else:
            return key & fill_bitmask                # keep least-significant bit as 0

    def __init__(self, other=None, fill_bitmask=None):
        """
        Parameters
        ----------
        other : dict or NormalizedBitmaskDict, optional
            Existing mapping whose items are (re-)normalized into this one.
        fill_bitmask : integer
            A bitmask where all possible bits that can be set to 1 are set to 1.
            When representing a taxon namespaces, with 8 taxa, for example,
            this would be 0b11111111. Incomplete leaf-sets on trees need to
            having the missing taxa bits set to 0. For example, for a tree
            missing taxa 2, 3, and 5, ``fill_bitmask`` would be 0b11101001.
        """
        collections.OrderedDict.__init__(self)
        self.lowest_relevant_bit = NormalizedBitmaskDict.least_significant_set_bit(fill_bitmask)
        self.fill_bitmask = fill_bitmask
        if other is not None:
            if isinstance(other, NormalizedBitmaskDict):
                self.fill_bitmask = other.fill_bitmask
                self.lowest_relevant_bit = other.lowest_relevant_bit
            if isinstance(other, dict):
                for key, val in other.items():
                    self[key] = val

    def __deepcopy__(self, memo):
        # values are deep-copied; keys are already-normalized integers
        o = NormalizedBitmaskDict(fill_bitmask=self.fill_bitmask)
        memo[id(self)] = o
        o.fill_bitmask = self.fill_bitmask
        o.lowest_relevant_bit = self.lowest_relevant_bit
        for key, val in self.items():
            o[key] = copy.deepcopy(val, memo)
        return o

    def normalize_key_and_assign_value(self, key, value):
        """*Almost* like __setitem__(), but returns the normalized key."""
        normalized_key = self.normalize_key(key)
        # bug fix: must dispatch to OrderedDict (not raw dict), otherwise
        # the insertion-order tracking of the base class is bypassed and
        # iteration over this dictionary desynchronizes from its contents
        collections.OrderedDict.__setitem__(self, normalized_key, value)
        return normalized_key

    def normalize_key(self, key):
        """Returns the canonical (normalized) form of ``key``."""
        return NormalizedBitmaskDict.normalize(key, self.fill_bitmask, self.lowest_relevant_bit)

    def __setitem__(self, key, value):
        "Sets item with normalized key."
        self.normalize_key_and_assign_value(key, value)

    def __getitem__(self, key):
        "Gets an item by its (normalized) key."
        return collections.OrderedDict.__getitem__(self, self.normalize_key(key))

    def __delitem__(self, key):
        "Removes item with normalized key."
        collections.OrderedDict.__delitem__(self, self.normalize_key(key))

    def __contains__(self, key):
        "Returns True if the normalized key is present."
        return collections.OrderedDict.__contains__(self, self.normalize_key(key))

    def pop(self, key, alt_val=None):
        "a.pop(k[, x]): a[k] if k in a (removing k), else x."
        # bug fix: the original discarded ``alt_val`` and raised KeyError
        # on a missing key, contradicting its own docstring
        return collections.OrderedDict.pop(self, self.normalize_key(key), alt_val)

    def get(self, key, def_val=None):
        "Gets an item by its key, returning default if key not present."
        return collections.OrderedDict.get(self, self.normalize_key(key), def_val)

    def setdefault(self, key, def_val=None):
        "Returns value of key, inserting ``def_val`` if key not present."
        # bug fix: the original swallowed the return value
        return collections.OrderedDict.setdefault(self, self.normalize_key(key), def_val)

    def update(self, other):
        """
        Updates (and overwrites) key/value pairs, normalizing each
        incoming key:
        k = { 'a':'A', 'b':'B', 'c':'C'}
        q = { 'c':'C', 'd':'D', 'f':'F'}
        k.update(q)
        {'a': 'A', 'c': 'C', 'b': 'B', 'd': 'D', 'f': 'F'}
        """
        for key, val in other.items():
            self[key] = val

    def fromkeys(self, iterable, value=None):
        "Creates a new dictionary with keys from seq and values set to value."
        raise NotImplementedError
+
+###############################################################################
+# CaseInsensitiveDict
+#
+# From:
+#        https://github.com/kennethreitz/requests
+#
+# Copyright 2014 Kenneth Reitz
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Resolve abstract base classes from ``collections.abc`` (Python 3), falling
# back to ``collections`` itself (Python 2): the top-level aliases were
# removed from ``collections`` in Python 3.10.
try:
    from collections import abc as _collections_abc
except ImportError:  # Python 2
    _collections_abc = collections

class CaseInsensitiveDict(_collections_abc.MutableMapping):
    """
    A case-insensitive ``dict``-like object.

    Implements all methods and operations of
    ``collections.abc.MutableMapping`` as well as dict's ``copy``. Also
    provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and ``iter(instance)``,
    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
    will contain case-sensitive keys. However, querying and contains
    testing is case insensitive:

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header, regardless
    of how the header name was originally stored.

    If the constructor, ``.update``, or equality comparison
    operations are given keys that have equal ``.lower()``s, the
    behavior is undefined.

    """
    def __init__(self, data=None, **kwargs):
        self._store = dict()
        if data is None:
            data = {}
        self.update(data, **kwargs)

    def __setitem__(self, key, value):
        # Use the lowercased key for lookups, but store the actual
        # key alongside the value.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key):
        return self._store[key.lower()][1]

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        return (casedkey for casedkey, mappedvalue in self._store.values())

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        return (
            (lowerkey, keyval[1])
            for (lowerkey, keyval)
            in self._store.items()
        )

    def __eq__(self, other):
        if isinstance(other, _collections_abc.Mapping):
            other = CaseInsensitiveDict(other)
        else:
            # Fixed: return the ``NotImplemented`` singleton (not the
            # ``NotImplementedError`` exception class) so Python can try the
            # reflected comparison on the other operand.
            return NotImplemented
        # Compare insensitively
        return dict(self.lower_items()) == dict(other.lower_items())

    # Copy is required
    def copy(self):
        # ``_store.values()`` is an iterable of (original-case key, value)
        # pairs, which the MutableMapping-backed constructor accepts.
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
+
+# CaseInsensitiveDict
+###############################################################################
+
+###############################################################################
+## OrderedCaselessDict
+
class OrderedCaselessDict(dict):
    """
    Inherits from dict. Maintains two sets of keys: the first the keys
    belonging to dict, which actually accesses the container
    items. This is always cast to lower() whenever it is called, thus
    ensuring that keys are always of the same case. The second set of
    keys it maintains locally in a list, thus maintaining the order
    in which they were added. The second set of keys is not cast to
    lower(), which means that client code can always recover the
    original 'canonical' casing of the keys.

    ONLY TAKES STRING KEYS!
    """

    def __init__(self, other=None):
        """
        Creates the local ordered-key list, then initializes self with
        items from ``other`` (a mapping, or an iterable of key/value
        pairs), keeping the ordered keys in sync.
        """
        super(OrderedCaselessDict, self).__init__()
        self._ordered_keys = []
        if other is not None:
            # Accept either a mapping or an iterable of (key, value) pairs.
            if isinstance(other, dict):
                source = other.items()
            else:
                source = other
            for key, val in source:
                if key.lower() not in self:
                    self._ordered_keys.append(str(key))
                super(OrderedCaselessDict, self).__setitem__(key.lower(), val)

    def __deepcopy__(self, memo):
        o = self.__class__()
        memo[id(self)] = o
        for key, val in self.items():
            o[key] = copy.deepcopy(val, memo)
        return o

    def copy(self):
        "Returns a shallow copy of self."
        return self.__class__(self)

    def iterkeys(self):
        "Returns an iterator over the canonical-case keys in insertion order."
        return iter(self._ordered_keys)

    def itervalues(self):
        "Returns an iterator over self's values, in key insertion order."
        for key in self._ordered_keys:
            yield self[key.lower()]

    def iteritems(self):
        "Returns an iterator over (canonical key, value) pairs, in insertion order."
        for key in self._ordered_keys:
            yield (key, self[key.lower()])

    def items(self):
        "Returns list of (canonical key, value) pairs in insertion order."
        return [(key, self[key]) for key in self._ordered_keys]

    def values(self):
        "Returns list of values in key insertion order."
        return [self[key] for key in self._ordered_keys]

    def __iter__(self):
        "Returns an iterator over the canonical-case keys in insertion order."
        return iter(self._ordered_keys)

    def __getitem__(self, key):
        "Gets an item using a case-insensitive key."
        return super(OrderedCaselessDict, self).__getitem__(key.lower())

    def __setitem__(self, key, value):
        "Sets an item using a case-insensitive key."
        if key.lower() not in self:
            self._ordered_keys.append(str(key))
        super(OrderedCaselessDict, self).__setitem__(key.lower(), value)

    def __delitem__(self, key):
        "Remove item with specified (case-insensitive) key."
        del self._ordered_keys[self.index(key)]
        super(OrderedCaselessDict, self).__delitem__(key.lower())

    def __contains__(self, key):
        "Returns true if has key, regardless of case."
        return super(OrderedCaselessDict, self).__contains__(key.lower())

    def pop(self, key, alt_val=None):
        "a.pop(k[, x]):  a[k] if k in a, else x (and remove k)"
        if key.lower() in self:
            val = self[key]
            self.__delitem__(key.lower())
            return val
        return alt_val

    def popitem(self):
        "Removes and returns the most recently added (key, value) pair."
        key = self._ordered_keys[-1]
        item = (key, self[key.lower()])
        self.__delitem__(key)
        return item

    def caseless_keys(self):
        "Returns a copy of the ordered list of keys, lower-cased."
        return [k.lower() for k in self._ordered_keys]

    def index(self, key):
        """
        Return the insertion index of (caseless) key.
        Raise KeyError if not found.
        """
        target = key.lower()
        for idx, k in enumerate(self._ordered_keys):
            if k.lower() == target:
                return idx
        raise KeyError(key)

    def keys(self):
        "Returns a copy of the ordered list of canonical-case keys."
        return list(self._ordered_keys)

    def clear(self):
        "Deletes all items from the dictionary."
        self._ordered_keys = []
        super(OrderedCaselessDict, self).clear()

    def has_key(self, key):
        "Returns true if has key, regardless of case."
        return key.lower() in self

    def get(self, key, def_val=None):
        "Gets an item by its key, returning default if key not present."
        return super(OrderedCaselessDict, self).get(key.lower(), def_val)

    def setdefault(self, key, def_val=None):
        "Returns self[key] if present; otherwise inserts key=def_val and returns def_val."
        # Route insertion through __setitem__ so that _ordered_keys stays in
        # sync: previously a new key went into the underlying dict only,
        # making it invisible to keys()/items()/iteration.
        if key.lower() not in self:
            self[key] = def_val
            return def_val
        return self[key]

    def update(self, other):
        """
        updates (and overwrites) key/value pairs:
        k = { 'a':'A', 'b':'B', 'c':'C'}
        q = { 'c':'C', 'd':'D', 'f':'F'}
        k.update(q)
        {'a': 'A', 'c': 'C', 'b': 'B', 'd': 'D', 'f': 'F'}
        """
        for key, val in other.items():
            if key.lower() not in self:
                self._ordered_keys.append(str(key))
            super(OrderedCaselessDict, self).__setitem__(key.lower(), val)

    def fromkeys(self, iterable, value=None):
        "Creates a new OrderedCaselessDict with keys from iterable set to value."
        # Build and return the NEW dict: previously the keys were incorrectly
        # assigned into self while the (empty) new dict was returned.
        ocd = OrderedCaselessDict()
        for key in iterable:
            ocd[key] = value
        return ocd
+
+###############################################################################
+## FrozenOrderedDict
+
class FrozenOrderedDict(collections.OrderedDict):
    """
    An OrderedDict that becomes immutable once construction completes.

    All mutating operations raise ``FrozenOrderedDict.ImmutableTypeError``
    (a TypeError subclass) once ``__init__`` has finished. Copying via
    ``copy.copy``/``copy.deepcopy`` is supported by temporarily unfreezing
    the new instance while it is populated.
    """

    class ImmutableTypeError(TypeError):
        """Raised on any attempt to modify a FrozenOrderedDict."""
        def __init__(self, *args, **kwargs):
            super(FrozenOrderedDict.ImmutableTypeError, self).__init__(*args, **kwargs)

    def __init__(self, *args, **kwargs):
        # Populate while unfrozen (the superclass __init__ routes through
        # our __setitem__), then lock.
        self._is_frozen = False
        super(FrozenOrderedDict, self).__init__(*args, **kwargs)
        self._is_frozen = True

    def _immutable_error(self):
        # Builds the standard "immutable" exception instance.
        return FrozenOrderedDict.ImmutableTypeError(
                "{} is immutable".format(self.__class__.__name__))

    def __setitem__(self, key, value):
        if self._is_frozen:
            raise self._immutable_error()
        super(FrozenOrderedDict, self).__setitem__(key, value)

    def __delitem__(self, key):
        if self._is_frozen:
            raise self._immutable_error()
        super(FrozenOrderedDict, self).__delitem__(key)

    def pop(self, key, alt_val=None):
        if self._is_frozen:
            raise self._immutable_error()
        # Return the popped value (previously discarded).
        return super(FrozenOrderedDict, self).pop(key, alt_val)

    def popitem(self):
        if self._is_frozen:
            raise self._immutable_error()
        # Return the popped pair (previously discarded).
        return super(FrozenOrderedDict, self).popitem()

    def clear(self):
        if self._is_frozen:
            raise self._immutable_error()
        super(FrozenOrderedDict, self).clear()

    def update(self, other):
        if self._is_frozen:
            raise self._immutable_error()
        super(FrozenOrderedDict, self).update(other)

    def fromkeys(self, iterable, value=None):
        if self._is_frozen:
            raise self._immutable_error()
        # Return the constructed dict (previously discarded).
        return super(FrozenOrderedDict, self).fromkeys(iterable, value)

    def __deepcopy__(self, memo):
        # Build the copy unfrozen, deep-copying keys and values, then freeze.
        temp = FrozenOrderedDict()
        temp._is_frozen = False
        for k in self:
            k2 = copy.deepcopy(k, memo)
            memo[id(k)] = k2
            v = self[k]
            v2 = copy.deepcopy(v, memo)
            memo[id(v)] = v2
            temp[k2] = v2
        temp._is_frozen = True
        return temp

    def __copy__(self):
        # Shallow copy: same key/value references in a new frozen container.
        temp = FrozenOrderedDict()
        temp._is_frozen = False
        for k in self:
            temp[k] = self[k]
        temp._is_frozen = True
        return temp
+
+###############################################################################
+## Generic Container Interace (for reference)
+
class _ContainerInterface(list):

    """
    Reference skeleton of the special methods a container type can implement.

    This class is documentation only: every method body is a no-op. For a
    sequence, acceptable keys are integers ``0 <= k < len(self)`` and slice
    objects; for a mapping, arbitrary hashable keys. Mappings should also
    provide the usual dict-like methods (keys(), values(), items(), get(),
    pop(), update(), ...), and mutable sequences the standard list methods.
    See the Python data model documentation for the full contracts.
    """

    def __len__(self):
        """
        Implements ``len(self)``; returns an integer >= 0. An object with no
        __nonzero__() whose __len__() returns zero is falsy in a Boolean
        context.
        """
        pass

    def __getitem__(self, key):
        """
        Implements evaluation of ``self[key]``. Raise TypeError for an
        inappropriate key type; IndexError for an out-of-range sequence index
        (``for`` loops rely on IndexError to detect the end of a sequence);
        KeyError for a missing mapping key. Handling of negative indices is
        up to this method.
        """
        pass

    def __setitem__(self, key, value):
        """
        Implements assignment to ``self[key]``. Only for containers whose
        items can be replaced or added. Same key/exception rules as
        __getitem__().
        """
        pass

    def __delitem__(self, key):
        """
        Implements deletion of ``self[key]``. Only for containers whose items
        can be removed. Same key/exception rules as __getitem__().
        """
        pass

    def __iter__(self):
        """
        Returns a new iterator over the container's items (over the keys, for
        a mapping — i.e. the same as iterkeys()). Iterator objects must
        implement this too and return themselves.
        """
        pass

    def __reversed__(self):
        """
        Called by the reversed() builtin; returns an iterator over the items
        in reverse order. When absent, reversed() falls back to the sequence
        protocol (__len__() plus __getitem__()), so only provide this when it
        can be more efficient than that fallback.
        """
        pass

    def __contains__(self, item):
        """
        Implements the ``in`` / ``not in`` membership tests; returns true if
        item is in self. For mappings this considers the keys, not the values
        or key-item pairs. Without it, membership falls back to iteration.
        """
        pass
+
+###############################################################################
+## RecastingIterator
+
class RecastingIterator(object):
    """
    Given an iterator I_X that returns objects of type X {x1, x2, x3,
    ... etc.}, and a function F(X), that takes objects of type X as an
    argument and returns objects of type Y, F(X) = Y, this class will
    act as an iterator that returns objects of type Y, I_Y, given an
    iterator on X. The 'function' given can be a class if the class's
    constructor takes a single argument of type X.
    """
    def __init__(self, source_iter, casting_fn=None, filter_fn=None):
        """
        ``source_iter`` is an iterator. ``casting_fn`` is a function
        that takes objects returned by ``source_iter`` and returns other
        objects. ``filter_fn`` is what will be applied to the SOURCE object
        to decide if it will be returned.
        """
        self.source_iter = iter(source_iter)
        self.casting_fn = casting_fn
        self.filter_fn = filter_fn

    def __iter__(self):
        "Returns self."
        return self

    def __next__(self):
        """
        Gets next item from the underlying iterator, and if
        filter_fn returns True on it, applies casting_fn to it and
        returns it.
        """
        while True:
            # Use the builtin next() so this works on Python 3 as well:
            # the original called ``self.source_iter.next()``, which only
            # exists on Python 2 iterators.
            source_next = next(self.source_iter)
            if self.filter_fn is None or self.filter_fn(source_next):
                if self.casting_fn is not None:
                    return self.casting_fn(source_next)
                else:
                    return source_next

    next = __next__  # Python 2 iterator-protocol alias
+
+###############################################################################
+## ItemAttributeProviderList
+
class ItemAttributeProxyList(list):
    """
    A list that exposes the value of a named attribute of each stored
    element instead of the element itself.
    """

    def __init__(self, attr_name, *args):
        """
        __init__ calls the list.__init__ with all unnamed args.

        ``attr_name`` is the name of the attribute or property that should be
        returned in place of each element.
        """
        self.bound_attr_name = attr_name
        list.__init__(self, *args)

    def __getitem__(self, *args):
        # Fetch the underlying element, then surface its bound attribute.
        item = list.__getitem__(self, *args)
        return getattr(item, self.bound_attr_name)

    def __iter__(self):
        """
        Iterates over all elements in self, returning their `<bound_attr_name>`
        attribute.
        """
        for item in list.__iter__(self):
            yield getattr(item, self.bound_attr_name)

    def aggregate(self):
        """
        Returns a shallow-copy list of the `<bound_attr_name>` attribute of self's
        members.
        """
        # Iterate the RAW stored elements: iterating ``self`` already yields
        # the bound attribute (see __iter__), so the previous implementation
        # applied the attribute lookup twice and raised AttributeError.
        return [getattr(item, self.bound_attr_name) for item in list.__iter__(self)]
+
+###############################################################################
+## ItemSublistProxyList
+
class ItemSublistProxyList(list):
    """
    A list that yields the elements of the bound list attribute of its own
    stored items ("flattening" one level), instead of the items themselves.
    """

    def __init__(self, attr_name, *args):
        """
        __init__ calls the list.__init__ with all unnamed args.

        ``attr_name`` is the name of the list attribute or property whose
        elements should be returned.
        """
        self.bound_attr_name = attr_name
        list.__init__(self, *args)

    def __str__(self):
        return str([str(i) for i in self])

    def __getitem__(self, *args):
        req_idx = int(args[0])
        idx = -1  # keeps the error message well-defined for an empty proxy
        for idx, element in enumerate(self):
            if idx == req_idx:
                return element
        # Previously an empty proxy raised NameError here ('idx' unbound);
        # now IndexError is raised consistently.
        raise IndexError("list index out of range: %d (max=%d)" % (req_idx, idx))

    def __len__(self):
        # Length of the flattened view (sum of the sublist lengths).
        return len([i for i in self])

    def __iter__(self):
        """
        Iterates over all elements in self, returning the elements of their
        `<bound_attr_name>` list attribute, one sublist at a time.
        """
        for item in list.__iter__(self):
            for sublist_item in getattr(item, self.bound_attr_name):
                yield sublist_item

    def aggregate(self):
        """
        Returns a shallow-copy (flattened) list of the `<bound_attr_name>`
        elements of self's members.
        """
        return [i for i in self]
+
diff --git a/dendropy/utility/debug.py b/dendropy/utility/debug.py
new file mode 100644
index 0000000..8674d3c
--- /dev/null
+++ b/dendropy/utility/debug.py
@@ -0,0 +1,37 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Various data structures.
+"""
+
def get_calling_code_info(stack_level):
    """
    Return a ``(filename, line_number)`` tuple describing the frame
    ``stack_level`` levels up the current call stack.
    """
    # Local import: this module declares no top-level imports, so the
    # original body raised NameError on ``inspect``.
    import inspect
    # Fixed: the parameter is ``stack_level``; the original body referenced
    # an undefined name ``stacklevel``.
    frame = inspect.stack()[stack_level]
    filename = inspect.getfile(frame[0])
    lineno = inspect.getlineno(frame[0])
    return filename, lineno
+
def dump_stack(out=None):
    """
    Write a one-line-per-frame summary of the current call stack (skipping
    this function and its caller) to ``out``, defaulting to ``sys.stderr``.
    """
    # Local imports: this module declares no top-level imports, so the
    # original body raised NameError on ``sys``/``inspect``.
    import inspect
    import sys
    if out is None:
        out = sys.stderr
    for frame, filename, line_num, func, source_code, source_index in inspect.stack()[2:]:
        if source_code is None:
            out.write("{}: {}\n".format(filename, line_num))
        else:
            out.write("{}: {}: {}\n".format(filename, line_num, source_code[source_index].strip()))
+
diff --git a/dendropy/utility/deprecate.py b/dendropy/utility/deprecate.py
new file mode 100644
index 0000000..7167f76
--- /dev/null
+++ b/dendropy/utility/deprecate.py
@@ -0,0 +1,74 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Handling deprecation warnings and messages correctly.
+"""
+
+import os
+import warnings
+from dendropy.utility import metavar
+
# Active warnings-filter action for CriticalDeprecationWarning ("default",
# "error", "ignore", ...); assigned by configure_deprecation_warning_behavior().
DEPRECATION_WARNING_FILTER = None
# Tracks whether configure_deprecation_warning_behavior() has already run.
_DEPRECATION_WARNINGS_CONFIGURED = False
+
class CriticalDeprecationWarning(UserWarning):
    # Warning category used for DendroPy deprecation messages; its filter
    # action is installed by configure_deprecation_warning_behavior().
    pass
+
def configure_deprecation_warning_behavior(warning_filter=None):
    """
    Install ``warning_filter`` as the warnings-filter action for
    CriticalDeprecationWarning; when not given, the action is read from the
    environment variable named by ``metavar.DEPRECATION_WARNING_FILTER``
    (defaulting to "default").
    """
    global DEPRECATION_WARNING_FILTER
    global _DEPRECATION_WARNINGS_CONFIGURED
    chosen_filter = warning_filter
    if chosen_filter is None:
        chosen_filter = os.environ.get(metavar.DEPRECATION_WARNING_FILTER, "default")
    DEPRECATION_WARNING_FILTER = chosen_filter
    warnings.simplefilter(DEPRECATION_WARNING_FILTER, CriticalDeprecationWarning)
    _DEPRECATION_WARNINGS_CONFIGURED = True
+
def _initialize_deprecation_warnings():
    """Ensure the deprecation-warning filter has been configured (idempotent)."""
    # Read-only access to the module flag needs no ``global`` declaration.
    if not _DEPRECATION_WARNINGS_CONFIGURED:
        configure_deprecation_warning_behavior()
+
def dendropy_deprecation_warning(**kwargs):
    """
    Issue a CriticalDeprecationWarning.

    Keyword arguments: either ``message`` directly, or ``old_construct`` and
    ``new_construct`` (with optional ``preamble`` and ``epilog``) from which
    a before/after message is composed. ``stacklevel`` (default 3) is passed
    through to ``warnings.warn``.

    Raises ValueError if no message-forming arguments are given.
    """
    _initialize_deprecation_warnings()
    leader = "  # "
    stacklevel = kwargs.pop("stacklevel", 3)
    if "message" in kwargs:
        message = kwargs["message"]
    elif "old_construct" in kwargs or "new_construct" in kwargs:
        message = []
        message.append("")
        if "preamble" in kwargs:
            message.append(leader + kwargs["preamble"])
        message.append(leader + "Instead of:")
        for construct in kwargs["old_construct"].split("\n"):
            message.append(leader + "    {}".format(construct))
        message.append(leader + "Use:")
        for construct in kwargs["new_construct"].split("\n"):
            message.append(leader + "    {}".format(construct))
        if "epilog" in kwargs:
            message.append(leader + kwargs["epilog"])
        message = "\n".join(message)
    else:
        # Previously this fell through with ``message`` unbound and raised
        # NameError; fail with a clear error instead.
        raise ValueError("either 'message' or 'old_construct'/'new_construct' must be specified")
    # (Removed: a redundant second _initialize_deprecation_warnings() call
    # and an unused ``old_formatwarning`` binding.)
    warnings.warn(
            message=message,
            category=CriticalDeprecationWarning,
            stacklevel=stacklevel,
            )
diff --git a/dendropy/utility/error.py b/dendropy/utility/error.py
new file mode 100644
index 0000000..3e0aa3e
--- /dev/null
+++ b/dendropy/utility/error.py
@@ -0,0 +1,192 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Errors, exceptions, warnings, etc.
+"""
+
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+import sys
+import re
+import warnings
+import inspect
+import subprocess
+
class ImmutableTaxonNamespaceError(TypeError):
    """Raised on attempts to modify an immutable TaxonNamespace."""
    def __init__(self, message):
        super(ImmutableTaxonNamespaceError, self).__init__(message)
+
class DataError(Exception):
    """
    Base error for problems with a phylogenetic data source; records the
    message plus (optionally) the source name, line number, and column.
    """

    def __init__(self,
            message=None,
            line_num=None,
            col_num=None,
            filename=None,
            stream=None):
        Exception.__init__(self)
        self.line_num = line_num
        self.col_num = col_num
        self.message = message
        self.stream = stream
        self.filename = None
        self.decorate_with_name(filename=filename, stream=stream)

    def decorate_with_name(self,
            filename=None,
            stream=None):
        """Record the source's name from ``filename`` and/or ``stream.name``."""
        if filename is not None:
            self.filename = filename
        if stream is not None:
            try:
                self.filename = stream.name
            except AttributeError:
                # Streams without a name (e.g. in-memory buffers) are fine.
                pass

    def __str__(self):
        source_part = " '{}'".format(self.filename) if self.filename else ""
        line_part = " on line {}".format(self.line_num) if self.line_num is not None else ""
        col_part = " at column {}".format(self.col_num) if self.col_num is not None else ""
        return "Error parsing data source{}{}{}: {}".format(
                source_part, line_part, col_part, self.message)
+
class DataParseError(DataError):
    """Raised when a phylogenetic data source cannot be parsed."""

    def __init__(self,
            message=None,
            line_num=None,
            col_num=None,
            filename=None,
            stream=None):
        # Simple pass-through to the DataError initializer.
        DataError.__init__(
                self,
                message=message,
                line_num=line_num,
                col_num=col_num,
                filename=filename,
                stream=stream)
+
class UnsupportedSchemaError(NotImplementedError):
    """Raised when a requested data schema is not supported."""
    def __init__(self, *args, **kwargs):
        super(UnsupportedSchemaError, self).__init__(*args, **kwargs)
+
class UnspecifiedSchemaError(Exception):
    """Raised when no data schema is specified where one is required."""
    def __init__(self, *args, **kwargs):
        super(UnspecifiedSchemaError, self).__init__(*args, **kwargs)
+
class UnspecifiedSourceError(Exception):
    """Raised when no data source is specified where one is required."""
    def __init__(self, *args, **kwargs):
        super(UnspecifiedSourceError, self).__init__(*args, **kwargs)
+
class TooManyArgumentsError(TypeError):
    """
    Raised when a callable receives more arguments than it allows; composes a
    standard message from ``func_name``/``max_args``/``args`` when no
    explicit ``message`` is given.
    """

    def __init__(self,
            message=None,
            func_name=None,
            max_args=None,
            args=None):
        if message is None and (func_name is not None and max_args):
            # Guard against args=None: the original called len(args)
            # unconditionally and raised TypeError inside the constructor.
            num_given = len(args) if args is not None else 0
            message = "{}() takes a maximum of {} arguments ({} given)".format(func_name, max_args, num_given)
        TypeError.__init__(self, message)
+
class InvalidArgumentValueError(ValueError):
    """Raised when a callable receives an argument of an unaccepted type."""

    def __init__(self, message=None, func_name=None, arg=None):
        if message is None and func_name is not None and arg is not None:
            message = "{}() does not accept objects of type '{}' as an argument".format(
                    func_name, arg.__class__.__name__)
        super(InvalidArgumentValueError, self).__init__(message)
+
class MultipleInitializationSourceError(TypeError):
    """Raised when an object is initialized from more than one data source."""

    def __init__(self, message=None, class_name=None, arg=None):
        if message is None and class_name is not None and arg is not None:
            message = "{}() does not accept data 'stream' or 'schema' arguments when initializing with another object".format(class_name)
        super(MultipleInitializationSourceError, self).__init__(message)
+
class TaxonNamespaceIdentityError(ValueError):
    """Raised when two objects reference non-identical TaxonNamespace instances."""

    def __init__(self, o1, o2):
        template = "<TaxonNamespace object at {}>"
        message = "Non-identical taxon namespace references: {} is not {}".format(
                template.format(hex(id(o1.taxon_namespace))),
                template.format(hex(id(o2.taxon_namespace))),
                )
        super(TaxonNamespaceIdentityError, self).__init__(message)
+
class MixedRootingError(ValueError):
    """ValueError subclass signaling a disallowed mix of tree rooting states."""
    def __init__(self, message=None):
        super(MixedRootingError, self).__init__(message)
+
class TaxonNamespaceReconstructionError(ValueError):
    """ValueError subclass signaling a failure to reconstruct a taxon namespace."""
    def __init__(self, message=None):
        super(TaxonNamespaceReconstructionError, self).__init__(message)
+
class UltrametricityError(ValueError):
    """ValueError subclass signaling a violated ultrametricity requirement."""
    def __init__(self, message=None):
        super(UltrametricityError, self).__init__(message)
+
class TreeSimTotalExtinctionException(Exception):
    """Exception to be raised when branching process results in all lineages going extinct."""
    def __init__(self, *args, **kwargs):
        super(TreeSimTotalExtinctionException, self).__init__(*args, **kwargs)
+
class SeedNodeDeletionException(Exception):
    """Exception raised on (invalid) seed node deletion; see callers for context."""
    def __init__(self, *args, **kwargs):
        super(SeedNodeDeletionException, self).__init__(*args, **kwargs)
+
class ExternalServiceError(Exception):
    """
    Raised when an invoked external service/process fails; records the
    command, its input, the return code, and both output streams.
    """

    def __init__(self,
            service_name,
            invocation_command,
            service_input,
            returncode,
            stdout,
            stderr):
        self.service_name = service_name
        self.invocation_command = invocation_command
        self.service_input = service_input
        self.returncode = returncode
        self.stdout = stdout
        self.stderr = stderr
        self.message = self.compose_message()

    def __str__(self):
        return self.compose_message()

    def compose_message(self):
        """Render a multi-line report of the failed service invocation."""
        report_lines = [
            "- Service process {} exited with return code: {}".format(self.service_name, self.returncode),
            "- Service invoked with command:",
            "   {}".format(self.invocation_command),
            "<<<<<<< (SERVICE STANDARD INPUT)",
            self.service_input,
            ">>>>>>>",
            "<<<<<<< (SERVICE STANDARD OUTPUT)",
            self.stdout,
            ">>>>>>>",
            "<<<<<<< (SERVICE STANDARD ERROR)",
            self.stderr,
            ">>>>>>>",
        ]
        return "\n".join(report_lines)
diff --git a/dendropy/utility/filesys.py b/dendropy/utility/filesys.py
new file mode 100644
index 0000000..50479d5
--- /dev/null
+++ b/dendropy/utility/filesys.py
@@ -0,0 +1,338 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Various utilities in support of filesystem interaction.
+"""
+
+import fnmatch
+import time
+import os
+import sys
+import re
+from threading import Event, Thread, Lock
+
+from dendropy.utility import messaging
+_LOG = messaging.get_logger(__name__)
+
+###############################################################################
+## Universal Opening
+
def pre_py34_open(file,
        mode='r',
        buffering=-1,
        encoding=None,
        errors=None,
        newline=None,
        closefd=True,
        opener=None):
    """Shim approximating the Python 3 ``open()`` signature on older runtimes.

    Only the ``(file, mode, buffering)`` subset is supported: supplying a
    non-default ``encoding``, ``errors``, ``newline``, ``closefd``, or
    ``opener`` raises ``NotImplementedError``.  Read modes get ``"U"``
    appended for universal-newline translation.
    """
    unsupported = (
            encoding is not None
            or errors is not None
            or newline is not None
            or closefd is not True
            or opener is not None)
    if unsupported:
        raise NotImplementedError
    if mode.startswith("r"):
        mode = mode + "U"
    return open(file, mode=mode, buffering=buffering)
+
+###############################################################################
+## LineReadingThread
+
DEFAULT_SLEEP_INTERVAL=0.1

class LineReadingThread(Thread):
    """A thread that will read the input stream - designed to work with a file
    that is being written. Note that if the file does not end with a newline
    and the keep_going() method does not return False, then the thread will not
    terminate

    self.keep_going()
    is called with each line. (sub classes should override).

    LineReadingThread.__init__ must be called by subclasses.
    """

    def __init__(self,
                lineCallback=None,
                stream=None,
                filename="",
                stop_event=None,
                sleep_interval=DEFAULT_SLEEP_INTERVAL,
                store_lines=False,
                is_file=True,
                subproc=None,
                *args,
                **kwargs):
        """
        __init__ processes the following arguments:

            - ``lineCallback`` is the callable that takes a string that is each line, and returns False to stop reading.  This is a way of using the class without sub-classing and overriding keep_going
            - ``stream`` is in input file-like object
            - ``filename`` can be sent instead of ``stream``, it should be the path to the file to read.
            - ``stop_event`` is an Event, that will kill the thread if it is triggered.
            - ``sleep_interval`` is the interval to sleep while waiting for a new tree to appear.

        All other arguments are passed to the Thread.__init__()
        """
        self.stream = stream
        self.filename = filename
        self.lineCallback = lineCallback
        # Partial (newline-less) line carried over between reads.
        self.unfinished_line = None
        self.stop_event = stop_event
        self.sleep_interval = sleep_interval
        self.store_lines = store_lines
        if store_lines:
            # self.lines is appended from the reader thread; guard access.
            self.line_list_lock = Lock()
        self.is_file = is_file
        self.lines = []
        self.subproc = subproc
        self.stop_on_subproc_exit = kwargs.get('stop_on_subproc_exit', False)
        # BUG FIX: the original passed args=tuple(*args), which raises
        # TypeError whenever any extra positional argument is supplied;
        # tuple(args) is the correct identity conversion.
        Thread.__init__(self, group=None, target=None, name=None,
                        args=tuple(args), kwargs=dict(**kwargs))

    def wait_for_file_to_appear(self, filename):
        """Blocks until the file ``filename`` appears or stop_event is triggered.

        Returns True if ``filename`` exists.

        Checks for the stop_event *before* checking for the file existence.
        (paup_wrap and raxml_wrap threads depend on this behavior).
        """
        while True:
            # is_set() replaces the deprecated isSet() alias.
            if (self.stop_event is not None) and self.stop_event.is_set():
                return False
            if os.path.exists(filename):
                return True
            #_LOG.debug("Waiting for %s" %filename)
            time.sleep(self.sleep_interval)

    def open_file_when_exists(self, filename):
        """Blocks until the file ``filename`` appears and then returns a file
        object opened for reading with universal-newline translation.

        Returns None if the stop event is triggered.
        """
        if self.wait_for_file_to_appear(filename):
            # BUG FIX: mode "rU" is an error as of Python 3.11; plain "r"
            # already performs universal-newline translation on Python 3.
            return open(filename, "r")
        return None

    def run(self):
        # Resolve the stream from the filename if one was not given directly.
        if self.stream is None:
            if not self.filename:
                _LOG.debug('"stream" and "filename" both None when LineReadingThread.run called')
                return
            self.stream = self.open_file_when_exists(self.filename)
            if self.stream is None:
                return
        self._read_stream()

    def keep_going(self, line):
        """Process one complete line; return False to stop the reader loop.

        Subclasses may override; by default, delegates to ``lineCallback``
        if one was supplied, otherwise keeps reading until the watched
        subprocess (if any) exits.
        """
        _LOG.debug("In keep_going: " + line)
        if self.store_lines:
            self.line_list_lock.acquire()
            try:
                self.lines.append(line)
                _LOG.debug("self.lines = %s" % str(self.lines))
            finally:
                self.line_list_lock.release()
        if self.lineCallback is None:
            r = True
            if self.subproc:
                _LOG.debug("subproc is not None")
                if self.subproc.returncode is None:
                    _LOG.debug("subproc.returncode is None")
                else:
                    _LOG.debug("subproc.returncode is %d" % self.subproc.returncode)
                    if self.store_lines:
                        _LOG.debug("about to call readlines")
                        # Drain whatever remains in the stream once the
                        # subprocess has exited.
                        line = line + "\n".join(self.stream.readlines())
                    r = False
            else:
                _LOG.debug("subproc is None")
            return r
        return self.lineCallback(line)

    def _read_stream(self):
        """Poll the stream, assembling complete lines and feeding them to
        ``keep_going()`` until stopped, EOF-on-exit, or a callback says stop."""
        self.unfinished_line = ""
        while True:
            if (self.stop_event is not None) and self.stop_event.is_set():
                # when we terminate because of an event setting,
                # we pass any unfinished_line line that we have to
                if self.unfinished_line is not None:
                    self.keep_going(self.unfinished_line)
                break
            _LOG.debug("about to readline")
            line = self.stream.readline()
            if not line:
                _LOG.debug("line is empty")
                if self.stop_on_subproc_exit:
                    self.subproc.poll()
                    if self.subproc is not None:
                        _LOG.debug("subproc is not None")
                        if self.subproc.returncode is not None:
                            _LOG.debug("subproc.returncode is %d" % self.subproc.returncode)
                            _LOG.debug("%s" % repr(self.stream))
                            # Subprocess finished: flush any remaining output
                            # through keep_going() and stop.
                            l = "".join(self.stream.readlines())
                            if l:
                                self.keep_going(l)
                            break
                        else:
                            _LOG.debug("subproc.returncode is None")
                    else:
                        _LOG.debug("subproc is None")
            else:
                _LOG.debug('line is "%s"' % line)
            if not line.endswith("\n"):
                # Incomplete line: accumulate it and wait for the writer to
                # finish it (the file may still be being written).
                if self.unfinished_line:
                    self.unfinished_line = self.unfinished_line + line
                else:
                    self.unfinished_line = line
                time.sleep(self.sleep_interval)
            else:
                if self.unfinished_line:
                    line = self.unfinished_line + line
                self.unfinished_line = ""
                if not self.keep_going(line):
                    break
        _LOG.debug("LineReadingThread exiting")
+
+###############################################################################
+## File Finding
+
def glob_match(pathname, pattern, respect_case=False, complement=False):
    """Test ``pathname`` against the shell glob ``pattern``.

    ``respect_case`` makes the comparison case-sensitive (both operands are
    lower-cased otherwise); ``complement`` inverts the result.
    """
    if respect_case:
        is_match = fnmatch.fnmatchcase(pathname, pattern)
    else:
        is_match = fnmatch.fnmatch(pathname.lower(), pattern.lower())
    if complement:
        return not is_match
    return is_match
+
def find_files(top,
               recursive=True,
               filename_filter=None,
               dirname_filter=None,
               excludes=None,
               complement=False,
               respect_case=False,
               expand_vars=True,
               include_hidden=True):
    """Return a sorted list of absolute paths of files found under ``top``.

    - ``filename_filter`` / ``dirname_filter``: glob patterns applied (via
      ``glob_match``) to file and directory basenames respectively.
    - ``excludes``: list of glob patterns; matching files are skipped.
    - ``complement`` / ``respect_case``: passed through to ``glob_match``.
    - ``expand_vars``: expand ``~`` and environment variables in ``top``
      (disabled on recursive calls, which already receive absolute paths).
    - ``include_hidden``: if False, skip dotfiles and dot-directories.

    Returns an empty list if ``top`` does not exist.
    """
    if expand_vars:
        top = os.path.abspath(os.path.expandvars(os.path.expanduser(top)))
    if excludes is None:
        excludes = []
    filepaths = []
    if os.path.exists(top):
        for fpath in os.listdir(top):
            abspath = os.path.abspath(os.path.join(top, fpath))
            if os.path.isfile(abspath):
                if (include_hidden or not fpath.startswith('.')) \
                        and (not filename_filter or glob_match(fpath, filename_filter, respect_case, complement)):
                    to_exclude = any(glob_match(fpath, e, respect_case) for e in excludes)
                    if not to_exclude:
                        filepaths.append(abspath)
            elif os.path.isdir(abspath) and recursive:
                if (include_hidden or not fpath.startswith('.')) \
                        and (not dirname_filter or glob_match(fpath, dirname_filter, respect_case, complement)):
                    filepaths.extend(find_files(abspath,
                                                recursive=recursive,
                                                filename_filter=filename_filter,
                                                dirname_filter=dirname_filter,
                                                excludes=excludes,
                                                complement=complement,
                                                respect_case=respect_case,
                                                expand_vars=False,
                                                # BUG FIX: propagate include_hidden;
                                                # previously hidden files inside
                                                # subdirectories were always included
                                                include_hidden=include_hidden))
    filepaths.sort()
    return filepaths
+
+
+# from http://snippets.dzone.com/posts/show/6313
# from http://snippets.dzone.com/posts/show/6313
def find_executable(executable, path=None):
    """Try to find 'executable' in the directories listed in 'path' (a
    string listing directories separated by 'os.pathsep'; defaults to
    os.environ['PATH']).  Returns the complete filename or None if not
    found
    """
    if path is None:
        path = os.environ['PATH']
    search_dirs = path.split(os.pathsep)
    candidate_exts = ['']
    if os.name == 'os2':
        (base, ext) = os.path.splitext(executable)
        # executable files on OS/2 can have an arbitrary extension, but
        # .exe is automatically appended if no dot is present in the name
        if not ext:
            executable = executable + ".exe"
    elif sys.platform == 'win32':
        pathext = os.environ['PATHEXT'].lower().split(os.pathsep)
        (base, ext) = os.path.splitext(executable)
        if ext.lower() not in pathext:
            candidate_exts = pathext
    for candidate_ext in candidate_exts:
        candidate = executable + candidate_ext
        # bare name (relative to the current directory) first...
        if os.path.isfile(candidate):
            return candidate
        # ...then each directory on the search path
        for directory in search_dirs:
            full_path = os.path.join(directory, candidate)
            if os.path.isfile(full_path):
                return full_path
    return None
+
+###############################################################################
+## Dealing with streams that may not have been opened with the universal
+## newline option
+
def get_lines(stream):
    """
    Parse stream into lines, dealing with all line break conventions.

    Reads the entire stream and splits on CRLF, LF, or lone CR.
    """
    return re.split(r'\r\n|\n|\r', stream.read())
+
+
diff --git a/dendropy/utility/libexec/__init__.py b/dendropy/utility/libexec/__init__.py
new file mode 100644
index 0000000..97ffbf3
--- /dev/null
+++ b/dendropy/utility/libexec/__init__.py
@@ -0,0 +1,37 @@
+
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Scripts and other files that executed, sourced, invoked, or otherwise used by
+various DendroPy entities.
+"""
+
+import os
+
def filepath(filename):
    """Return the filesystem path of libexec resource ``filename``.

    Resolution order: ``pkg_resources`` (which, for a zipped egg, extracts
    the resource to a temporary file and returns that path), falling back
    to a path relative to this module's directory.
    """
    try:
        import pkg_resources
        # note that this creates a temporary file with the contents of the
        # filename if the package is in an egg
        resource_path = pkg_resources.resource_filename("dendropy", "utility/libexec/{}".format(filename))
        # print("-->{}".format(resource_path))
    except Exception:
        # BUG FIX: narrowed from a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt; the fallback is still deliberately
        # broad (any pkg_resources failure routes here).
        resource_path = os.path.normpath(os.path.join(os.path.dirname(__file__), filename))
    return resource_path
+
diff --git a/dendropy/utility/messaging.py b/dendropy/utility/messaging.py
new file mode 100644
index 0000000..f1026b3
--- /dev/null
+++ b/dendropy/utility/messaging.py
@@ -0,0 +1,177 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Messaging, logging and support.
+"""
+
+import sys
+import os
+import logging
+import textwrap
+from dendropy.utility import metavar
+
+###############################################################################
+## metavar.LOGGING
+
def get_logging_level():
    """Map the environment variable named by ``metavar.LOGGING_LEVEL_ENVAR``
    to a ``logging`` level constant.

    Unset, empty, or unrecognized values yield ``logging.NOTSET``.
    """
    name_to_level = {
            "NOTSET": logging.NOTSET,
            "DEBUG": logging.DEBUG,
            "INFO": logging.INFO,
            "WARNING": logging.WARNING,
            "ERROR": logging.ERROR,
            "CRITICAL": logging.CRITICAL,
            }
    if metavar.LOGGING_LEVEL_ENVAR not in os.environ:
        return logging.NOTSET
    level_name = os.environ[metavar.LOGGING_LEVEL_ENVAR].upper()
    return name_to_level.get(level_name, logging.NOTSET)
+
def get_logger(name="dendropy"):
    """
    Returns a logger with name set as given, and configured
    to the level given by the environment variable metavar.LOGGING_LEVEL_ENVAR.
    """

#     package_dir = os.path.dirname(module_path)
#     config_filepath = os.path.join(package_dir, metavar.LOGGING_CONFIG_FILE)
#     if os.path.exists(config_filepath):
#         try:
#             logging.config.fileConfig(config_filepath)
#             logger_set = True
#         except:
#             logger_set = False
    logger = logging.getLogger(name)
    # "is_configured" is a custom attribute stamped onto the logger object so
    # that repeated get_logger() calls do not attach duplicate handlers.
    if not hasattr(logger, 'is_configured'):
        logger.is_configured = False
    if not logger.is_configured:
        level = get_logging_level()
        rich_formatter = logging.Formatter("[%(asctime)s] %(filename)s (%(lineno)d): %(levelname) 8s: %(message)s")
        simple_formatter = logging.Formatter("%(levelname) 8s: %(message)s")
        raw_formatter = logging.Formatter("%(message)s")  # NOTE(review): defined but never selected below
        default_formatter = None
        logging_formatter = default_formatter
        # Formatter selection is driven by the LOGGING_FORMAT env var:
        # "RICH", "SIMPLE", or "NONE"; None means the logging module's
        # built-in default format.
        if metavar.LOGGING_FORMAT_ENVAR in os.environ:
            if os.environ[metavar.LOGGING_FORMAT_ENVAR].upper() == "RICH":
                logging_formatter = rich_formatter
            elif os.environ[metavar.LOGGING_FORMAT_ENVAR].upper() == "SIMPLE":
                logging_formatter = simple_formatter
            elif os.environ[metavar.LOGGING_FORMAT_ENVAR].upper() == "NONE":
                logging_formatter = None
            else:
                logging_formatter = default_formatter
        else:
            logging_formatter = default_formatter
        if logging_formatter is not None:
            # Timestamp format is assigned after construction; Formatter
            # reads self.datefmt at format time, so this is effective.
            logging_formatter.datefmt='%H:%M:%S'
        logger.setLevel(level)
        ch = logging.StreamHandler()
        ch.setLevel(level)
        ch.setFormatter(logging_formatter)
        logger.addHandler(ch)
        logger.is_configured = True
    return logger
+
class ConsoleMessenger(object):
    """Writes leveled, optionally line-wrapped status messages to a stream.

    Messages below ``messaging_level`` (or any message while ``silent`` is
    True) are suppressed.  Each emitted message is prefixed with a leader
    derived from ``name`` and the message level.
    """

    ERROR_MESSAGING_LEVEL = 2000
    WARNING_MESSAGING_LEVEL = 1000
    INFO_MESSAGING_LEVEL = 100

    def __init__(self,
            name="DendroPy",
            messaging_level=None,
            dest=sys.stderr):
        """
        ``name`` is prepended to every message; ``messaging_level`` is the
        minimum level emitted (defaults to INFO_MESSAGING_LEVEL); ``dest``
        is the output stream.
        """
        self.name = name
        if messaging_level is None:
            self.messaging_level = ConsoleMessenger.INFO_MESSAGING_LEVEL
        else:
            self.messaging_level = messaging_level
        self.primary_out = dest
        # Wrap to 78 columns; continuation lines are indented to align
        # under the "<name>: " leader.
        self.text_wrapper = textwrap.TextWrapper(width=78, initial_indent= "", subsequent_indent=" " * (len(self.name) + 2))
        self.message_leader = {
                ConsoleMessenger.ERROR_MESSAGING_LEVEL : self.error_leader,
                ConsoleMessenger.WARNING_MESSAGING_LEVEL : self.warning_leader,
                ConsoleMessenger.INFO_MESSAGING_LEVEL : self.info_leader
                }
        self.silent = False

    def error_leader(self):
        return self.name + ": [ERROR] "

    def warning_leader(self):
        return self.name + ": [WARNING] "

    def info_leader(self):
        return self.name + ": "

    def format_message(self, msg, level, wrap=True, prefix=""):
        """Prepend the level leader (and ``prefix``) to ``msg``, wrapping
        long lines when ``wrap`` is True."""
        if not wrap:
            msg = self.message_leader[level]() + prefix + msg
        else:
            if prefix:
                full_leader = self.message_leader[level]() + prefix
                msg = textwrap.fill(
                    msg,
                    width=self.text_wrapper.width,
                    initial_indent=full_leader,
                    subsequent_indent=" " * len(full_leader))
            else:
                msg = self.text_wrapper.fill(self.message_leader[level]() + msg)
        return msg

    def log(self, msg, level=0, wrap=True, prefix="", newline=True):
        """Emit ``msg`` if ``level`` meets the messaging threshold."""
        if self.silent:
            return
        if level >= self.messaging_level:
            msg = self.format_message(msg, level, wrap=wrap, prefix=prefix)
            self.primary_out.write(msg)
            if newline:
                self.primary_out.write("\n")

    def log_lines(self, msg, level=None, wrap=True, prefix=""):
        """Emit each line of the iterable ``msg`` at ``level``."""
        # BUG FIX: the original default of level=None made the
        # ``level >= self.messaging_level`` comparison in log() raise
        # TypeError on Python 3; default to the INFO level instead.
        if level is None:
            level = ConsoleMessenger.INFO_MESSAGING_LEVEL
        for line in msg:
            self.log(msg=line, level=level, wrap=wrap, prefix=prefix)

    def error(self, msg, wrap=True, prefix=""):
        self.log(msg, level=ConsoleMessenger.ERROR_MESSAGING_LEVEL, wrap=wrap, prefix=prefix)

    def warning(self, msg, wrap=True, prefix=""):
        self.log(msg, level=ConsoleMessenger.WARNING_MESSAGING_LEVEL, wrap=wrap, prefix=prefix)

    def info(self, msg, wrap=True, prefix=""):
        self.log(msg, level=ConsoleMessenger.INFO_MESSAGING_LEVEL, wrap=wrap, prefix=prefix)

    def info_lines(self, msg, wrap=True, prefix=""):
        for line in msg:
            self.log(msg=line, level=ConsoleMessenger.INFO_MESSAGING_LEVEL, wrap=wrap, prefix=prefix)

    def info_raw(self, msg):
        # Unconditional, unwrapped write (no trailing newline), still
        # subject to the messaging-level threshold but not to ``silent``
        # (mirrors the original behavior).
        if self.messaging_level <= ConsoleMessenger.INFO_MESSAGING_LEVEL:
            self.primary_out.write(self.info_leader() + msg)
+
+
+
+
diff --git a/dendropy/utility/metavar.py b/dendropy/utility/metavar.py
new file mode 100644
index 0000000..41dffac
--- /dev/null
+++ b/dendropy/utility/metavar.py
@@ -0,0 +1,36 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Environmental variables controlling DendroPy behavior (mostly for
+development/testing usage).
+"""
+
# When set, tests marked as incomplete fail instead of being skipped
# (name taken literally from the variable; confirm at the usage site).
FAIL_INCOMPLETE_TESTS_ENVAR        = "DENDROPY_FAIL_INCOMPLETE_TESTS"
# Logging level name ("DEBUG", "INFO", ...); read by messaging.get_logging_level().
LOGGING_LEVEL_ENVAR                = "DENDROPY_LOGGING_LEVEL"
# Logging format selector ("RICH", "SIMPLE", or "NONE"); read by messaging.get_logger().
LOGGING_FORMAT_ENVAR               = "DENDROPY_LOGGING_FORMAT"
# Filesystem path to the PAUP* executable.
DENDROPY_PAUP_PATH_ENVAR           = "DENDROPY_PAUP_EXECUTABLE_PATH"
# Filesystem path to the Rscript executable.
DENDROPY_RSCRIPT_PATH_ENVAR        = "DENDROPY_RSCRIPT_EXECUTABLE_PATH"

# Warning-filter action for deprecation warnings; recognized values:
# error: Turn the warning into an exception.
# ignore: Discard the warning.
# always: Always emit a warning.
# default: Print the warning the first time it is generated from each location.
# module: Print the warning the first time it is generated from each module.
# once: Print the warning the first time it is generated.
DEPRECATION_WARNING_FILTER         = "DENDROPY_DEPRECATION_WARNINGS"
diff --git a/dendropy/utility/processio.py b/dendropy/utility/processio.py
new file mode 100644
index 0000000..23bef31
--- /dev/null
+++ b/dendropy/utility/processio.py
@@ -0,0 +1,110 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wraps external process as a processio, i.e., allow for non-blocking
+read/writes to stdout/stderr/stdin.
+"""
+
+from dendropy.utility import textprocessing
+import sys
+import subprocess
+import threading
+
+try:
+    from Queue import Queue, Empty
+except ImportError:
+    from queue import Queue, Empty  # python 3.x
+
+ON_POSIX = 'posix' in sys.builtin_module_names
+
+############################################################################
+## Handling of byte/string conversion during subprocess calls
+
def communicate(p, commands=None):
    """Invoke ``p.communicate()``, encoding the input commands to bytes and
    decoding any captured stdout/stderr back to text.

    ``commands`` may be None, a string, or a list/tuple of items (which are
    stringified and joined with newlines).
    """
    if isinstance(commands, (list, tuple)):
        commands = "\n".join(str(c) for c in commands)
    encoded = None if commands is None else str.encode(commands)
    raw_out, raw_err = p.communicate(encoded)
    decoded_out = raw_out if raw_out is None else textprocessing.bytes_to_text(raw_out)
    decoded_err = raw_err if raw_err is None else textprocessing.bytes_to_text(raw_err)
    return decoded_out, decoded_err
+
+############################################################################
+## SessionReader
+
class SessionReader(object):
    """Non-blocking reader: a daemon thread drains a file handle, line by
    line, into a queue that ``read()`` polls without blocking."""

    def __init__(self, file_handle):
        self.queue = Queue()
        self.stream = file_handle
        self.thread = threading.Thread(target=self.enqueue_stream)
        # Daemonize so a still-open stream never blocks interpreter exit.
        self.thread.daemon = True
        self.thread.start()

    def enqueue_stream(self):
        # Push each line onto the queue until EOF (b'' sentinel), then
        # close the underlying stream.
        for line in iter(self.stream.readline, b''):
            self.queue.put(line)
        self.stream.close()

    def read(self):
        """Return the next queued line, or None if nothing is available yet."""
        try:
            return self.queue.get_nowait()
        except Empty:
            return None
+
class Session(object):
    """Manages an external process, attaching SessionReader instances to its
    output pipe(s) so they can be polled without blocking."""

    def __init__(self, join_err_to_out=False):
        # ``join_err_to_out``: if True, the child's stderr is merged into
        # stdout (subprocess.STDOUT) and no separate stderr reader is made.
        self.process = None
        self.stdin = None
        self._stdout_reader = None
        self._stderr_reader = None
        self.queue = None
        self.thread = None
        self.join_err_to_out = join_err_to_out

    def start(self, command):
        """Launch ``command`` via subprocess.Popen with piped stdin/stdout
        (stderr piped or merged per ``join_err_to_out``) and attach
        non-blocking readers to the output pipe(s)."""
        if self.join_err_to_out:
            stderr = subprocess.STDOUT
        else:
            stderr = subprocess.PIPE
        # NOTE(review): bufsize=1 requests line buffering, which applies
        # only to text-mode streams; these pipes are binary, so Python 3
        # treats this as default buffering (and warns) -- confirm intent.
        self.process = subprocess.Popen(command,
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=stderr,
                bufsize=1,
                close_fds=ON_POSIX)
        self._stdout_reader = SessionReader(self.process.stdout)
        if not self.join_err_to_out:
            self._stderr_reader = SessionReader(self.process.stderr)

    def _stdin_write(self, command):
        # Write to the child's stdin and flush immediately so the child
        # sees the data without waiting for the pipe buffer to fill.
        self.process.stdin.write(command)
        self.process.stdin.flush()
+
diff --git a/dendropy/utility/terminal.py b/dendropy/utility/terminal.py
new file mode 100644
index 0000000..59bd5b7
--- /dev/null
+++ b/dendropy/utility/terminal.py
@@ -0,0 +1,59 @@
+import sys
+
def ttysize():
    """Return (rows, columns) of the controlling tty per ``stty -a``.

    Values are returned as strings parsed from the stty output; falls back
    to (40, 80) on any failure (no tty, unparseable output, etc.).
    """
    try:
        # BUG FIX: local import -- this module's top level only imports
        # ``sys``, so the original's use of ``os`` (and the long-removed
        # ``string.split``) raised NameError, silently forcing the
        # fallback on every call.
        import os
        fp = os.popen('stty -a', 'r')
        ln1 = fp.readline()
        fp.close()
        if not ln1:
            raise ValueError('tty size not supported for input')
        vals = {'rows':None, 'columns':None}
        # stty output is ";"-separated phrases such as "rows 52"
        for ph in ln1.split(';'):
            x = ph.split()
            if len(x) == 2:
                vals[x[0]] = x[1]
                vals[x[1]] = x[0]
        return vals['rows'], vals['columns']
    except Exception:
        # Deliberately broad best-effort fallback (narrowed from bare except).
        return 40, 80
+
def posix_terminal_width():
    """Return estimated terminal width.

    Tries the TIOCGWINSZ ioctl on stdout, then the COLUMNS environment
    variable, then defaults to 80.  (The fcntl/termios imports are POSIX
    only; callers dispatch via terminal_width().)
    """
    width = 0
    try:
        import struct, fcntl, termios
        s = struct.pack('HHHH', 0, 0, 0, 0)
        x = fcntl.ioctl(1, termios.TIOCGWINSZ, s)
        width = struct.unpack('HHHH', x)[1]
    except IOError:
        pass
    if width <= 0:
        try:
            # BUG FIX: local import -- this module's top level only imports
            # ``sys``, so the original's ``os.environ`` raised NameError
            # that the bare except silently swallowed.
            import os
            width = int(os.environ['COLUMNS'])
        except Exception:
            # best-effort: missing/non-numeric COLUMNS falls through to 80
            pass
    if width <= 0:
        width = 80
    return width
+
def win_terminal_width():
    """Return the width of the Windows console attached to stderr, minus
    one column; falls back to 79 (80 - 1) if the query fails.

    Windows-only: relies on ``ctypes.windll`` (kernel32).
    """
    from ctypes import windll, create_string_buffer
    # stdin handle is -10, stdout handle is -11, stderr handle is -12
    h = windll.kernel32.GetStdHandle(-12)
    # 22 bytes: size of the CONSOLE_SCREEN_BUFFER_INFO struct unpacked below
    csbi = create_string_buffer(22)
    res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
    if res:
        import struct
        (bufx, bufy, curx, cury, wattr,
         left, top, right, bottom, maxx, maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
        # window rectangle, inclusive coordinates
        sizex = right - left + 1
        sizey = bottom - top + 1
    else:
        sizex, sizey = 80, 25 # can't determine actual size - return default values
    return sizex-1
+
def terminal_width():
    """Return the terminal width via the platform-appropriate probe."""
    if sys.platform.startswith('win'):
        return win_terminal_width()
    return posix_terminal_width()
diff --git a/dendropy/utility/textprocessing.py b/dendropy/utility/textprocessing.py
new file mode 100644
index 0000000..1227e52
--- /dev/null
+++ b/dendropy/utility/textprocessing.py
@@ -0,0 +1,182 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Various text-manipulating and formatting utilities.
+"""
+
+import re
+import sys
+import time
+import itertools
+import locale
+import codecs
+
+###############################################################################
+## Unicode/String Conversions
+
+ENCODING = locale.getdefaultlocale()[1]
+
def bytes_to_text(s):
    """
    Converts a byte string (as read from, e.g., standard input)
    to a text string.

    In Python 3, this is from type ``bytes`` to ``str``.
    In Python 2, this is, confusingly, from type ``str`` to ``unicode``.

    """
    decoded = codecs.decode(s, ENCODING)
    if sys.hexversion >= 0x03000000:
        return decoded
    # Python 2: re-encode the decoded text as a utf-8 byte ``str``
    # before returning it.
    return codecs.encode(decoded, "utf-8")
+
def parse_curie_standard_qualified_name(prefixed_name, sep=":"):
    """
    Split a CURIE-standard qualified name (e.g. "dc:title") into its
    prefix and local-name parts.

    Parameters
    ----------
    prefixed_name : str
        The qualified name to parse.
    sep : str
        Separator between prefix and local name (default ":").

    Returns
    -------
    list of str
        Two-element list ``[prefix, local_name]``; only the first
        occurrence of ``sep`` delimits the parts.

    Raises
    ------
    ValueError
        If ``sep`` does not occur in ``prefixed_name``.
    """
    if sep not in prefixed_name:
        raise ValueError("'{}' is not a valid CURIE-standard qualified name".format(prefixed_name))
    # Split on the *given* separator; the original hard-coded ":" here,
    # which broke any call supplying a custom ``sep``.
    return prefixed_name.split(sep, 1)
+
+###############################################################################
+##
+
def camel_case(s):
    """Convert an underscore-delimited name to camelCase."""
    head, _, tail = s.partition('_')
    if not tail:
        # No underscore (or nothing after it): name is unchanged.
        return head
    return head + "".join(part.title() for part in tail.split('_'))
+
def snake_case(name):
    """Convert a camelCase or PascalCase name to snake_case."""
    # First pass: break before an uppercase run that starts a word
    # ("CamelCase" -> "Camel_Case"); second pass: break between a
    # lower/digit and an uppercase ("v2X" -> "v2_X").
    underscored = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    underscored = re.sub('([a-z0-9])([A-Z])', r'\1_\2', underscored)
    return underscored.lower()
+
+###############################################################################
+##
+
def unique_taxon_label_map(taxa, taxon_label_map=None, max_label_len=0, logger=None):
    """
    Given a list of taxa, returns a dictionary with the Taxon objects as
    keys and string labels as values, where the labels are guaranteed to
    be unique. If ``taxon_label_map`` is pre-populated (as <Taxon> : 'label'),
    then those labels will be used as the basis for the label composition
    (and ``taxa`` is ignored), otherwise the original taxon object label
    will be used. ``max_label_len`` can be used to restrict the maximum
    length of the labels (0 = unlimited).

    Note: a caller-supplied ``taxon_label_map`` is modified in place (and
    also returned). ``logger`` is accepted for interface compatibility
    but is currently unused.

    Raises
    ------
    ValueError
        If labels cannot be made unique within ``max_label_len`` characters.
    """
    if taxon_label_map is None:
        taxon_label_map = {}
        for t in taxa:
            taxon_label_map[t] = t.label
    # Track already-assigned labels in a set: O(1) membership tests
    # (the original scanned a list, O(n) per check).
    seen_labels = set()
    for t in taxon_label_map:
        label = taxon_label_map[t]
        if label in seen_labels:
            idx = 1
            candidate_label = label
            while candidate_label in seen_labels:
                idx += 1
                if max_label_len > 0:
                    # Truncate the base label to leave room for the
                    # disambiguating numeric suffix.
                    k = max_label_len - len(str(idx))
                    if k < 1:
                        raise ValueError("Unable to make labels unique with maximum label length of %d" % max_label_len)
                    candidate_label = label[:k] + str(idx)
                else:
                    candidate_label = label + str(idx)
            label = candidate_label
        seen_labels.add(label)
        taxon_label_map[t] = label
    return taxon_label_map
+
+###############################################################################
+##
+
def format_dict_table(rows, column_names=None, max_column_width=None, border_style=2):
    """
    Returns a string representation of a tuple of dictionaries in a
    table format. This method can read the column names directly off the
    dictionary keys, but if a tuple of these keys is provided in the
    'column_names' variable, then the order of column_names will follow
    the order of the fields/keys in that variable.

    ``max_column_width`` truncates both column headings and cell values.
    ``border_style`` selects the rule characters: 0 = no rules, 1 =
    plain spaces/dashes, 2 (default) = " | " separators with "-+-"
    junctions, >= 3 additionally draws left/right table edges.

    Returns '' when there are no rows and no column names, or when the
    column list cannot be determined.
    """
    if column_names or len(rows) > 0:
        lengths = {}
        rules = {}
        if column_names:
            column_list = column_names
        else:
            # Infer columns from the first row's keys.
            # NOTE(review): bare except also hides non-dict rows; the
            # function then just returns '' below.
            try:
                column_list = rows[0].keys()
            except:
                column_list = None
        if column_list:
            # characters that make up the table rules
            border_style = int(border_style)
            #border_style = 0
            if border_style == 0:
                vertical_rule = '  '
                horizontal_rule = ''
                rule_junction = ''
            elif border_style == 1:
                vertical_rule = ' '
                horizontal_rule = '-'
                rule_junction = '-'
            else:
                vertical_rule = ' | '
                horizontal_rule = '-'
                rule_junction = '-+-'
            if border_style >= 3:
                left_table_edge_rule = '| '
                right_table_edge_rule = ' |'
                left_table_edge_rule_junction = '+-'
                right_table_edge_rule_junction = '-+'
            else:
                left_table_edge_rule = ''
                right_table_edge_rule = ''
                left_table_edge_rule_junction = ''
                right_table_edge_rule_junction = ''

            if max_column_width:
                # Truncate headings and rebuild every row with truncated
                # keys and stringified, truncated values so that widths
                # computed below stay consistent.
                column_list = [c[:max_column_width] for c in column_list]
                trunc_rows = []
                for row in rows:
                    new_row = {}
                    for k in row.keys():
                        new_row[k[:max_column_width]] = str(row[k])[:max_column_width]
                    trunc_rows.append(new_row)
                rows = trunc_rows

            # Each column is as wide as its widest cell or its heading.
            for col in column_list:
                rls = [len(str(row[col])) for row in rows]
                lengths[col] = max(rls+[len(col)])
                rules[col] = horizontal_rule*lengths[col]

            # Build a %-style mapping template, one left-justified field
            # per column; the same template renders the border (against
            # ``rules``), the heading row, and each data row.
            # NOTE(review): column names containing ')' or '%' would
            # break this template -- assumed not to occur.
            template_elements = ["%%(%s)-%ss" % (col, lengths[col]) for col in column_list]
            row_template = vertical_rule.join(template_elements)
            border_template = rule_junction.join(template_elements)
            full_line = left_table_edge_rule_junction + (border_template % rules) + right_table_edge_rule_junction
            display = []
            if border_style > 0:
                display.append(full_line)
            display.append(left_table_edge_rule + (row_template % dict(zip(column_list, column_list))) + right_table_edge_rule)
            if border_style > 0:
                display.append(full_line)
            for row in rows:
                display.append(left_table_edge_rule + (row_template % row) + right_table_edge_rule)
            if border_style > 0:
                display.append(full_line)
            return "\n".join(display)
        else:
            return ''
    else:
        return ''
diff --git a/dendropy/utility/timeprocessing.py b/dendropy/utility/timeprocessing.py
new file mode 100644
index 0000000..fd27521
--- /dev/null
+++ b/dendropy/utility/timeprocessing.py
@@ -0,0 +1,70 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+import time
+
def pretty_timestamp(t=None, style=0):
    """
    Format ``t`` (a ``time.struct_time``; default: current local time)
    as "YYYY-MM-DD" when ``style`` is 0, otherwise as a compact
    "YYYYMMDDHHMMSS" stamp.
    """
    if t is None:
        t = time.localtime()
    fmt = "%Y-%m-%d" if style == 0 else "%Y%m%d%H%M%S"
    return time.strftime(fmt, t)
+
def pretty_elapsed_datetime(t, fill=False):
    """
    Render the day/hour/minute/second fields of ``t`` (a datetime-like
    object) as an English phrase, e.g.
    "2 days, 3 hours, 4 minutes, and 5.5 seconds".

    Zero-valued fields are omitted unless ``fill`` is True; seconds
    include the microsecond fraction.
    """
    def _unit(name, value):
        # Singular form only when the value is exactly 1.
        return "{} {}{}".format(value, name, "" if value == 1 else "s")
    parts = []
    for name, value in (("day", t.day), ("hour", t.hour), ("minute", t.minute)):
        if value or fill:
            parts.append(_unit(name, value))
    seconds = t.second + float(t.microsecond) / 1000000
    if seconds or fill:
        rendered = _unit("second", seconds)
        # Join the final component with "and" when it is not the only one.
        parts.append("and {}".format(rendered) if parts else rendered)
    return ", ".join(parts)
+
def parse_timedelta(td):
    """
    Decompose a ``datetime.timedelta`` into (hours, minutes, seconds),
    where hours absorbs the days component and seconds is a float
    including the microsecond fraction.
    """
    total_hours = td.days * 24 + td.seconds // 3600
    remainder = td.seconds % 3600
    total_minutes = remainder // 60
    total_seconds = remainder % 60 + float(td.microseconds) / 1000000
    return total_hours, total_minutes, total_seconds
+
def pretty_timedelta(td, fill=False):
    """
    Render a ``datetime.timedelta`` as an English phrase, e.g.
    "26 hours, 3 minutes, and 4.5 seconds".

    Zero-valued fields are omitted unless ``fill`` is True; a zero
    duration renders as "0 seconds".
    """
    hours, minutes, seconds = parse_timedelta(td)
    def _unit(name, value):
        # Singular form only when the value is exactly 1.
        return "{} {}{}".format(value, name, "" if value == 1 else "s")
    parts = []
    if hours or fill:
        parts.append(_unit("hour", hours))
    if minutes or fill:
        parts.append(_unit("minute", minutes))
    if seconds or fill:
        rendered = _unit("second", seconds)
        # Join the final component with "and" when it is not the only one.
        parts.append("and {}".format(rendered) if parts else rendered)
    if not parts:
        return "0 seconds"
    return ", ".join(parts)
diff --git a/dendropy/utility/urlio.py b/dendropy/utility/urlio.py
new file mode 100644
index 0000000..5d97aa5
--- /dev/null
+++ b/dendropy/utility/urlio.py
@@ -0,0 +1,47 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Communications using web/internet protocols.
+"""
+
+from dendropy.utility import textprocessing
+
+import sys
+if sys.hexversion < 0x03000000:
+    from urllib2 import Request
+    from urllib2 import urlopen
+    from urllib import urlencode
+    from urllib2 import HTTPError
+else:
+    from urllib.request import Request
+    from urllib.request import urlopen
+    from urllib.parse import urlencode
+    from urllib.error import HTTPError
+import re
+
def read_url(url, strip_markup=False):
    """
    Return contents of ``url`` as a text string.

    Parameters
    ----------
    url : str
        URL to fetch.
    strip_markup : bool
        If True, crudely remove markup tags from the result.

    Returns
    -------
    str
        The decoded response body.
    """
    response = urlopen(url)
    try:
        # Decode raw response bytes via the module's locale-based helper.
        text = textprocessing.bytes_to_text(response.read())
    finally:
        # The original leaked the connection; close it even if read fails.
        # (try/finally rather than ``with``: Python 2's urllib2 response
        # objects are not context managers.)
        response.close()
    if strip_markup:
        # Non-greedy tag stripper; not a full HTML parser.
        return re.sub(r'<[^>]*?>', '', text)
    else:
        return text
diff --git a/dendropy/utility/vcsinfo.py b/dendropy/utility/vcsinfo.py
new file mode 100644
index 0000000..9335127
--- /dev/null
+++ b/dendropy/utility/vcsinfo.py
@@ -0,0 +1,246 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Wraps up source version control system information.
+"""
+
+import os
+import sys
+try:
+    from StringIO import StringIO # Python 2 legacy support: StringIO in this module is the one needed (not io)
+except ImportError:
+    from io import StringIO # Python 3
+import subprocess
+import datetime
+from dendropy.utility import processio
+
class Revision(object):
    """
    Provides (Git) version control information
    about a project.

    All values (commit id, commit date, tag, branch name, descriptions)
    are obtained by invoking the ``git`` executable in ``repo_path`` and
    cached on the instance after the first property access; call
    ``update()`` to refresh them.

    NOTE(review): each property re-runs ``update()`` whenever its cached
    value is None, so a value that is legitimately None (e.g., HEAD is
    not tagged) triggers a fresh round of ``git`` calls on every access.
    """

    class VcsUnavailable(Exception):
        # The VCS executable could not be located or executed.
        def __init__(self, *args, **kwargs):
            Exception.__init__(self, *args, **kwargs)

    class NonRepositoryError(Exception):
        # The given path is not inside a (Git) repository.
        def __init__(self, *args, **kwargs):
            Exception.__init__(self, *args, **kwargs)

    class NonBranchException(Exception):
        # HEAD is not a symbolic reference to a branch.
        def __init__(self, *args, **kwargs):
            Exception.__init__(self, *args, **kwargs)

    class UntaggedException(Exception):
        # No tag can be resolved for the current commit.
        def __init__(self, *args, **kwargs):
            Exception.__init__(self, *args, **kwargs)

    def __init__(self, repo_path, release=None, vcs_app_path='git'):
        """
        Parameters
        ----------
        repo_path : str
            Path to the repository's working directory.
        release : str, optional
            Release identifier; stored but not otherwise interpreted here.
        vcs_app_path : str
            Name of (or path to) the VCS executable (default: "git").
        """
        self.vcs_app_path = vcs_app_path
        self.repo_path = repo_path
        self.release = release
        # Lazily-populated caches; None means "not yet queried".
        self._commit_id = None
        self._commit_date = None
        self._commit_tag = None
        self._branch_name = None
        self._description = None
        self._long_description = None
        self._is_available = None

    def __str__(self):
        # Compact "<branch>-<short-id>, <date>" rendering; empty string
        # when no VCS information is available.
        parts = []
        if self.branch_name:
            parts.append("%s-" % self.branch_name)
        if self.commit_id:
            parts.append(self.commit_id[:10])
        if self.commit_date:
            parts.append(", %s" % str(self.commit_date))
        if parts:
            return "".join(parts)
        else:
            return ""

    def __repr__(self):
        return "<%s: '%s'>" % (self.__class__.__name__, self.__str__())

    @property
    def commit_id(self):
        # Short commit hash of HEAD, or None if unavailable.
        if self._commit_id is None:
            self.update()
        return self._commit_id

    @property
    def commit_date(self):
        # Author timestamp of HEAD as a ``datetime``, or None.
        if self._commit_date is None:
            self.update()
        return self._commit_date

    @property
    def commit_tag(self):
        # Tag naming HEAD, or None if HEAD is untagged.
        if self._commit_tag is None:
            self.update()
        return self._commit_tag

    @property
    def branch_name(self):
        # Current branch name, "(no branch)" for detached HEAD, or None.
        if self._branch_name is None:
            self.update()
        return self._branch_name

    @property
    def description(self):
        # ``git describe`` output for HEAD, or None.
        if self._description is None:
            self.update()
        return self._description

    @property
    def long_description(self):
        # Human-readable summary built from id, branch, and date.
        if self._long_description is None:
            self.update()
        return self._long_description

    @property
    def is_available(self):
        # True if a usable VCS and repository were found on last update.
        if self._is_available is None:
            self.update()
        return self._is_available

    def update(self, repo_path=None):
        """
        (Re-)query the VCS and refresh all cached values.

        If ``repo_path`` is given, it replaces the current repository
        path first. If no repository path is set or the VCS is not
        usable, all values are cleared and ``is_available`` becomes
        False (no subprocess is run in the no-path case).
        """
        if repo_path is not None:
            self.repo_path = repo_path
        if not self.repo_path or not self._vcs_available():
            self._commit_id = None
            self._commit_date = None
            self._commit_tag = None
            self._branch_name = None
            self._description = None
            self._long_description = None
            self._is_available = False
            return
        self._commit_id = self.get_commit_id()
        self._commit_date = self.get_datetime()
        self._commit_tag = self.get_commit_tag()
        self._branch_name = self.get_branch_name()
        self._description = self.get_description()
        self._long_description = self._build_long_description()
        self._is_available = True

    def _run_vcs(self, cmd):
        """
        Run ``cmd`` (a string or list of arguments, without the leading
        VCS executable name) in the repository directory.

        Returns (returncode, stdout, stderr); returncode is -999 when
        the executable could not be run at all.
        """
        # Normalize to a single shell command string. The original
        # inserted the app path into the caller's list (mutating it) but
        # still ran with shell=True -- under which POSIX executes only
        # the first list element, silently dropping the arguments.
        if isinstance(cmd, str):
            cmd = self.vcs_app_path + " " + cmd
        else:
            cmd = " ".join([self.vcs_app_path] + list(cmd))
        try:
            p = subprocess.Popen(cmd,
                shell=True,
                cwd=os.path.abspath(self.repo_path),
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
            stdout, stderr = processio.communicate(p)
            retcode = p.returncode
        except OSError as e:
            # Sentinel: the VCS executable itself could not be launched.
            return -999, "", str(e)
        return retcode, stdout, stderr

    def _vcs_available(self):
        """Return True if the VCS runs and ``repo_path`` is a repository."""
        retcode, stdout, stderr = self._run_vcs("--version")
        if retcode != 0:
            return False
        retcode, stdout, stderr = self._run_vcs("status")
        # Case-insensitive: newer Git emits "fatal: not a git repository"
        # (lowercase "not"), which the original exact match missed.
        if "fatal: not a git repository" in stderr.lower():
            return False
        return True

    def get_commit_id(self):
        """Return the short commit hash of HEAD."""
        # cmd = "show --quiet --pretty=format:'%H' HEAD"
        cmd = "rev-parse --short HEAD"
        retcode, stdout, stderr = self._run_vcs(cmd)
        return stdout.replace('\n', '')

    def get_datetime(self):
        """Return the author timestamp of HEAD as a ``datetime``, or None."""
        cmd = "show --quiet --pretty=format:'%at' HEAD"
        retcode, stdout, stderr = self._run_vcs(cmd)
        if stdout:
            try:
                # Strip newline and any shell quoting around the epoch value.
                return datetime.datetime.fromtimestamp(float(stdout.replace('\n', '').replace("'", "").replace('"','')))
            except ValueError:
                return None
        else:
            return None

    def get_commit_tag(self):
        """Return the tag naming HEAD, or None if it is untagged."""
        cmd = "name-rev --name-only --tags --no-undefined HEAD"
        retcode, stdout, stderr = self._run_vcs(cmd)
        if "fatal: cannot describe" in stderr:
            return None
        else:
            return stdout.strip('\n')

    def get_branch_name(self):
        """
        Return the current branch name, "(no branch)" for a detached
        HEAD, or None on other errors.
        """
        # git name-rev --name-only HEAD
        cmd = "symbolic-ref HEAD"
        retcode, stdout, stderr = self._run_vcs(cmd)
        if retcode != 0:
            if "fatal: ref HEAD is not a symbolic ref" in stderr:
                return "(no branch)"
            else:
                return None
        else:
            # e.g. "refs/heads/master" -> "master"
            return stdout.replace('\n', '').split('/')[-1]

    def get_description(self):
        """Return ``git describe`` output, "(unnamed)" if nothing names HEAD."""
        cmd = "describe --tags --long --always --abbrev=12"
        retcode, stdout, stderr = self._run_vcs(cmd)
        if retcode != 0:
            if "fatal: No names found, cannot describe anything." in stderr:
                return "(unnamed)"
            else:
                return None
        else:
            return stdout.replace('\n', '')

    @staticmethod
    def _compose_long_description(commit_id, branch_name, commit_date):
        # Shared formatter: "<id>, on branch '<branch>', committed on <date>";
        # None when no component is available. (Replaces two near-identical
        # copies of this logic.)
        parts = []
        if commit_id:
            parts.append(commit_id)
        if branch_name:
            parts.append("on branch '%s'" % branch_name)
        if commit_date:
            parts.append("committed on %s" % str(commit_date))
        if parts:
            return ", ".join(parts)
        else:
            return None

    def get_long_description(self):
        """Return a human-readable summary, querying the VCS if needed."""
        return self._compose_long_description(
                self.commit_id, self.branch_name, self.commit_date)

    def _build_long_description(self):
        # Build from the already-cached private fields (used by update()
        # to avoid re-triggering property lookups mid-refresh).
        return self._compose_long_description(
                self._commit_id, self._branch_name, self._commit_date)
+
+
diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 0000000..af7bfab
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  xml        to make Docutils-native XML files"
+	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/DendroPy.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/DendroPy.qhc"
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/DendroPy"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/DendroPy"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through platex and dvipdfmx..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo
+	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+	@echo "Run \`make' in that directory to run these through makeinfo" \
+	      "(use \`make info' here to do that automatically)."
+
+info:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo "Running Texinfo files through makeinfo..."
+	make -C $(BUILDDIR)/texinfo info
+	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+	@echo
+	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+	@echo
+	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+	@echo
+	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/doc/source/_static/Octocat.png b/doc/source/_static/Octocat.png
new file mode 100644
index 0000000..91057da
Binary files /dev/null and b/doc/source/_static/Octocat.png differ
diff --git a/doc/source/_static/dendropy3.png b/doc/source/_static/dendropy3.png
new file mode 100644
index 0000000..51b8c96
Binary files /dev/null and b/doc/source/_static/dendropy3.png differ
diff --git a/doc/source/_static/dendropy_icon.png b/doc/source/_static/dendropy_icon.png
new file mode 100644
index 0000000..a7a7759
Binary files /dev/null and b/doc/source/_static/dendropy_icon.png differ
diff --git a/doc/source/_static/dendropy_logo.png b/doc/source/_static/dendropy_logo.png
new file mode 100644
index 0000000..b16a1de
Binary files /dev/null and b/doc/source/_static/dendropy_logo.png differ
diff --git a/doc/source/_static/google-groups-logo1.png b/doc/source/_static/google-groups-logo1.png
new file mode 100644
index 0000000..37a0fb9
Binary files /dev/null and b/doc/source/_static/google-groups-logo1.png differ
diff --git a/doc/source/_static/logo_cipres.gif b/doc/source/_static/logo_cipres.gif
new file mode 100644
index 0000000..a136d1c
Binary files /dev/null and b/doc/source/_static/logo_cipres.gif differ
diff --git a/doc/source/_static/nsf.gif b/doc/source/_static/nsf.gif
new file mode 100644
index 0000000..735163e
Binary files /dev/null and b/doc/source/_static/nsf.gif differ
diff --git a/doc/source/_themes/rtd-fabric/static/rtd.css b/doc/source/_themes/rtd-fabric/static/rtd.css
new file mode 100644
index 0000000..1fbc48e
--- /dev/null
+++ b/doc/source/_themes/rtd-fabric/static/rtd.css
@@ -0,0 +1,773 @@
+/*
+ * rtd.css
+ * ~~~~~~~~~~~~~~~
+ *
+ * Sphinx stylesheet -- sphinxdoc theme.  Originally created by
+ * Armin Ronacher for Werkzeug.
+ *
+ * Customized for ReadTheDocs by Eric Pierce & Eric Holscher
+ *
+ * Customized for DendroPy by Jeet Sukumaran
+ *
+ * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/* RTD colors
+ * light blue: #e8ecef
+ * medium blue: #8ca1af
+ * dark blue: #465158
+ * dark grey: #444444
+ *
+ * white hover: #d1d9df;
+ * medium blue hover: #697983;
+ * green highlight: #8ecc4c
+ * light blue (project bar): #e8ecef
+ */
+
+@import url("basic.css");
+
+/* PAGE LAYOUT -------------------------------------------------------------- */
+
+body {
+    font: 100%/1.5 "ff-meta-web-pro-1","ff-meta-web-pro-2",Arial,"Helvetica Neue",sans-serif;
+    text-align: center;
+    color: black;
+    background-color: #465158;
+    padding: 0;
+    margin: 0;
+}
+
+div.document {
+    text-align: left;
+    background-color: #e8ecef;
+}
+
+div.bodywrapper {
+    background-color: #ffffff;
+    border-left: 1px solid #ccc;
+    border-bottom: 1px solid #ccc;
+    margin: 0 0 0 16em;
+}
+
+div.body {
+    margin: 0;
+    padding: 0.5em 1.3em;
+    max-width: 55em;
+    min-width: 20em;
+}
+
+div.related {
+    font-size: 1em;
+    background-color: #465158;
+}
+
+div.documentwrapper {
+    float: left;
+    width: 100%;
+    background-color: #e8ecef;
+}
+
+
+/* HEADINGS --------------------------------------------------------------- */
+
+h1 {
+    margin: 0;
+    padding: 0.7em 0 0.3em 0;
+    font-size: 1.5em;
+    line-height: 1.15;
+    color: #111;
+    clear: both;
+}
+
+h2 {
+    margin: 2em 0 0.2em 0;
+    font-size: 1.35em;
+    padding: 0;
+    color: #465158;
+}
+
+h3 {
+    margin: 1em 0 -0.3em 0;
+    font-size: 1.2em;
+    color: #6c818f;
+}
+
+div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a {
+    color: black;
+}
+
+h1 a.anchor, h2 a.anchor, h3 a.anchor, h4 a.anchor, h5 a.anchor, h6 a.anchor {
+    display: none;
+    margin: 0 0 0 0.3em;
+    padding: 0 0.2em 0 0.2em;
+    color: #aaa !important;
+}
+
+h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor,
+h5:hover a.anchor, h6:hover a.anchor {
+    display: inline;
+}
+
+h1 a.anchor:hover, h2 a.anchor:hover, h3 a.anchor:hover, h4 a.anchor:hover,
+h5 a.anchor:hover, h6 a.anchor:hover {
+    color: #777;
+    background-color: #eee;
+}
+
+
+/* LINKS ------------------------------------------------------------------ */
+
+/* Normal links get a pseudo-underline */
+a {
+    color: #444;
+    text-decoration: none;
+    border-bottom: 1px solid #ccc;
+}
+
+/* Links in sidebar, TOC, index trees and tables have no underline */
+.sphinxsidebar a,
+.toctree-wrapper a,
+.indextable a,
+#indices-and-tables a {
+    color: #444;
+    text-decoration: none;
+    border-bottom: none;
+}
+
+/* Most links get an underline-effect when hovered */
+a:hover,
+div.toctree-wrapper a:hover,
+.indextable a:hover,
+#indices-and-tables a:hover {
+    color: #111;
+    text-decoration: none;
+    border-bottom: 1px solid #111;
+}
+
+/* Footer links */
+div.footer a {
+    color: #86989B;
+    text-decoration: none;
+    border: none;
+}
+div.footer a:hover {
+    color: #a6b8bb;
+    text-decoration: underline;
+    border: none;
+}
+
+/* Permalink anchor (subtle grey with a red hover) */
+div.body a.headerlink {
+    color: #ccc;
+    font-size: 1em;
+    margin-left: 6px;
+    padding: 0 4px 0 4px;
+    text-decoration: none;
+    border: none;
+}
+div.body a.headerlink:hover {
+    color: #c60f0f;
+    border: none;
+}
+
+
+/* NAVIGATION BAR --------------------------------------------------------- */
+
+div.related ul {
+    height: 2.5em;
+}
+
+div.related ul li {
+    margin: 0;
+    padding: 0.65em 0;
+    float: left;
+    display: block;
+    color: white; /* For the >> separators */
+    font-size: 0.8em;
+}
+
+div.related ul li.right {
+    float: right;
+    margin-right: 5px;
+    color: transparent; /* Hide the | separators */
+}
+
+/* "Breadcrumb" links in nav bar */
+div.related ul li a {
+    order: none; /* NOTE(review): invalid — "order" takes an integer; likely a mangled "border: none" (already declared below) */
+    background-color: inherit;
+    font-weight: bold;
+    margin: 6px 0 6px 4px;
+    line-height: 1.75em;
+    color: #ffffff;
+    padding: 0.4em 0.8em;
+    border: none;
+    border-radius: 3px;
+}
+/* previous / next / modules / index links look more like buttons */
+div.related ul li.right a {
+    margin: 0.375em 0;
+    background-color: #697983;
+    text-shadow: 0 1px rgba(0, 0, 0, 0.5);
+    border-radius: 3px;
+    -webkit-border-radius: 3px;
+    -moz-border-radius: 3px;
+}
+/* All navbar links light up as buttons when hovered */
+div.related ul li a:hover {
+    background-color: #8ca1af;
+    color: #ffffff;
+    text-decoration: none;
+    border-radius: 3px;
+    -webkit-border-radius: 3px;
+    -moz-border-radius: 3px;
+}
+/* Take extra precautions for tt within links */
+a tt,
+div.related ul li a tt {
+    background: inherit !important;
+    color: inherit !important;
+}
+
+
+/* SIDEBAR ---------------------------------------------------------------- */
+
+div.sphinxsidebarwrapper {
+    padding: 0;
+}
+
+img.logo {
+    padding-top: 15px;
+    padding-top: 15px; /* NOTE(review): duplicate of previous line — probably meant padding-bottom */
+}
+
+div.sphinxsidebar {
+    margin: 0;
+    margin-left: -100%;
+    float: left;
+    top: 3em;
+    left: 0;
+    padding: 0 1em;
+    width: 14em;
+    font-size: 1em;
+    text-align: left;
+    background-color: #e8ecef;
+}
+
+div.sphinxsidebar img {
+    max-width: 12em;
+}
+
+div.sphinxsidebar h3, div.sphinxsidebar h4 {
+    margin: 1.2em 0 0.3em 0;
+    font-size: 1em;
+    padding: 0;
+    color: #222222;
+    font-family: "ff-meta-web-pro-1", "ff-meta-web-pro-2", "Arial", "Helvetica Neue", sans-serif;
+}
+
+div.sphinxsidebar h3 a {
+    color: #444444;
+}
+
+div.sphinxsidebar ul,
+div.sphinxsidebar p {
+    margin-top: 0;
+    padding-left: 0;
+    line-height: 130%;
+    background-color: #e8ecef;
+}
+
+/* No bullets for nested lists, but a little extra indentation */
+div.sphinxsidebar ul ul {
+    list-style-type: none;
+    margin-left: 1.5em;
+    padding: 0;
+}
+
+/* A little top/bottom padding to prevent adjacent links' borders
+ * from overlapping each other */
+div.sphinxsidebar ul li {
+    padding: 1px 0;
+}
+
+/* A little left-padding to make these align with the ULs */
+div.sphinxsidebar p.topless {
+    padding-left: 0 0 0 1em;
+}
+
+/* Make these into hidden one-liners */
+div.sphinxsidebar ul li,
+div.sphinxsidebar p.topless {
+    white-space: nowrap;
+    overflow: hidden;
+}
+/* ...which become visible when hovered */
+div.sphinxsidebar ul li:hover,
+div.sphinxsidebar p.topless:hover {
+    overflow: visible;
+}
+
+/* Search text box and "Go" button */
+#searchbox {
+    margin-top: 2em;
+    margin-bottom: 1em;
+    background: #ddd;
+    padding: 0.5em;
+    border-radius: 6px;
+    -moz-border-radius: 6px;
+    -webkit-border-radius: 6px;
+}
+#searchbox h3 {
+    margin-top: 0;
+}
+
+/* Make search box and button abut and have a border */
+input,
+div.sphinxsidebar input {
+    border: 1px solid #999;
+    float: left;
+}
+
+/* Search textbox */
+input[type="text"] {
+    margin: 0;
+    padding: 0 3px;
+    height: 20px;
+    width: 144px;
+    border-top-left-radius: 3px;
+    border-bottom-left-radius: 3px;
+    -moz-border-radius-topleft: 3px;
+    -moz-border-radius-bottomleft: 3px;
+    -webkit-border-top-left-radius: 3px;
+    -webkit-border-bottom-left-radius: 3px;
+}
+/* Search button */
+input[type="submit"] {
+    margin: 0 0 0 -1px; /* -1px prevents a double-border with textbox */
+    height: 22px;
+    color: #444;
+    background-color: #e8ecef;
+    padding: 1px 4px;
+    font-weight: bold;
+    border-top-right-radius: 3px;
+    border-bottom-right-radius: 3px;
+    -moz-border-radius-topright: 3px;
+    -moz-border-radius-bottomright: 3px;
+    -webkit-border-top-right-radius: 3px;
+    -webkit-border-bottom-right-radius: 3px;
+}
+input[type="submit"]:hover {
+    color: #ffffff;
+    background-color: #8ecc4c;
+}
+
+div.sphinxsidebar p.searchtip {
+    clear: both;
+    padding: 0.5em 0 0 0;
+    background: #ddd;
+    color: #666;
+    font-size: 0.9em;
+}
+
+/* Sidebar links are unusual */
+div.sphinxsidebar li a,
+div.sphinxsidebar p a {
+    background: #e8ecef; /* In case links overlap main content */
+    border-radius: 3px;
+    -moz-border-radius: 3px;
+    -webkit-border-radius: 3px;
+    border: 1px solid transparent; /* To prevent things jumping around on hover */
+    padding: 0 5px 0 5px;
+}
+div.sphinxsidebar li a:hover,
+div.sphinxsidebar p a:hover {
+    color: #111;
+    text-decoration: none;
+    border: 1px solid #888;
+}
+
+/* Tweak any link appearing in a heading */
+div.sphinxsidebar h3 a {
+}
+
+
+
+
+/* OTHER STUFF ------------------------------------------------------------ */
+
+cite, code, tt {
+    font-family: 'Consolas', 'Deja Vu Sans Mono',
+                 'Bitstream Vera Sans Mono', monospace;
+    font-size: 0.95em;
+    letter-spacing: 0.01em;
+}
+
+tt {
+    background-color: #f2f2f2;
+    color: #444;
+}
+
+tt.descname, tt.descclassname, tt.xref {
+    border: 0;
+}
+
+hr {
+    border: 1px solid #abc;
+    margin: 2em;
+}
+
+pre, #_fontwidthtest {
+    font-family: 'Consolas', 'Deja Vu Sans Mono',
+                 'Bitstream Vera Sans Mono', monospace;
+    margin: 1em 2em;
+    font-size: 0.95em;
+    letter-spacing: 0.015em;
+    line-height: 120%;
+    padding: 0.5em;
+    border: 1px solid #ccc;
+    background-color: #eee;
+    border-radius: 6px;
+    -moz-border-radius: 6px;
+    -webkit-border-radius: 6px;
+}
+
+pre a {
+    color: inherit;
+    text-decoration: underline;
+}
+
+td.linenos pre {
+    padding: 0.5em 0;
+}
+
+div.quotebar {
+    background-color: #f8f8f8;
+    max-width: 250px;
+    float: right;
+    padding: 2px 7px;
+    border: 1px solid #ccc;
+}
+
+div.topic {
+    background-color: #f8f8f8;
+}
+
+table {
+    border-collapse: collapse;
+    margin: 0 -0.5em 0 -0.5em;
+}
+
+table td, table th {
+    padding: 0.2em 0.5em 0.2em 0.5em;
+}
+
+
+/* ADMONITIONS AND WARNINGS ------------------------------------------------- */
+
+/* Shared by admonitions and warnings */
+div.admonition, div.warning {
+    font-size: 0.9em;
+    margin: 2em;
+    padding: 0;
+    /*
+    border-radius: 6px;
+    -moz-border-radius: 6px;
+    -webkit-border-radius: 6px;
+    */
+}
+div.admonition p, div.warning p {
+    margin: 0.5em 1em 0.5em 1em;
+    padding: 0;
+}
+div.admonition pre, div.warning pre {
+    margin: 0.4em 1em 0.4em 1em;
+}
+div.admonition p.admonition-title,
+div.warning p.admonition-title {
+    margin: 0;
+    padding: 0.1em 0 0.1em 0.5em;
+    color: white;
+    font-weight: bold;
+    font-size: 1.1em;
+    text-shadow: 0 1px rgba(0, 0, 0, 0.5);
+}
+div.admonition ul, div.admonition ol,
+div.warning ul, div.warning ol {
+    margin: 0.1em 0.5em 0.5em 3em;
+    padding: 0;
+}
+
+
+/* Admonitions only */
+div.admonition {
+    border: 1px solid #609060;
+    background-color: #e9ffe9;
+}
+div.admonition p.admonition-title {
+    background-color: #70A070;
+    border-bottom: 1px solid #609060;
+}
+
+
+/* Warnings only */
+div.warning {
+    border: 1px solid #900000;
+    background-color: #ffe9e9;
+}
+div.warning p.admonition-title {
+    background-color: #b04040;
+    border-bottom: 1px solid #900000;
+}
+
+
+
+div.versioninfo {
+    margin: 1em 0 0 0;
+    border: 1px solid #ccc;
+    background-color: #DDEAF0;
+    padding: 8px;
+    line-height: 1.3em;
+    font-size: 0.9em;
+}
+
+.viewcode-back {
+    font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
+                 'Verdana', sans-serif;
+}
+
+div.viewcode-block:target {
+    background-color: #f4debf;
+    border-top: 1px solid #ac9;
+    border-bottom: 1px solid #ac9;
+}
+
+dl {
+    margin: 1em 0 2.5em 0;
+}
+
+/* Highlight target when you click an internal link */
+dt:target {
+    background: #ffe080;
+}
+/* Don't highlight whole divs */
+div.highlight {
+    background: transparent;
+}
+/* But do highlight spans (so search results can be highlighted) */
+span.highlight {
+    background: #ffe080;
+}
+
+div.footer {
+    background-color: #465158;
+    color: #eeeeee;
+    padding: 0 2em 2em 2em;
+    clear: both;
+    font-size: 0.8em;
+    text-align: center;
+}
+
+p {
+    margin: 0.8em 0 0.5em 0;
+}
+
+.section p img {
+    margin: 1em 2em;
+}
+
+
+/* MOBILE LAYOUT -------------------------------------------------------------- */
+
+@media screen and (max-width: 600px) {
+
+    h1, h2, h3, h4, h5 {
+        position: relative;
+    }
+
+    ul {
+        padding-left: 1.75em;
+    }
+
+    div.bodywrapper a.headerlink, #indices-and-tables h1 a {
+        color: #e6e6e6;
+        font-size: 80%;
+        float: right;
+        line-height: 1.8;
+        position: absolute;
+        right: -0.7em;
+        visibility: inherit;
+    }
+
+    div.bodywrapper h1 a.headerlink, #indices-and-tables h1 a {
+        line-height: 1.5;
+    }
+
+    pre {
+        font-size: 0.7em;
+        overflow: auto;
+        word-wrap: break-word;
+        white-space: pre-wrap;
+    }
+
+    div.related ul {
+        height: 2.5em;
+        padding: 0;
+        text-align: left;
+    }
+
+    div.related ul li {
+        clear: both;
+        color: #465158;
+        padding: 0.2em 0;
+    }
+
+    div.related ul li:last-child {
+        border-bottom: 1px dotted #8ca1af;
+        padding-bottom: 0.4em;
+        margin-bottom: 1em;
+        width: 100%;
+    }
+
+    div.related ul li a {
+        color: #465158;
+        padding-right: 0;
+    }
+
+    div.related ul li a:hover {
+        background: inherit;
+        color: inherit;
+    }
+
+    div.related ul li.right {
+        clear: none;
+        padding: 0.65em 0;
+        margin-bottom: 0.5em;
+    }
+
+    div.related ul li.right a {
+        color: #fff;
+        padding-right: 0.8em;
+    }
+
+    div.related ul li.right a:hover {
+        background-color: #8ca1af;
+    }
+
+    div.body {
+        clear: both;
+        min-width: 0;
+        word-wrap: break-word;
+    }
+
+    div.bodywrapper {
+        margin: 0 0 0 0;
+    }
+
+    div.sphinxsidebar {
+        float: none;
+        margin: 0;
+        width: auto;
+    }
+
+    div.sphinxsidebar input[type="text"] {
+        height: 2em;
+        line-height: 2em;
+        width: 70%;
+    }
+
+    div.sphinxsidebar input[type="submit"] {
+        height: 2em;
+        margin-left: 0.5em;
+        width: 20%;
+    }
+
+    div.sphinxsidebar p.searchtip {
+        background: inherit;
+        margin-bottom: 1em;
+    }
+
+    div.sphinxsidebar ul li, div.sphinxsidebar p.topless {
+        white-space: normal;
+    }
+
+    .bodywrapper img {
+        display: block;
+        margin-left: auto;
+        margin-right: auto;
+        max-width: 100%;
+    }
+
+    div.documentwrapper {
+        float: none;
+    }
+
+    div.admonition, div.warning, pre, blockquote {
+        margin-left: 0em;
+        margin-right: 0em;
+    }
+
+    .body p img {
+        margin: 0;
+    }
+
+    #searchbox {
+        background: transparent;
+    }
+
+    .related:not(:first-child) li {
+        display: none;
+    }
+
+    .related:not(:first-child) li.right {
+        display: block;
+    }
+
+    div.footer {
+        padding: 1em;
+    }
+
+    .rtd_doc_footer .badge {
+        float: none;
+        margin: 1em auto;
+        position: static;
+    }
+
+    .rtd_doc_footer .badge.revsys-inline {
+        margin-right: auto;
+        margin-bottom: 2em;
+    }
+
+    table.indextable {
+        display: block;
+        width: auto;
+    }
+
+    .indextable tr {
+        display: block;
+    }
+
+    .indextable td {
+        display: block;
+        padding: 0;
+        width: auto !important;
+    }
+
+    .indextable td dt {
+        margin: 1em 0;
+    }
+
+    ul.search {
+        margin-left: 0.25em;
+    }
+
+    ul.search li div.context {
+        font-size: 90%;
+        line-height: 1.1;
+        margin-bottom: 1; /* NOTE(review): missing unit — invalid CSS; likely 1em intended */
+        margin-left: 0;
+    }
+
+}
diff --git a/doc/source/_themes/rtd-fabric/theme.conf b/doc/source/_themes/rtd-fabric/theme.conf
new file mode 100644
index 0000000..0915c43
--- /dev/null
+++ b/doc/source/_themes/rtd-fabric/theme.conf
@@ -0,0 +1,3 @@
+[theme]
+inherit = default
+stylesheet = rtd.css
diff --git a/doc/source/_themes/sphinx_dendropy_theme/logo.html b/doc/source/_themes/sphinx_dendropy_theme/logo.html
new file mode 100644
index 0000000..c413180
--- /dev/null
+++ b/doc/source/_themes/sphinx_dendropy_theme/logo.html
@@ -0,0 +1,3 @@
+<div style="text-align: center; padding-top: 20px; padding-bottom: 5px; width: 100%;">
+    <a href="{{pathto('index')}}"><img src="{{pathto('_static/dendropy_logo.png', 1)}}" /></a>
+</div>
diff --git a/doc/source/_themes/sphinx_dendropy_theme/side_supplemental.html b/doc/source/_themes/sphinx_dendropy_theme/side_supplemental.html
new file mode 100644
index 0000000..ffe4318
--- /dev/null
+++ b/doc/source/_themes/sphinx_dendropy_theme/side_supplemental.html
@@ -0,0 +1,78 @@
+
+    <!-- Documentation -->
+    <div style="border-top: double 1px white; padding-top: 10px;">
+        <h3>Documentation</h3>
+        <ul>
+            <li><a href="{{pathto('primer/index')}}">The DendroPy Primer</a></li>
+            <li><a href="{{pathto('library/index')}}">Library API Reference</a></li>
+            <li>
+                <a href="{{pathto('schemas/index')}}">Schemas</a>
+                <ul>
+                    <li><a href="{{pathto('schemas/fasta')}}">FASTA</a></li>
+                    <li><a href="{{pathto('schemas/newick')}}">Newick</a></li>
+                    <li><a href="{{pathto('schemas/nexml')}}">NeXML</a></li>
+                    <li><a href="{{pathto('schemas/nexus')}}">Nexus</a></li>
+                    <li><a href="{{pathto('schemas/phylip')}}">PHYLIP</a></li>
+                </ul>
+            </li>
+            <li>
+                <a href="{{pathto('programs/index')}}">Programs</a>
+                <ul>
+                    <li><a href="{{pathto('programs/sumtrees')}}">SumTrees</a></li>
+                </ul>
+            </li>
+            <li><a href="{{pathto('glossary')}}">Glossary and Terminological Reference</a></li>
+            <li><a href="{{pathto('changes')}}">Change History</a></li>
+        </ul>
+    </div>
+
+    <!-- Downloads -->
+    <div style="border-top: double 1px white; padding-top: 10px;">
+        <h3>Obtaining</h3>
+        <ul>
+            <li><a target="_blank" href="http://pypi.python.org/pypi/DendroPy">Install from the Python Package Index</a></li>
+            <li><a target="_blank" href="http://pypi.python.org/packages/source/D/DendroPy/DendroPy-{{version}}.tar.gz">Download the Source Code Archive</a></li>
+            <li><a target="_blank" href="http://github.com/jeetsukumaran/DendroPy">Clone the Source Code Repository</a></li>
+        </ul>
+    </div>
+
+    <!-- Discussions -->
+    <div style="border-top: double 1px white; padding-top: 10px; position: relative;">
+        <h3><span style="text-align: left">Discussion</span><span style="position: absolute; right: 0; top: 10px "><img src="{{pathto('_static/google-groups-logo1.png', 1)}}" height="20px" alt="Google Groups" /></span></h3>
+        <div style="margin-top: 15px;">
+            <p style="font-size: 90%; margin-top: 3px; clear: both;">Join the <a href="http://groups.google.com/group/dendropy-users?hl=en">"DendroPy Users"</a> group to follow and participate in discussion, troubleshooting, help, information, suggestions, etc. on the usage and development of the DendroPy phylogenetic computing library.</p>
+            <form action="http://groups.google.com/group/dendropy-users/boxsubscribe">
+                <input type=text name=email>
+                <input type=submit name="sub" value="Subscribe">
+            </form>
+            <p style="font-size: 90%; clear: both; padding-top: 5px; padding-bottom: 10px;">Enter your e-mail address in the box above and click the "subscribe" button to subscribe to the <a href="http://groups.google.com/group/dendropy-users?hl=en">"dendropy-users"</a> group, or click <a href="http://groups.google.com/group/dendropy-users?hl=en">here</a> to visit this group page directly.</p>
+        </div>
+    </div>
+
+    <!-- Announcements -->
+    <div style="border-top: double 1px white; padding-top: 10px; position: relative;">
+        <h3><span style="text-align: left">Announcements</span><span style="position: absolute; right: 0; top: 10px "><img src="{{pathto('_static/google-groups-logo1.png', 1)}}" height="20px" alt="Google Groups" /></span></h3>
+        <div style="margin-top: 15px;">
+            <p style="font-size: 90%; margin-top: 3px; clear: both;">Join the <a href="http://groups.google.com/group/dendropy-announce?hl=en">"DendroPy Announcements"</a> group to receive announcements of new releases, updates, changes and other news of interest to DendroPy users and developers.</p>
+            <form action="http://groups.google.com/group/dendropy-announce/boxsubscribe">
+                <input type=text name=email>
+                <input type=submit name="sub" value="Subscribe">
+            </form>
+            <p style="font-size: 90%; clear: both; padding-top: 5px; padding-bottom: 10px;">Enter your e-mail address in the box above and click the "subscribe" button to subscribe to the <a href="http://groups.google.com/group/dendropy-announce?hl=en">"dendropy-announce"</a> group, or click <a href="http://groups.google.com/group/dendropy-announce?hl=en">here</a> to visit this group page directly.</p>
+        </div>
+    </div>
+
+    <!-- Development -->
+    <div style="border-top: double 1px white; padding-top: 10px; position: relative; padding-bottom: 15px; margin-bottom:5px;">
+        <h3><span style="text-align: left"><a href="https://github.com/jeetsukumaran/DendroPy/">Development</a></span><a href="https://github.com/jeetsukumaran/DendroPy/"><span style="position: absolute; right: 0; top: 10px "><img src="{{pathto('_static/Octocat.png', 1)}}" height="30px" alt="GitHub" /></span></a></h3>
+        <div style="margin-top: 15px;">
+            <!-- <a href="https://github.com/jeetsukumaran/DendroPy/issues">Issues</a> • <a href="https://github.com/jeetsukumaran/DendroPy/subscription">Watch</a> • <a href="https://github.com/jeetsukumaran/DendroPy/fork">Fork</a> • <a href="https://github.com/jeetsukumaran/DendroPy/stargazers">Star</a> • <a href="https://github.com/jeetsukumaran/">Follow</a> -->
+            <ul>
+                <li>                <span style="font-weight: bold;"><a href="https://github.com/jeetsukumaran/DendroPy/issues"><code>Issues</code></a></span> <span style="font-style: italic; font-size:80%;"> - Report bugs or request features</span></li>
+                <li>     <span style="font-weight: bold;"><a href="https://github.com/jeetsukumaran/DendroPy/subscription"><code> Watch</code></a></span> <span style="font-style: italic; font-size:80%;"> - Follow development activity</span></li>
+                <li>        <span style="font-weight: bold;"><a href="https://github.com/jeetsukumaran/DendroPy/fork"><code>  Fork</code></a></span> <span style="font-style: italic; font-size:80%;"> - Contribute and collaborate</span></li>
+                <li>  <span style="font-weight: bold;"><a href="https://github.com/jeetsukumaran/DendroPy/stargazers"><code>  Star</code></a></span> <span style="font-style: italic; font-size:80%;"> - Throw some glitter, add some glamour</span></li>
+            </ul>
+        </div>
+    </div>
+
+
diff --git a/doc/source/_themes/sphinx_dendropy_theme/static/rtd.css b/doc/source/_themes/sphinx_dendropy_theme/static/rtd.css
new file mode 100644
index 0000000..7880779
--- /dev/null
+++ b/doc/source/_themes/sphinx_dendropy_theme/static/rtd.css
@@ -0,0 +1,775 @@
+/*
+ * rtd.css
+ * ~~~~~~~~~~~~~~~
+ *
+ * Sphinx stylesheet -- sphinxdoc theme.  Originally created by
+ * Armin Ronacher for Werkzeug.
+ *
+ * Customized for ReadTheDocs by Eric Pierce & Eric Holscher
+ *
+ * Customized for DendroPy by Jeet Sukumaran
+ *
+ * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/* RTD colors
+ * light blue: #e8ecef
+ * medium blue: #8ca1af
+ * dark blue: #465158
+ * dark grey: #444444
+ *
+ * white hover: #d1d9df;
+ * medium blue hover: #697983;
+ * green highlight: #8ecc4c
+ * light blue (project bar): #e8ecef
+ */
+
+@import url("basic.css");
+
+/* PAGE LAYOUT -------------------------------------------------------------- */
+
+body {
+    font: 100%/1.5 "ff-meta-web-pro-1","ff-meta-web-pro-2",Arial,"Helvetica Neue",sans-serif;
+    text-align: center;
+    color: black;
+    background-color: #465158;
+    padding: 0;
+    margin: 0;
+}
+
+div.document {
+    text-align: left;
+    background-color: #e8ecef;
+}
+
+div.bodywrapper {
+    background-color: #ffffff;
+    border-left: 1px solid #ccc;
+    border-bottom: 1px solid #ccc;
+    margin: 0 0 0 18em;
+}
+
+div.body {
+    margin: 0;
+    padding: 0.5em 1.3em;
+    max-width: 55em;
+    min-width: 20em;
+}
+
+div.related {
+    font-size: 1em;
+    background-color: #465158;
+}
+
+div.documentwrapper {
+    float: left;
+    width: 100%;
+    background-color: #e8ecef;
+}
+
+
+/* HEADINGS --------------------------------------------------------------- */
+
+h1 {
+    margin: 0;
+    padding: 0.7em 0 0.3em 0;
+    font-size: 1.5em;
+    line-height: 1.15;
+    color: #111;
+    clear: both;
+}
+
+h2 {
+    margin: 2em 0 0.2em 0;
+    font-size: 1.35em;
+    padding: 0;
+    color: #465158;
+}
+
+h3 {
+    margin: 1em 0 -0.3em 0;
+    font-size: 1.2em;
+    color: #6c818f;
+}
+
+div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a {
+    color: black;
+}
+
+h1 a.anchor, h2 a.anchor, h3 a.anchor, h4 a.anchor, h5 a.anchor, h6 a.anchor {
+    display: none;
+    margin: 0 0 0 0.3em;
+    padding: 0 0.2em 0 0.2em;
+    color: #aaa !important;
+}
+
+h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor,
+h5:hover a.anchor, h6:hover a.anchor {
+    display: inline;
+}
+
+h1 a.anchor:hover, h2 a.anchor:hover, h3 a.anchor:hover, h4 a.anchor:hover,
+h5 a.anchor:hover, h6 a.anchor:hover {
+    color: #777;
+    background-color: #eee;
+}
+
+
+/* LINKS ------------------------------------------------------------------ */
+
+/* Normal links get a pseudo-underline */
+a {
+    /* color: #6688aa; */
+    color: #206088;
+    text-decoration: none;
+    border-bottom: 1px solid #ccc;
+}
+
+/* Links in sidebar, TOC, index trees and tables have no underline */
+.sphinxsidebar a,
+.toctree-wrapper a,
+.indextable a,
+#indices-and-tables a {
+    color: #206088;
+    text-decoration: none;
+    border-bottom: none;
+}
+
+/* Most links get an underline-effect when hovered */
+a:hover,
+div.toctree-wrapper a:hover,
+.indextable a:hover,
+#indices-and-tables a:hover {
+    color: #cc4400;
+    text-decoration: none;
+    border-bottom: 1px solid #cc4400;
+}
+
+/* Footer links */
+div.footer a {
+    color: #86989B;
+    text-decoration: none;
+    border: none;
+}
+div.footer a:hover {
+    color: #a6b8bb;
+    text-decoration: underline;
+    border: none;
+}
+
+/* Permalink anchor (subtle grey with a red hover) */
+div.body a.headerlink {
+    color: #ccc;
+    font-size: 1em;
+    margin-left: 6px;
+    padding: 0 4px 0 4px;
+    text-decoration: none;
+    border: none;
+}
+div.body a.headerlink:hover {
+    color: #c60f0f;
+    border: none;
+}
+
+
+/* NAVIGATION BAR --------------------------------------------------------- */
+
+div.related ul {
+    height: 2.5em;
+}
+
+div.related ul li {
+    margin: 0;
+    padding: 0.65em 0;
+    float: left;
+    display: block;
+    color: white; /* For the >> separators */
+    font-size: 0.8em;
+}
+
+div.related ul li.right {
+    float: right;
+    margin-right: 5px;
+    color: transparent; /* Hide the | separators */
+}
+
+/* "Breadcrumb" links in nav bar */
+div.related ul li a {
+    order: none; /* NOTE(review): invalid — "order" takes an integer; likely a mangled "border: none" (already declared below) */
+    background-color: inherit;
+    font-weight: bold;
+    margin: 6px 0 6px 4px;
+    line-height: 1.75em;
+    color: #ffffff;
+    padding: 0.4em 0.8em;
+    border: none;
+    border-radius: 3px;
+}
+/* previous / next / modules / index links look more like buttons */
+div.related ul li.right a {
+    margin: 0.375em 0;
+    background-color: #697983;
+    text-shadow: 0 1px rgba(0, 0, 0, 0.5);
+    border-radius: 3px;
+    -webkit-border-radius: 3px;
+    -moz-border-radius: 3px;
+}
+/* All navbar links light up as buttons when hovered */
+div.related ul li a:hover {
+    background-color: #8ca1af;
+    color: #ffffff;
+    text-decoration: none;
+    border-radius: 3px;
+    -webkit-border-radius: 3px;
+    -moz-border-radius: 3px;
+}
+/* Take extra precautions for tt within links */
+a tt,
+div.related ul li a tt {
+    background: inherit !important;
+    color: inherit !important;
+}
+
+
+/* SIDEBAR ---------------------------------------------------------------- */
+
+div.sphinxsidebarwrapper {
+    padding: 0;
+}
+
+img.logo {
+    padding-top: 15px;
+    padding-top: 15px; /* NOTE(review): duplicate of previous line — probably meant padding-bottom */
+}
+
+div.sphinxsidebar {
+    margin: 0;
+    margin-left: -100%;
+    float: left;
+    top: 3em;
+    left: 0;
+    padding: 0 1em;
+    width: 20em;
+    font-size: 0.80em;
+    text-align: left;
+    background-color: #e8ecef;
+}
+
+div.sphinxsidebar h3, div.sphinxsidebar h4 {
+    margin: 1.2em 0 0.3em 0;
+    font-size: 1em;
+    padding: 0;
+    color: #222222;
+    font-family: "ff-meta-web-pro-1", "ff-meta-web-pro-2", "Arial", "Helvetica Neue", sans-serif;
+}
+
+div.sphinxsidebar h3 a {
+    color: #444444;
+}
+
+div.sphinxsidebar ul,
+div.sphinxsidebar p {
+    margin-top: 0;
+    padding-left: 0;
+    line-height: 130%;
+    background-color: #e8ecef;
+}
+
+/* No bullets for nested lists, but a little extra indentation */
+div.sphinxsidebar ul ul {
+    list-style-type: none;
+    margin-left: 1em;
+    padding: 0;
+}
+
+/* A little top/bottom padding to prevent adjacent links' borders
+ * from overlapping each other */
+div.sphinxsidebar ul li {
+    padding: 1px 0;
+}
+
+/* A little left-padding to make these align with the ULs */
+div.sphinxsidebar p.topless {
+    padding-left: 0 0 0 1em;
+}
+
+/* Make these into hidden one-liners */
+div.sphinxsidebar ul li,
+div.sphinxsidebar p.topless {
+    white-space: nowrap;
+    overflow: hidden;
+}
+/* ...which become visible when hovered */
+div.sphinxsidebar ul li:hover,
+div.sphinxsidebar p.topless:hover {
+    overflow: visible;
+}
+
+/* Search text box and "Go" button */
+#searchbox {
+    margin-top: 2em;
+    margin-bottom: 1em;
+    background: #ddd;
+    padding: 0.5em;
+    border-radius: 6px;
+    -moz-border-radius: 6px;
+    -webkit-border-radius: 6px;
+}
+#searchbox h3 {
+    margin-top: 0;
+}
+
+/* Make search box and button abut and have a border */
+input,
+div.sphinxsidebar input {
+    border: 1px solid #999;
+    float: left;
+}
+
+/* Search textbox */
+input[type="text"] {
+    margin: 0;
+    padding: 0 3px;
+    height: 20px;
+    width: 144px;
+    border-top-left-radius: 3px;
+    border-bottom-left-radius: 3px;
+    -moz-border-radius-topleft: 3px;
+    -moz-border-radius-bottomleft: 3px;
+    -webkit-border-top-left-radius: 3px;
+    -webkit-border-bottom-left-radius: 3px;
+}
+/* Search button */
+input[type="submit"] {
+    margin: 0 0 0 -1px; /* -1px prevents a double-border with textbox */
+    height: 22px;
+    color: #444;
+    background-color: #e8ecef;
+    padding: 1px 4px;
+    font-weight: bold;
+    border-top-right-radius: 3px;
+    border-bottom-right-radius: 3px;
+    -moz-border-radius-topright: 3px;
+    -moz-border-radius-bottomright: 3px;
+    -webkit-border-top-right-radius: 3px;
+    -webkit-border-bottom-right-radius: 3px;
+}
+input[type="submit"]:hover {
+    color: #ffffff;
+    background-color: #8ecc4c;
+}
+
+div.sphinxsidebar p.searchtip {
+    clear: both;
+    padding: 0.5em 0 0 0;
+    background: #ddd;
+    color: #666;
+    font-size: 0.9em;
+}
+
+/* Sidebar links are unusual */
+div.sphinxsidebar li a,
+div.sphinxsidebar p a {
+    background: #e8ecef; /* In case links overlap main content */
+    border-radius: 3px;
+    -moz-border-radius: 3px;
+    -webkit-border-radius: 3px;
+    border: 1px solid transparent; /* To prevent things jumping around on hover */
+    padding: 0 5px 0 5px;
+}
+div.sphinxsidebar li a:hover,
+div.sphinxsidebar p a:hover {
+    color: #cc4400;
+    text-decoration: none;
+    border: 1px solid #cc4400;
+}
+
+/* Tweak any link appearing in a heading */
+div.sphinxsidebar h3 a {
+}
+
+
+
+
+/* OTHER STUFF ------------------------------------------------------------ */
+
+cite, code, tt {
+    font-family: 'Consolas', 'Deja Vu Sans Mono',
+                 'Bitstream Vera Sans Mono', monospace;
+    font-size: 0.95em;
+    letter-spacing: 0.01em;
+}
+
+tt {
+    background-color: #f2f2f2;
+    color: #444;
+}
+
+tt.descname, tt.descclassname, tt.xref {
+    border: 0;
+}
+
+hr {
+    border: 1px solid #abc;
+    margin: 2em;
+}
+
+pre, #_fontwidthtest {
+    font-family: 'Consolas', 'Deja Vu Sans Mono',
+                 'Bitstream Vera Sans Mono', monospace;
+    margin: 1em 2em;
+    font-size: 0.95em;
+    letter-spacing: 0.015em;
+    line-height: 120%;
+    padding: 0.5em;
+    border: 1px solid #ccc;
+    background-color: #eee;
+    border-radius: 6px;
+    -moz-border-radius: 6px;
+    -webkit-border-radius: 6px;
+}
+
+pre a {
+    color: inherit;
+    text-decoration: underline;
+}
+
+td.linenos pre {
+    padding: 0.5em 0;
+}
+
+div.quotebar {
+    background-color: #f8f8f8;
+    max-width: 250px;
+    float: right;
+    padding: 2px 7px;
+    border: 1px solid #ccc;
+}
+
+div.topic {
+    background-color: #f8f8f8;
+}
+
+table {
+    border-collapse: collapse;
+    margin: 0 -0.5em 0 -0.5em;
+}
+
+table td, table th {
+    padding: 0.2em 0.5em 0.2em 0.5em;
+}
+
+
+/* ADMONITIONS AND WARNINGS ------------------------------------------------- */
+
+/* Shared by admonitions and warnings */
+div.admonition, div.warning {
+    font-size: 0.9em;
+    margin: 2em;
+    padding: 0;
+    /*
+    border-radius: 6px;
+    -moz-border-radius: 6px;
+    -webkit-border-radius: 6px;
+    */
+}
+div.admonition p, div.warning p {
+    margin: 0.5em 1em 0.5em 1em;
+    padding: 0;
+}
+div.admonition pre, div.warning pre {
+    margin: 0.4em 1em 0.4em 1em;
+}
+div.admonition p.admonition-title,
+div.warning p.admonition-title {
+    margin: 0;
+    padding: 0.1em 0 0.1em 0.5em;
+    color: white;
+    font-weight: bold;
+    font-size: 1.1em;
+    text-shadow: 0 1px rgba(0, 0, 0, 0.5);
+}
+div.admonition ul, div.admonition ol,
+div.warning ul, div.warning ol {
+    margin: 0.1em 0.5em 0.5em 3em;
+    padding: 0;
+}
+
+
+/* Admonitions only */
+div.admonition {
+    border: 1px solid #609060;
+    background-color: #e9ffe9;
+}
+div.admonition p.admonition-title {
+    background-color: #70A070;
+    border-bottom: 1px solid #609060;
+}
+
+
+/* Warnings only */
+div.warning {
+    border: 1px solid #900000;
+    background-color: #ffe9e9;
+}
+div.warning p.admonition-title {
+    background-color: #b04040;
+    border-bottom: 1px solid #900000;
+}
+
+
+
+div.versioninfo {
+    margin: 1em 0 0 0;
+    border: 1px solid #ccc;
+    background-color: #DDEAF0;
+    padding: 8px;
+    line-height: 1.3em;
+    font-size: 0.9em;
+}
+
+.viewcode-back {
+    font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
+                 'Verdana', sans-serif;
+}
+
+div.viewcode-block:target {
+    background-color: #f4debf;
+    border-top: 1px solid #ac9;
+    border-bottom: 1px solid #ac9;
+}
+
+dl {
+    margin: 1em 0 2.5em 0;
+}
+
+/* Highlight target when you click an internal link */
+dt:target {
+    background: #ffe080;
+}
+/* Don't highlight whole divs */
+div.highlight {
+    background: transparent;
+}
+/* But do highlight spans (so search results can be highlighted) */
+span.highlight {
+    background: #ffe080;
+}
+
+div.footer {
+    background-color: #465158;
+    color: #eeeeee;
+    padding: 0 2em 2em 2em;
+    clear: both;
+    font-size: 0.8em;
+    text-align: center;
+}
+
+p {
+    margin: 0.8em 0 0.5em 0;
+}
+
+.section p img {
+    margin: 1em 2em;
+}
+
+
+/* MOBILE LAYOUT -------------------------------------------------------------- */
+
+@media screen and (max-width: 600px) {
+
+    h1, h2, h3, h4, h5 {
+        position: relative;
+    }
+
+    ul {
+        padding-left: 1.75em;
+    }
+
+    div.bodywrapper a.headerlink, #indices-and-tables h1 a {
+        color: #e6e6e6;
+        font-size: 80%;
+        float: right;
+        line-height: 1.8;
+        position: absolute;
+        right: -0.7em;
+        visibility: inherit;
+    }
+
+    div.bodywrapper h1 a.headerlink, #indices-and-tables h1 a {
+        line-height: 1.5;
+    }
+
+    pre {
+        font-size: 0.7em;
+        overflow: auto;
+        word-wrap: break-word;
+        white-space: pre-wrap;
+    }
+
+    div.related ul {
+        height: 2.5em;
+        padding: 0;
+        text-align: left;
+    }
+
+    div.related ul li {
+        clear: both;
+        color: #465158;
+        padding: 0.2em 0;
+    }
+
+    div.related ul li:last-child {
+        border-bottom: 1px dotted #8ca1af;
+        padding-bottom: 0.4em;
+        margin-bottom: 1em;
+        width: 100%;
+    }
+
+    div.related ul li a {
+        color: #465158;
+        padding-right: 0;
+    }
+
+    div.related ul li a:hover {
+        background: inherit;
+        color: inherit;
+    }
+
+    div.related ul li.right {
+        clear: none;
+        padding: 0.65em 0;
+        margin-bottom: 0.5em;
+    }
+
+    div.related ul li.right a {
+        color: #fff;
+        padding-right: 0.8em;
+    }
+
+    div.related ul li.right a:hover {
+        background-color: #8ca1af;
+    }
+
+    div.body {
+        clear: both;
+        min-width: 0;
+        word-wrap: break-word;
+    }
+
+    div.bodywrapper {
+        margin: 0 0 0 0;
+    }
+
+    div.sphinxsidebar {
+        float: none;
+        margin: 0;
+        width: auto;
+    }
+
+    div.sphinxsidebar input[type="text"] {
+        height: 2em;
+        line-height: 2em;
+        width: 70%;
+    }
+
+    div.sphinxsidebar input[type="submit"] {
+        height: 2em;
+        margin-left: 0.5em;
+        width: 20%;
+    }
+
+    div.sphinxsidebar p.searchtip {
+        background: inherit;
+        margin-bottom: 1em;
+    }
+
+    div.sphinxsidebar ul li, div.sphinxsidebar p.topless {
+        white-space: normal;
+    }
+
+    .bodywrapper img {
+        display: block;
+        margin-left: auto;
+        margin-right: auto;
+        max-width: 100%;
+    }
+
+    div.documentwrapper {
+        float: none;
+    }
+
+    div.admonition, div.warning, pre, blockquote {
+        margin-left: 0em;
+        margin-right: 0em;
+    }
+
+    .body p img {
+        margin: 0;
+    }
+
+    #searchbox {
+        background: transparent;
+    }
+
+    .related:not(:first-child) li {
+        display: none;
+    }
+
+    .related:not(:first-child) li.right {
+        display: block;
+    }
+
+    div.footer {
+        padding: 1em;
+    }
+
+    .rtd_doc_footer .badge {
+        float: none;
+        margin: 1em auto;
+        position: static;
+    }
+
+    .rtd_doc_footer .badge.revsys-inline {
+        margin-right: auto;
+        margin-bottom: 2em;
+    }
+
+    table.indextable {
+        display: block;
+        width: auto;
+    }
+
+    .indextable tr {
+        display: block;
+    }
+
+    .indextable td {
+        display: block;
+        padding: 0;
+        width: auto !important;
+    }
+
+    .indextable td dt {
+        margin: 1em 0;
+    }
+
+    ul.search {
+        margin-left: 0.25em;
+    }
+
+    ul.search li div.context {
+        font-size: 90%;
+        line-height: 1.1;
+        /* Fix: "margin-bottom: 1" had no unit, which is invalid CSS and
+           causes browsers to drop the declaration; "1em" matches the
+           scale of the neighboring spacing values. */
+        margin-bottom: 1em;
+        margin-left: 0;
+    }
+
+}
+
+/* SPECIAL ELEMENTS  -------------------------------------------------------------- */
+
+.copyrightblock { font-size: 80% }
+.licenseblock { font-weight: bold; text-align: center; }
diff --git a/doc/source/_themes/sphinx_dendropy_theme/theme.conf b/doc/source/_themes/sphinx_dendropy_theme/theme.conf
new file mode 100644
index 0000000..e49d114
--- /dev/null
+++ b/doc/source/_themes/sphinx_dendropy_theme/theme.conf
@@ -0,0 +1,3 @@
+[theme]
+inherit = classic
+stylesheet = rtd.css
diff --git a/doc/source/acknowledgements.inc b/doc/source/acknowledgements.inc
new file mode 100644
index 0000000..f6b5915
--- /dev/null
+++ b/doc/source/acknowledgements.inc
@@ -0,0 +1,5 @@
+Acknowledgments
+================
+
+Portions of |DendroPy|_ were developed under `CIPRES <http://www.phylo.org>`_, a multi-site collaboration funded by the `NSF <http://www.nsf.gov/>`_ Information Technology Research (ITR) program grant entitled "`BUILDING THE TREE OF LIFE: A National Resource for Phyloinformatics and Computational Phylogenetics <http://www.phylo.org/about/acknowledgements>`_" and `NSF <http://www.nsf.gov/>`_ ATOL grant 0732920 to Mark Holder.
+
diff --git a/doc/source/changes.rst b/doc/source/changes.rst
new file mode 100644
index 0000000..a3857f8
--- /dev/null
+++ b/doc/source/changes.rst
@@ -0,0 +1,3 @@
+#######################
+DendroPy Change History
+#######################
diff --git a/doc/source/citation.inc b/doc/source/citation.inc
new file mode 100644
index 0000000..1179baa
--- /dev/null
+++ b/doc/source/citation.inc
@@ -0,0 +1,15 @@
+
+Citation
+=========
+
+If you use this library either in whole or in part in your analysis, or use any code derived from it, please cite it as:
+
+    |dendropy_citation|
+
+Note that, in the interests of scientific reproducibility, in addition to noting the primary citation for DendroPy as given above in the reference section, you should note in the text of your publications (e.g., in the "Methods" section, or, at the very least, in the "Supplemental Materials" section) the specific version of DendroPy that you used (e.g., "DendroPy version 4.0.0, revision 78e6a63"). You can get useful information about your DendroPy installation by typing "``python -m dendropy``" in your shell.
+
+If you use or reference any part of this *documentation*, you can cite it as:
+
+    Sukumaran, J. and Mark T. Holder. The DendroPy Phylogenetic Computing Library Documentation. Retrieved |today|, from http://dendropy.org/.
+
+
diff --git a/doc/source/conf.py b/doc/source/conf.py
new file mode 100644
index 0000000..f88ee4e
--- /dev/null
+++ b/doc/source/conf.py
@@ -0,0 +1,559 @@
+# -*- coding: utf-8 -*-
+#
+# DendroPy documentation build configuration file, created by
+# sphinx-quickstart on Wed Mar 19 16:36:08 2014.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import time
+from sphinx.ext import autodoc
+from dendropy import __version__ as PROJECT_VERSION
+
+# -- Sphinx Hackery ------------------------------------------------
+
+# Following allows for a docstring of a method to be inserted "nakedly"
+# (without the signature etc.) into the current context by, for example::
+#
+#       .. autodocstringonly:: dendropy.dataio.newickreader.NewickReader.__init__
+#
+# Based on:
+#
+#   http://stackoverflow.com/questions/7825263/including-docstring-in-sphinx-documentation
+class DocStringOnlyMethodDocumenter(autodoc.MethodDocumenter):
+    # Custom autodoc documenter that inserts only a method's docstring
+    # into the current context -- the directive header (signature line)
+    # is suppressed.  Invoked as ".. autodocstringonly:: <dotted.path>".
+    objtype = "docstringonly"
+
+    # NOTE(review): comment and value disagree -- the original comment said
+    # "do not indent the content", but this sets a 4-space content indent;
+    # confirm which is intended.
+    content_indent = "    "
+
+    # Suppress the directive header so the docstring appears "nakedly",
+    # without the usual signature/annotation preamble.
+    def add_directive_header(self, sig):
+        pass
+
+    # def add_line(self, line, source, *lineno):
+    #     """Append one line of generated reST to the output."""
+    #     print self.indent + line
+    #     self.directive.result.append(self.indent + line, source, *lineno)
+
+class KeywordArgumentsOnlyMethodDocumenter(autodoc.MethodDocumenter):
+    # Custom autodoc documenter that emits only the ":Keyword Arguments:"
+    # portion of a method's generated documentation (that field and
+    # everything after it), so shared keyword-argument documentation can
+    # be spliced into multiple pages.
+    objtype = "keywordargumentsonly"
+    priority = 0 # do not override the normal autodocumenter for methods
+
+    # NOTE(review): comment/value mismatch in the original ("do not indent")
+    # -- this sets a 4-space content indent; confirm intent.
+    content_indent = "    "
+
+    # Suppress the directive header (signature line) entirely.
+    def add_directive_header(self, sig):
+        pass
+
+    def add_line(self, line, source, *lineno):
+        # Once the ":Keyword Arguments:" field label is seen, start
+        # emitting.  The label is replaced by 19 spaces (its own width)
+        # so the following field-body lines keep their reST alignment.
+        if ":Keyword Arguments:" in line:
+            line = line.replace(":Keyword Arguments:", "                   ")
+            self._emit_line = True
+        # _emit_line stays True for the rest of this documenter instance,
+        # so every subsequent line is appended to the output; lines before
+        # the field are silently dropped.
+        if getattr(self, "_emit_line", False):
+            self.directive.result.append(self.indent + line, source, *lineno)
+
+def setup(app):
+    # Sphinx extension hook: called when this conf.py is loaded; registers
+    # the custom autodoc documenter(s) defined above with the application.
+#     app.add_autodocumenter(DocStringOnlyMethodDocumenter)
+    app.add_autodocumenter(KeywordArgumentsOnlyMethodDocumenter)
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    'sphinx.ext.autodoc',
+    'sphinx.ext.doctest',
+    'sphinx.ext.intersphinx',
+    'sphinx.ext.todo',
+    'sphinx.ext.coverage',
+    'sphinx.ext.mathjax',
+    'sphinx.ext.ifconfig',
+    'sphinx.ext.viewcode',
+    'sphinx.ext.autosummary',
+    'sphinxcontrib.napoleon', # requires: pip install sphinxcontrib-napoleon
+    # 'numpydoc', # requires: pip install numpydoc
+]
+
+# If 'both', then class docstring and class.__init__() docstring combined for
+# class documentation.
+# If 'init', then only class.__init__() docstring shown for class documentation
+# (class docstring omitted).
+# If not specified, then only class docstring shown for class documentation
+# (__init__ docstring omitted).
+autoclass_content = 'both' # or 'init'
+
+# numpydoc settings
+# numpydoc_show_class_members = False
+# numpydoc_class_members_toctree = False
+
+# Napoleon settings
+# napoleon_google_docstring = True
+# napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+# napoleon_use_admonition_for_examples = False
+# napoleon_use_admonition_for_notes = False
+# napoleon_use_admonition_for_references = False
+# napoleon_use_ivar = False
+# napoleon_use_param = False
+napoleon_use_rtype = False
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'DendroPy'
+copyright = u'2009-{}, Jeet Sukumaran and Mark T. Holder'.format(time.strftime('%Y'))
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = PROJECT_VERSION
+# The full version, including alpha/beta/rc tags.
+release = PROJECT_VERSION
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+default_role = "any"
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+add_function_parentheses = False
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+rst_prolog = """
+.. |js| replace:: Jeet Sukumaran
+.. _js: http://www.jeetworks.org/about
+.. |mth| replace:: Mark T. Holder
+.. _mth: http://people.ku.edu/~mtholder
+
+.. |DendroPy| replace:: DendroPy
+.. _DendroPy: http://www.dendropy.org/
+.. |dendropy_homepage_url| replace:: http://www.dendropy.org/
+.. |dendropy_primer_url| replace:: http://www.dendropy.org/primer/index.html
+.. |dendropy_library_url| replace:: http://www.dendropy.org/library/index.html
+.. |dendropy_download_url| replace:: http://pypi.python.org/pypi/DendroPy
+.. |dendropy_public_repo_url| replace:: http://github.com/jeetsukumaran/DendroPy
+
+.. |Python| replace:: Python
+.. _Python: http://www.python.org/
+.. |Python26| replace:: Python 2.6
+.. _Python 2.6: http://www.python.org/download/releases/2.6/
+.. |Python27| replace:: Python 2.7
+.. _Python 2.7: http://www.python.org/download/releases/2.7/
+.. |Python34| replace:: Python 3.4.0
+.. _Python 3.4.0: https://www.python.org/download/releases/3.4.0/
+.. |Python2| replace:: Python 2
+.. _Python 2: http://www.python.org/download/releases/2.7/
+.. |Python3| replace:: Python 3
+.. _Python 3: https://www.python.org/download/releases/3.4.0/
+.. |setuptools| replace:: setuptools
+.. _setuptools: http://pypi.python.org/pypi/setuptools
+.. |pip| replace:: pip
+.. _pip: http://pypi.python.org/pypi/pip
+.. |Git| replace:: Git
+.. _Git: http://git-scm.com/
+
+.. |dendropy_logo| replace:: /_static/dendropy_logo.png
+.. |dendropy_library_doc| replace:: /library/index
+.. |dendropy_primer_doc| replace:: /primer/index
+.. |sumtrees_doc| replace:: /programs/sumtrees
+
+.. |dendropy_announce| replace:: DendroPy Announcements
+.. _dendropy_announce: http://groups.google.com/group/dendropy-announce
+.. |dendropy_users| replace:: DendroPy Users
+.. _dendropy_users: http://groups.google.com/group/dendropy-users
+.. |dendropy_issues| replace:: DendroPy Issues
+.. _dendropy_issues: https://github.com/jeetsukumaran/DendroPy/issues
+
+.. |Taxon| replace:: :class:`~dendropy.datamodel.taxonmodel.Taxon`
+.. |TaxonNamespace| replace:: :class:`~dendropy.datamodel.taxonmodel.TaxonNamespace`
+.. |TaxonNamespaceMapping| replace:: :class:`~dendropy.datamodel.taxonmodel.TaxonNamespaceMapping`
+.. |Tree| replace:: :class:`~dendropy.datamodel.treemodel.Tree`
+.. |Node| replace:: :class:`~dendropy.datamodel.treemodel.Node`
+.. |Edge| replace:: :class:`~dendropy.datamodel.treemodel.Edge`
+.. |Bipartition| replace:: :class:`~dendropy.datamodel.treemodel.Bipartition`
+.. |TreeList| replace:: :class:`~dendropy.datamodel.treecollectionmodel.TreeList`
+.. |TreeArray| replace:: :class:`~dendropy.datamodel.treecollectionmodel.TreeArray`
+.. |SplitDistribution| replace:: :class:`~dendropy.datamodel.treecollectionmodel.SplitDistribution`
+.. |SplitDistributionSummarizer| replace:: :class:`~dendropy.datamodel.treecollectionmodel.SplitDistributionSummarizer`
+.. |DataSet| replace:: :class:`~dendropy.datamodel.datasetmodel.DataSet`
+.. |StateIdentity| replace:: :class:`~dendropy.datamodel.charstatemodel.StateIdentity`
+.. |StateAlphabet| replace:: :class:`~dendropy.datamodel.charstatemodel.StateAlphabet`
+.. |CharacterMatrix| replace:: :class:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix`
+.. |DnaCharacterMatrix| replace:: :class:`~dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix`
+.. |RnaCharacterMatrix| replace:: :class:`~dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix`
+.. |ProteinCharacterMatrix| replace:: :class:`~dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix`
+.. |InfiniteSitesCharacterMatrix| replace:: :class:`~dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix`
+.. |RestrictionSitesCharacterMatrix| replace:: :class:`~dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix`
+.. |StandardCharacterMatrix| replace:: :class:`~dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix`
+.. |ContinuousCharacterMatrix| replace:: :class:`~dendropy.datamodel.charmatrixmodel.ContinuousCharacterMatrix`
+.. |CharacterDataSequence| replace:: :class:`~dendropy.datamodel.charmatrixmodel.CharacterDataSequence`
+.. |ContinuousCharacterDataSequence| replace:: :class:`~dendropy.datamodel.charmatrixmodel.ContinuousCharacterDataSequence`
+.. |DnaCharacterDataSequence| replace:: :class:`~dendropy.datamodel.charmatrixmodel.DnaCharacterDataSequence`
+.. |CharacterType| replace:: :class:`~dendropy.datamodel.charmatrixmodel.CharacterType`
+.. |Annotation| replace:: :class:`~dendropy.datamodel.basemodel.Annotation`
+.. |AnnotationSet| replace:: :class:`~dendropy.datamodel.basemodel.AnnotationSet`
+.. |Annotable| replace:: :class:`~dendropy.datamodel.basemodel.Annotable`
+
+.. |get| replace::  :py:meth:`get`
+.. |put| replace::  :py:meth:`put`
+.. |read| replace::  :py:meth:`read`
+.. |write| replace::  :py:meth:`write`
+
+.. |get_from_methods| replace::  :py:meth:`get_from_*() <get_from_*>`
+.. |read_from_methods| replace::  :py:meth:`read_from_*() <read_from_*>`
+.. |write_to_methods| replace::  :py:meth:`write_to_*() <read_from_*>`
+
+.. |True| replace:: `True`
+.. |False| replace:: `False`
+.. |None| replace:: `None`
+
+.. |FigTree| replace:: FigTree
+.. _FigTree: http://tree.bio.ed.ac.uk/software/figtree/
+.. |RAxML| replace:: RAxML
+.. _RAxML: http://sco.h-its.org/exelixis/software.html
+.. |SeqGen| replace:: Seq-Gen
+.. _SeqGen: http://tree.bio.ed.ac.uk/software/seqgen/
+.. |GenBank| replace:: GenBank
+.. _GenBank: http://www.ncbi.nlm.nih.gov/genbank/
+
+.. |Schemas| replace:: :doc:`/schemas/index`
+.. |FASTA| replace:: :doc:`/schemas/fasta`
+.. |Newick| replace:: :doc:`/schemas/newick`
+.. |Nexus| replace:: :doc:`/schemas/nexus`
+.. |NeXML| replace:: :doc:`/schemas/nexml`
+.. |Phylip| replace:: :doc:`/schemas/phylip`
+
+"""
+
+rst_prolog += """\
+.. |dendropy_source_archive_url| replace:: http://pypi.python.org/packages/source/D/DendroPy/DendroPy-%s.tar.gz
+.. |dendropy_source_archive| replace:: DendroPy source code archive
+.. _dendropy_source_archive: http://pypi.python.org/packages/source/D/DendroPy/DendroPy-%s.tar.gz
+""" % (version, version)
+
+rst_prolog += """\
+.. |dendropy_citation| replace:: Sukumaran, J. and Mark T. Holder. 2010. DendroPy: A Python library for phylogenetic computing. *Bioinformatics* 26: 1569-1571.
+.. |dendropy_copyright| replace:: Copyright {copyright}. All rights reserved.
+""".format(copyright=copyright)
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = 'sphinx_dendropy_theme'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+html_theme_path = ["_themes"]
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = "_static/dendropy_logo.png"
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+html_sidebars = {
+    "**" : ["logo.html", "searchbox.html", "localtoc.html", "relations.html", "side_supplemental.html"],
+}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+html_show_sourcelink = False
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+html_show_sphinx = False
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'DendroPydoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+# latex_documents = [
+#   ('index', 'DendroPy.tex', u'DendroPy Documentation',
+#    u'Jeet Sukumaran and Mark T. Holder', 'manual'),
+# ]
+latex_documents = [
+  ('index', 'DendroPy.tex', u'DendroPy Documentation',
+   u'Jeet Sukumaran and Mark T. Holder', 'manual'),
+  ('primer/index', 'DendroPy-Primer.tex', u'DendroPy Primer',
+   u'Jeet Sukumaran and Mark T. Holder', 'manual'),
+  ('library/index', 'DendroPy-Library-API.tex', u'DendroPy Library API Reference',
+   u'Jeet Sukumaran and Mark T. Holder', 'manual'),
+  ('programs/sumtrees', 'DendroPy-SumTrees.tex', u'SumTrees User Manual',
+   u'Jeet Sukumaran and Mark T. Holder', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    # ('index', 'dendropy', u'DendroPy Documentation',
+    #  [u'Jeet Sukumaran and Mark T. Holder'], 1),
+    ('library/index', 'dendropy', u'DendroPy Library API Reference',
+     [u'Jeet Sukumaran and Mark T. Holder'], 1),
+    ('primer/index', 'dendropy-primer', u'DendroPy Primer',
+     [u'Jeet Sukumaran and Mark T. Holder'], 1),
+    ('programs/sumtrees', 'sumtrees', u'SumTrees User Manual',
+     [u'Jeet Sukumaran and Mark T. Holder'], 1),
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+  ('library/index', 'DendroPy', u'DendroPy Documentation',
+   u'Jeet Sukumaran and Mark T. Holder', 'DendroPy', 'Python library for phylogenetic computing',
+   'Miscellaneous'),
+  ('primer/index', 'DendroPy-Primer', u'DendroPy Primer',
+   u'Jeet Sukumaran and Mark T. Holder', 'DendroPy', 'Python library for phylogenetic computing',
+   'Miscellaneous'),
+  ('programs/sumtrees', 'SumTrees', u'SumTrees Documentation',
+   u'Jeet Sukumaran and Mark T. Holder', 'DendroPy', 'Python library for phylogenetic computing',
+   'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
+
+
+# -- Options for Epub output ----------------------------------------------
+
+# Bibliographic Dublin Core info.
+epub_title = u'DendroPy'
+epub_author = u'Jeet Sukumaran and Mark T. Holder'
+epub_publisher = u'Jeet Sukumaran and Mark T. Holder'
+epub_copyright = u'2015, Jeet Sukumaran and Mark T. Holder'
+
+# The basename for the epub file. It defaults to the project name.
+#epub_basename = u'DendroPy'
+
+# The HTML theme for the epub output. Since the default themes are not optimized
+# for small screen space, using the same theme for HTML and epub output is
+# usually not wise. This defaults to 'epub', a theme designed to save visual
+# space.
+#epub_theme = 'epub'
+
+# The language of the text. It defaults to the language option
+# or en if the language is not set.
+#epub_language = ''
+
+# The scheme of the identifier. Typical schemes are ISBN or URL.
+#epub_scheme = ''
+
+# The unique identifier of the text. This can be a ISBN number
+# or the project homepage.
+#epub_identifier = ''
+
+# A unique identification for the text.
+#epub_uid = ''
+
+# A tuple containing the cover image and cover page html template filenames.
+#epub_cover = ()
+
+# A sequence of (type, uri, title) tuples for the guide element of content.opf.
+#epub_guide = ()
+
+# HTML files that should be inserted before the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_pre_files = []
+
+# HTML files that should be inserted after the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_post_files = []
+
+# A list of files that should not be packed into the epub file.
+epub_exclude_files = ['search.html']
+
+# The depth of the table of contents in toc.ncx.
+#epub_tocdepth = 3
+
+# Allow duplicate toc entries.
+#epub_tocdup = True
+
+# Choose between 'default' and 'includehidden'.
+#epub_tocscope = 'default'
+
+# Fix unsupported image types using the PIL.
+#epub_fix_images = False
+
+# Scale large images.
+#epub_max_image_width = 0
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#epub_show_urls = 'inline'
+
+# If false, no index is generated.
+#epub_use_index = True
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'http://docs.python.org/': None}
diff --git a/doc/source/downloading.rst b/doc/source/downloading.rst
new file mode 100644
index 0000000..6a94f50
--- /dev/null
+++ b/doc/source/downloading.rst
@@ -0,0 +1,51 @@
+###################################
+Downloading and Installing DendroPy
+###################################
+
+DendroPy can be installed directly from the `Python Package Index <http://pypi.python.org/pypi/DendroPy/>`_ using a package manager such as |pip|_ or |setuptools|_, or alternatively the source code can be downloaded and manually installed.
+
+Installing From the Python Package Index
+========================================
+
+DendroPy is "easy_installable" directly from the `Python Package Index <http://pypi.python.org/pypi/DendroPy/>`_.
+If you have |pip|_ set up on your system, you can install the latest release of DendroPy by running::
+
+    $ sudo pip install -U dendropy
+
+If you do not have |pip|_ installed, you should *definitely* `install it <https://pip.pypa.io/en/latest/installing.html>`_ !
+Note: the "``sudo``" command should only be used if installing system-wide on a machine on which you have administrative privileges. Otherwise, you would use the "``--user``" flag for a local user install::
+
+    $ pip install --user -U dendropy
+
+Source Download and Installation
+================================
+
+The latest release of DendroPy (|version|), can be downloaded directly from here:
+
+    |dendropy_source_archive_url|
+
+Once downloaded, it can be installed by running:
+
+.. parsed-literal::
+
+    $ tar -xvzf DendroPy-|version|.tar.gz
+    $ cd DendroPy-|version|
+    $ sudo python setup.py install
+
+Cloning the Source Code Repository
+==================================
+
+The DendroPy source code is version-controlled using |Git|_, and the `DendroPy Git repository <http://github.com/jeetsukumaran/DendroPy>`_ can be cloned by running::
+
+    $ git clone git://github.com/jeetsukumaran/DendroPy.git
+
+If you plan to use this repository code as your main library code, you probably want to install DendroPy in developer mode::
+
+    $ cd DendroPy
+    $ sudo python setup.py develop
+
+You will, of course, need to get yourself |Git|_ for the above to work:
+
+    - `Source <http://www.kernel.org/pub/software/scm/git/git-1.6.6.tar.gz>`_
+    - `OS X binaries <http://code.google.com/p/git-osx-installer/downloads/list?can=3>`_
+    - `Microsoft Windows <http://code.google.com/p/msysgit/downloads/list>`_
diff --git a/doc/source/examples/bdtree_multi1.py b/doc/source/examples/bdtree_multi1.py
new file mode 100644
index 0000000..c23b7ac
--- /dev/null
+++ b/doc/source/examples/bdtree_multi1.py
@@ -0,0 +1,22 @@
+#! /usr/bin/env python
+
+import random
+import dendropy
+from dendropy.simulate import treesim
+
+def generate(birth_rates, death_rates):
+    assert len(birth_rates) == len(death_rates)
+    tree = dendropy.Tree()
+    for i, br in enumerate(birth_rates):
+        tree = treesim.birth_death_tree(birth_rates[i],
+                                   death_rates[i],
+                                   max_time=random.randint(1,8),
+                                   tree=tree,
+                                   assign_taxa=False,
+                                   repeat_until_success=True)
+        print(tree.as_string(schema='newick'))
+    tree.randomly_assign_taxa(create_required_taxa=True)
+    return tree
+
+tree = generate([0.1, 0.6, 0.1], [0.1, 0.6, 0.1])
+print(tree.as_string(schema='newick'))
diff --git a/doc/source/examples/bdtree_multi2.py b/doc/source/examples/bdtree_multi2.py
new file mode 100644
index 0000000..0734e0e
--- /dev/null
+++ b/doc/source/examples/bdtree_multi2.py
@@ -0,0 +1,20 @@
+#! /usr/bin/env python
+
+import random
+import dendropy
+from dendropy.simulate import treesim
+
+def generate(mean, sd, num_periods):
+    tree = dendropy.Tree()
+    for i in range(num_periods):
+        tree = treesim.birth_death_tree(birth_rate=random.gauss(mean, sd),
+                                   death_rate=random.gauss(mean, sd),
+                                   max_time=random.randint(1,5),
+                                   tree=tree,
+                                   assign_taxa=False,
+                                   repeat_until_success=True)
+    tree.randomly_assign_taxa(create_required_taxa=True)
+    return tree
+
+tree = generate(0.1, 0.01, 100)
+print(tree.as_string(schema='newick'))
diff --git a/doc/source/examples/bibtex_annotations1.py b/doc/source/examples/bibtex_annotations1.py
new file mode 100644
index 0000000..3d82f09
--- /dev/null
+++ b/doc/source/examples/bibtex_annotations1.py
@@ -0,0 +1,24 @@
+#! /usr/bin/env python
+
+import dendropy
+
+citation = """\
+ at article{HeathHH2012,
+	Author = {Tracy A. Heath and Mark T. Holder and John P. Huelsenbeck},
+	Doi = {10.1093/molbev/msr255},
+	Journal = {Molecular Biology and Evolution},
+	Number = {3},
+	Pages = {939-955},
+	Title = {A {Dirichlet} Process Prior for Estimating Lineage-Specific Substitution Rates.},
+	Url = {http://mbe.oxfordjournals.org/content/early/2011/11/04/molbev.msr255.abstract},
+	Volume = {29},
+	Year = {2012}
+	}
+"""
+
+
+dataset = dendropy.DataSet.get(
+        data="(A,(B,(C,(D,E))));",
+        schema="newick")
+dataset.annotations.add_citation(citation)
+print dataset.as_string(schema="nexml")
diff --git a/doc/source/examples/bibtex_annotations2.py b/doc/source/examples/bibtex_annotations2.py
new file mode 100644
index 0000000..c8cf187
--- /dev/null
+++ b/doc/source/examples/bibtex_annotations2.py
@@ -0,0 +1,23 @@
+#! /usr/bin/env python
+
+import dendropy
+
+citation = {
+    "BibType": "article",
+    "Author": "Tracy A. Heath and Mark T. Holder and John P. Huelsenbeck",
+    "Doi": "10.1093/molbev/msr255",
+    "Journal": "Molecular Biology and Evolution",
+    "Number": "3",
+    "Pages": "939-955",
+    "Title": "A {Dirichlet} Process Prior for Estimating Lineage-Specific Substitution Rates.",
+    "Url": "http://mbe.oxfordjournals.org/content/early/2011/11/04/molbev.msr255.abstract",
+    "Volume": "29",
+    "Year": "2012",
+}
+
+
+dataset = dendropy.DataSet.get(
+        data="(A,(B,(C,(D,E))));",
+        schema="newick")
+dataset.annotations.add_citation(citation)
+print dataset.as_string(schema="nexml")
diff --git a/doc/source/examples/bibtex_annotations3.py b/doc/source/examples/bibtex_annotations3.py
new file mode 100644
index 0000000..98f5570
--- /dev/null
+++ b/doc/source/examples/bibtex_annotations3.py
@@ -0,0 +1,29 @@
+#! /usr/bin/env python
+
+import dendropy
+
+citation = """\
+ at article {MEN:MEN2925,
+author = {BROWN, JEREMY M. and SAVIDGE, KEVIN and McTAVISH, EMILY JANE B.},
+title = {DIM SUM: demography and individual migration simulated using a Markov chain},
+journal = {Molecular Ecology Resources},
+volume = {11},
+number = {2},
+publisher = {Blackwell Publishing Ltd},
+issn = {1755-0998},
+url = {http://dx.doi.org/10.1111/j.1755-0998.2010.02925.x},
+doi = {10.1111/j.1755-0998.2010.02925.x},
+pages = {358--363},
+keywords = {dispersal, Markov chain, migration, phylogeography, simulation},
+year = {2011},
+}
+"""
+
+dataset = dendropy.DataSet.get(
+        data="(A,(B,(C,(D,E))));",
+        schema="newick")
+dataset.annotations.add_citation(citation,
+        store_as="prism")
+dataset.annotations.add_citation(citation,
+        store_as="dublin")
+print dataset.as_string(schema="nexml")
diff --git a/doc/source/examples/build_tree_programmatically.py b/doc/source/examples/build_tree_programmatically.py
new file mode 100644
index 0000000..491d19e
--- /dev/null
+++ b/doc/source/examples/build_tree_programmatically.py
@@ -0,0 +1,51 @@
+import dendropy
+
+taxon_namespace = dendropy.TaxonNamespace(["A", "B", "C", "D",])
+tree = dendropy.Tree(taxon_namespace=taxon_namespace)
+
+
+# Create and add a new child node to the seed node,
+# assigning it an edge length:
+#
+#     (seed)
+#      /
+#     /
+#    ch1
+#
+ch1 = tree.seed_node.new_child()
+ch1.edge.length = 1
+
+# Can also assign edge length on construction:
+#
+#     (seed)
+#      / \
+#     /   \
+#   ch1   ch2
+#
+ch2 = tree.seed_node.new_child(edge_length=1)
+
+# Can also add an existing node as child
+#
+#       (seed)
+#       /   \
+#      /     \
+#    ch1     ch2
+#   /  \     /  \
+#  ch3 ch4  ch5 ch6
+ch3 = dendropy.Node(edge_length=1)
+ch4 = dendropy.Node(edge_length=2)
+ch1.add_child(ch3)
+ch1.add_child(ch4)
+ch5 = dendropy.Node(edge_length=1)
+ch6 = dendropy.Node(edge_length=2)
+# Note: this clears/deletes existing child nodes before adding the new ones;
+ch2.set_child_nodes([ch5, ch6])
+
+# Assign taxa
+ch3.taxon = taxon_namespace.get_taxon("A")
+ch4.taxon = taxon_namespace.get_taxon("B")
+ch5.taxon = taxon_namespace.get_taxon("C")
+ch6.taxon = taxon_namespace.get_taxon("D")
+
+print(tree.as_string("newick"))
+print(tree.as_ascii_plot())
diff --git a/doc/source/examples/char_mat_concat.py b/doc/source/examples/char_mat_concat.py
new file mode 100644
index 0000000..6fa8c0a
--- /dev/null
+++ b/doc/source/examples/char_mat_concat.py
@@ -0,0 +1,20 @@
+import dendropy
+taxa = dendropy.TaxonNamespace()
+d1 = dendropy.DnaCharacterMatrix.get(
+        path="primates.chars.subsets-1stpos.nexus",
+        schema="nexus",
+        taxon_namespace=taxa)
+print("d1: {} sequences, {} characters".format(len(d1), d1.max_sequence_size))
+d2 = dendropy.DnaCharacterMatrix.get(
+        path="primates.chars.subsets-2ndpos.nexus",
+        schema="nexus",
+        taxon_namespace=taxa)
+print("d2: {} sequences, {} characters".format(len(d2), d2.max_sequence_size))
+d3 = dendropy.DnaCharacterMatrix.get(
+        path="primates.chars.subsets-3rdpos.nexus",
+        schema="nexus",
+        taxon_namespace=taxa)
+print("d3: {} sequences, {} characters".format(len(d3), d3.max_sequence_size))
+d_all = dendropy.DnaCharacterMatrix.concatenate([d1,d2,d3])
+print("d_all: {} sequences, {} characters".format(len(d_all), d_all.max_sequence_size))
+print("Subsets: {}".format(d_all.character_subsets))
diff --git a/doc/source/examples/char_mat_concat2.py b/doc/source/examples/char_mat_concat2.py
new file mode 100644
index 0000000..7553120
--- /dev/null
+++ b/doc/source/examples/char_mat_concat2.py
@@ -0,0 +1,12 @@
+import dendropy
+taxa = dendropy.TaxonNamespace()
+paths = [
+        "primates.chars.subsets-1stpos.nexus",
+        "primates.chars.subsets-2ndpos.nexus",
+        "primates.chars.subsets-3rdpos.nexus",
+        ]
+d_all = dendropy.DnaCharacterMatrix.concatenate_from_paths(
+        paths=paths,
+        schema="nexus")
+print("d_all: {} sequences, {} characters".format(len(d_all), d_all.max_sequence_size))
+print("Subsets: {}".format(d_all.character_subsets))
diff --git a/doc/source/examples/chars_access1.py b/doc/source/examples/chars_access1.py
new file mode 100644
index 0000000..e29d683
--- /dev/null
+++ b/doc/source/examples/chars_access1.py
@@ -0,0 +1,18 @@
+import dendropy
+
+dna = dendropy.DnaCharacterMatrix.get(
+        path="primates.chars.nexus",
+        schema="nexus")
+
+# access by dereferencing taxon label
+s1 = dna["Macaca sylvanus"]
+
+# access by taxon index
+s2 = dna[0]
+s3 = dna[4]
+s4 = dna[-2]
+
+# access by taxon instance
+t = dna.taxon_namespace.get_taxon(label="Macaca sylvanus")
+s5 = dna[t]
+
diff --git a/doc/source/examples/chars_access2.py b/doc/source/examples/chars_access2.py
new file mode 100644
index 0000000..86d28d1
--- /dev/null
+++ b/doc/source/examples/chars_access2.py
@@ -0,0 +1,18 @@
+import dendropy
+
+dna = dendropy.DnaCharacterMatrix.get(
+        path="primates.chars.nexus",
+        schema="nexus")
+
+# iterate over taxa
+for taxon in dna:
+    print("{}: {}".format(taxon.label, dna[taxon]))
+
+# iterate over the sequences
+for seq in dna.values():
+    print(seq)
+
+# iterate over taxon/sequence pairs
+for taxon, seq in dna.items():
+    print("{}: {}".format(taxon.label, seq))
+
diff --git a/doc/source/examples/chars_access3.py b/doc/source/examples/chars_access3.py
new file mode 100644
index 0000000..171fed3
--- /dev/null
+++ b/doc/source/examples/chars_access3.py
@@ -0,0 +1,32 @@
+import dendropy
+
+cc = dendropy.ContinuousCharacterMatrix.get(
+        path="pythonidae_continuous.chars.nexml",
+        schema="nexml")
+
+s1 = cc[0]
+
+print(type(s1))
+# <class 'dendropy.datamodel.charmatrixmodel.ContinuousCharacterDataSequence'>
+
+print(len(s1))
+# 100
+
+for v in s1:
+    print("{}, {}".format(type(v), str(v)))
+# <type 'float'>, -0.0230088801573
+# <type 'float'>, -0.327376261257
+# <type 'float'>, -0.483676644025
+# ...
+# ...
+
+print(s1.values())
+# [-0.0230088801573, -0.327376261257, -0.483676644025, ...
+
+print(s1.symbols_as_list())
+# ['-0.0230088801573', '-0.327376261257', '-0.483676644025', ...
+
+print(s1.symbols_as_string())
+# -0.0230088801573 -0.327376261257 -0.483676644025 0.0868649474847 ...
+
+
diff --git a/doc/source/examples/chars_access4.py b/doc/source/examples/chars_access4.py
new file mode 100644
index 0000000..daeb5a0
--- /dev/null
+++ b/doc/source/examples/chars_access4.py
@@ -0,0 +1,32 @@
+import dendropy
+
+dna = dendropy.DnaCharacterMatrix.get(
+        path="primates.chars.nexus",
+        schema="nexus")
+
+s1 = dna[0]
+
+print(type(s1))
+# <class 'dendropy.datamodel.charmatrixmodel.DnaCharacterDataSequence'>
+
+print(len(s1))
+# 898
+
+for v in s1:
+    print("{}, {}".format(repr(v), str(v)))
+# <<StateIdentity at 0x10134a290: 'A'>, A
+# <<StateIdentity at 0x10134a290: 'A'>, A
+# <<StateIdentity at 0x10134a350: 'G'>, G
+# ...
+# ...
+
+print(s1.values())
+# [<StateIdentity at 0x101b4a290: 'A'>, <StateIdentity at 0x101b4a290: 'A'>, <StateIdentity at 0x101b4a350: 'G'>, ...
+
+print(s1.symbols_as_list())
+# ['A', 'A', 'G', 'C', 'T', 'T', 'C', 'A', 'T', ...
+
+print(s1.symbols_as_string())
+# AAGCTTCATAGGAGCAACCATTCT ...
+
+
diff --git a/doc/source/examples/contained_coalescent1.py b/doc/source/examples/contained_coalescent1.py
new file mode 100644
index 0000000..129a72e
--- /dev/null
+++ b/doc/source/examples/contained_coalescent1.py
@@ -0,0 +1,19 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.simulate import treesim
+
+
+sp_tree_str = """\
+[&R] (A:10,(B:6,(C:4,(D:2,E:2):2):2):4);
+"""
+
+sp_tree = dendropy.Tree.get(data=sp_tree_str, schema="newick")
+gene_to_species_map = dendropy.TaxonNamespaceMapping.create_contained_taxon_mapping(
+        containing_taxon_namespace=sp_tree.taxon_namespace,
+        num_contained=3)
+gene_tree = treesim.contained_coalescent_tree(containing_tree=sp_tree,
+    gene_to_containing_taxon_map=gene_to_species_map)
+print(gene_tree.as_string(schema='newick'))
+print(gene_tree.as_ascii_plot())
+
diff --git a/doc/source/examples/contained_coalescent2.py b/doc/source/examples/contained_coalescent2.py
new file mode 100644
index 0000000..6efbb0f
--- /dev/null
+++ b/doc/source/examples/contained_coalescent2.py
@@ -0,0 +1,19 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.simulate import treesim
+
+
+sp_tree_str = """\
+[&R] (A:10,(B:6,(C:4,(D:2,E:2):2):2):4);
+"""
+
+sp_tree = dendropy.Tree.get(data=sp_tree_str, schema="newick")
+gene_to_species_map = dendropy.TaxonNamespaceMapping.create_contained_taxon_mapping(
+        containing_taxon_namespace=sp_tree.taxon_namespace,
+        num_contained=[3, 4, 6, 2, 2,])
+gene_tree = treesim.contained_coalescent_tree(containing_tree=sp_tree,
+    gene_to_containing_taxon_map=gene_to_species_map)
+print(gene_tree.as_string(schema='newick'))
+print(gene_tree.as_ascii_plot())
+
diff --git a/doc/source/examples/ds1.py b/doc/source/examples/ds1.py
new file mode 100644
index 0000000..99e80e0
--- /dev/null
+++ b/doc/source/examples/ds1.py
@@ -0,0 +1,43 @@
+import dendropy
+
+# Create the DataSet to store data
+ds = dendropy.DataSet()
+
+# Set it up to manage all data under a single taxon namespace.
+# HIGHLY RECOMMENDED!
+taxon_namespace = dendropy.TaxonNamespace()
+ds.attach_taxon_namespace(taxon_namespace)
+
+# Read from multiple sources
+
+# Add a collection of trees
+ds.read(
+    path='pythonidae.mle.nex',
+    schema='nexus',)
+
+# Add a collection of characters from a Nexus source
+ds.read(
+    path='pythonidae.chars.nexus',
+    schema='nexus',)
+
+# Add a collection of characters from a FASTA source
+# Note that with this format, we have to explicitly provide the type of data
+ds.read(
+    path='pythonidae_cytb.fasta',
+    schema='fasta',
+    data_type="dna")
+
+# Add a collection of characters from a PHYLIP source
+# Note that with this format, we have to explicitly provide the type of data
+ds.read(
+    path='pythonidae.chars.phylip',
+    schema='phylip',
+    data_type="dna")
+
+# Add a collection of continuous characters from a NeXML source
+ds.read(
+    path='pythonidae_continuous.chars.nexml',
+    schema='nexml',)
+
+
+
diff --git a/doc/source/examples/ds2.py b/doc/source/examples/ds2.py
new file mode 100644
index 0000000..dbe4f51
--- /dev/null
+++ b/doc/source/examples/ds2.py
@@ -0,0 +1,16 @@
+import dendropy
+
+ds1 = dendropy.DataSet()
+taxon_namespace = dendropy.TaxonNamespace()
+ds1.attach_taxon_namespace(taxon_namespace)
+ds1.read(
+    path='pythonidae.mle.nex',
+    schema='nexus',)
+ds1.read(
+    path='pythonidae.chars.nexus',
+    schema='nexus',)
+
+ds2 = dendropy.DataSet(ds1)
+
+# for tx1, tx2 in zip(ds1, ds2):
+#     print(tx1 is tx2)
diff --git a/doc/source/examples/ds4.py b/doc/source/examples/ds4.py
new file mode 100644
index 0000000..4139ba5
--- /dev/null
+++ b/doc/source/examples/ds4.py
@@ -0,0 +1,10 @@
+import dendropy
+treelist1 = dendropy.TreeList.get(
+        path='pythonidae.mle.nex',
+        schema='nexus')
+cytb = dendropy.DnaCharacterMatrix.get(
+    path='pythonidae_cytb.fasta',
+    schema='fasta')
+ds = dendropy.DataSet([cytb, treelist1])
+ds.unify_taxon_namespaces()
+
diff --git a/doc/source/examples/ds5.py b/doc/source/examples/ds5.py
new file mode 100644
index 0000000..430d991
--- /dev/null
+++ b/doc/source/examples/ds5.py
@@ -0,0 +1,11 @@
+import dendropy
+ds = dendropy.DataSet()
+treelist1 = dendropy.TreeList.get(
+        path='pythonidae.mle.nex',
+        schema='nexus')
+cytb = dendropy.DnaCharacterMatrix.get(
+    path='pythonidae_cytb.fasta',
+    schema='fasta')
+ds.add(treelist1)
+ds.add(cytb)
+ds.unify_taxon_namespaces()
diff --git a/doc/source/examples/dsrw1.py b/doc/source/examples/dsrw1.py
new file mode 100644
index 0000000..94c5f22
--- /dev/null
+++ b/doc/source/examples/dsrw1.py
@@ -0,0 +1,13 @@
+import dendropy
+taxa = dendropy.TaxonNamespace()
+trees = dendropy.TreeList(taxon_namespace=taxa)
+trees.read(path='pythonidae.mb.run1.t', schema='nexus', tree_offset=10)
+trees.read(path='pythonidae.mb.run2.t', schema='nexus', tree_offset=10)
+trees.read(path='pythonidae.mb.run3.t', schema='nexus', tree_offset=10)
+trees.read(path='pythonidae.mb.run4.t', schema='nexus', tree_offset=10)
+ds = dendropy.DataSet([trees])
+ds.read(path='pythonidae_cytb.fasta',
+        schema='fasta',
+        data_type='dna',
+        )
+ds.write(path='pythonidae_combined.nex', schema='nexus')
diff --git a/doc/source/examples/dynamic_annotations1.py b/doc/source/examples/dynamic_annotations1.py
new file mode 100644
index 0000000..6fe6246
--- /dev/null
+++ b/doc/source/examples/dynamic_annotations1.py
@@ -0,0 +1,34 @@
+#! /usr/bin/env python
+
+import dendropy
+import random
+
+categories = {
+    "A" : "N/A",
+    "B" : "N/A",
+    "C" : "N/A",
+    "D" : "N/A",
+    "E" : "N/A"
+}
+tree = dendropy.Tree.get(
+        data="(A,(B,(C,(D,E))));",
+        schema="newick")
+for taxon in tree.taxon_namespace:
+    taxon.category = categories[taxon.label]
+    taxon.annotations.add_bound_attribute("category")
+for node in tree.postorder_node_iter():
+    node.pop_size = None
+    node.annotations.add_bound_attribute("pop_size")
+for node in tree.postorder_node_iter():
+    node.pop_size = random.randint(100, 10000)
+    if node.taxon is not None:
+        if node.pop_size >= 8000:
+            node.taxon.category = "large"
+        elif node.pop_size >= 6000:
+            node.taxon.category = "medium"
+        elif node.pop_size >= 4000:
+            node.taxon.category = "small"
+        elif node.pop_size >= 2000:
+            node.taxon.category = "tiny"
+print tree.as_string(schema="nexml")
+
diff --git a/doc/source/examples/euctree.py b/doc/source/examples/euctree.py
new file mode 100644
index 0000000..41cc967
--- /dev/null
+++ b/doc/source/examples/euctree.py
@@ -0,0 +1,19 @@
+import dendropy
+from dendropy.calculate import treecompare
+
+s1 = "((t5:0.161175,t6:0.161175):0.392293,((t4:0.104381,(t2:0.075411,t1:0.075411):0.028969):0.065840,t3:0.170221):0.383247);"
+s2 = "((t5:2.161175,t6:0.161175):0.392293,((t4:0.104381,(t2:0.075411,t1:0.075411):1):0.065840,t3:0.170221):0.383247);"
+
+tns = dendropy.TaxonNamespace()
+
+tree1 = dendropy.Tree.get(
+        data=s1,
+        schema='newick',
+        taxon_namespace=tns)
+tree2 = dendropy.Tree.get(
+        data=s2,
+        schema='newick',
+        taxon_namespace=tns)
+
+## Euclidean distance = 2.22326363775
+print(treecompare.euclidean_distance(tree1, tree2))
diff --git a/doc/source/examples/find_taxon_node1.py b/doc/source/examples/find_taxon_node1.py
new file mode 100644
index 0000000..d009d0e
--- /dev/null
+++ b/doc/source/examples/find_taxon_node1.py
@@ -0,0 +1,8 @@
+#! /usr/bin/env python
+
+import dendropy
+
+tree = dendropy.Tree.get(path="pythonidae.mle.nex", schema="nexus")
+filter = lambda taxon: True if taxon.label=='Antaresia maculosa' else False
+node = tree.find_node_with_taxon(filter)
+print(node.description())
diff --git a/doc/source/examples/find_taxon_node2.py b/doc/source/examples/find_taxon_node2.py
new file mode 100644
index 0000000..2f53d91
--- /dev/null
+++ b/doc/source/examples/find_taxon_node2.py
@@ -0,0 +1,7 @@
+#! /usr/bin/env python
+
+import dendropy
+
+tree = dendropy.Tree.get(path="pythonidae.mle.nex", schema="nexus")
+node = tree.find_node_with_taxon_label('Antaresia maculosa')
+print(node.description())
diff --git a/doc/source/examples/hiv1.nexus b/doc/source/examples/hiv1.nexus
new file mode 100644
index 0000000..ff4a6c3
--- /dev/null
+++ b/doc/source/examples/hiv1.nexus
@@ -0,0 +1,399 @@
+#NEXUS
+[R-package APE, Wed May 29 14:05:42 2013]
+
+BEGIN TAXA;
+	DIMENSIONS NTAX = 193;
+	TAXLABELS
+		A97DCA1EQTB52
+		A97DCA1MBFE185
+		A97DCA1MBS12
+		A97DCA1MBS30
+		A97DCA1SJDS17
+		A97DCA1KCD9
+		A97DCA1KTB185
+		A97DCA1KFE58
+		A97DCA1KP18
+		A97DCA1KP28
+		A97DCA1KP78
+		A97DCA1MBS63
+		A97DCA2KP82
+		A97DCA2KP86
+		A97DCA2MBCD5
+		A97DCEQS1
+		A97DCKFE326
+		A97DCMBS32
+		A97DCEQS18
+		A97DCKP72
+		A97DCEQS25
+		A97DCKS34
+		A97DCKS47
+		A97DCKMST147
+		A97DCKMST89
+		A97DCKTB6
+		A97DCEQTB44
+		A97DCKDS85
+		A97DCMBS26
+		A97DCKTB79
+		A97DCKTB118
+		A97DCKTB132
+		A97DCKCC4
+		A97DCKP25
+		A97DCKTB7
+		A97DCMBS9
+		A97DCKS36
+		A97DCKTB37
+		A97DCKCD6
+		A97DCMBFE149
+		A97DCMBFE247
+		A97DCKFE198
+		A97DCKP77
+		A97DCKMST52
+		A97DCKS7
+		A97DCKTB36
+		A97DCKP36
+		A97DCKTB16
+		A97DCMBS28
+		A97DCMBTB54
+		A97DCKP5
+		A97DCMBFE155
+		A97DCMBFE244
+		A97DCMBFE78
+		A97DCMBS7
+		A97DCMBS341
+		A97DCKTB157
+		A97DCMBP2
+		A97DCMBS4
+		A97DCMBTB29
+		A97DCMBDS17
+		A97DCEQS45
+		A97DCEQS49
+		A97DCKS56
+		A97DCEQTB14
+		A97DCKTB20
+		A97DCKTB44
+		A97DCKCC2
+		A97DCKCC3
+		A97DCKTB13
+		A97DCKMST140
+		A97DCKMST121
+		A97DCKP43
+		A97DCKP79
+		A97DCKS55
+		A97DCKTB124
+		A97DCKFE4
+		A97DCKS10
+		A97DCKTB48
+		A97DCKFE288
+		A97DCMBFE5
+		A97DCSJFE26
+		A97DCKMST50
+		A97DCKS14
+		U97DCKFE267
+		U97DCKTB119
+		U97DCMBFE250
+		U97DCKMST91
+		E97DCEQS21
+		E97DCEQS5
+		E97DCKP14
+		E97DCEQTB60
+		U97DCKFE45
+		U97DCKTB49
+		D97DCD1KCD4
+		D97DCD1KMST126
+		D97DCD1KS2
+		D97DCKP54
+		D97DCKP44
+		D97DCKTB181
+		D97DCMBS55
+		D97DCMBS56
+		D97DCD2KTB23
+		D97DCKMST66
+		D97DCKMST30
+		D97DCKP1
+		D97DCKS11
+		D97DCKTB27
+		D97DCKTB4
+		D97DCMBS35
+		D97DCD2KS26
+		D97DCKS15
+		D97DCKS39
+		D97DCMBS342
+		D97DCKMST144
+		D97DCKS29
+		D97DCKFE53
+		C97DCKCD11
+		C97DCKFE372
+		C97DCMBFE92
+		C97DCKTB110
+		C97DCMBFE14
+		C97DCMBFE300
+		C97DCMBFE34
+		C97DCMBS37
+		C97DCMBFE61
+		C97DCMBTB58
+		C97DCMBTB11
+		C97DCMBTB10
+		C97DCSJFE59
+		C97DCMBS20
+		C97DCMBTB3
+		C97DCMBS33
+		C97DCMBS80
+		C97DCMBTB13
+		F97DCF1EQS16
+		F97DCF1KP35
+		F97DCF1KP40
+		F97DCF1KTB136
+		F97DCF1KTB50
+		F97DCF1KS50
+		F97DCF1KTB165
+		F97DCF1MBFE183
+		K97DCEQTB43
+		K97DCKTB160
+		K97DCKP13
+		K97DCKTB111
+		K97DCMBFE71
+		K97DCKTB1
+		U97DCKTB17
+		U97DCEQS8
+		G97DCKCC1
+		G97DCKFE181
+		G97DCKTB56
+		G97DCKFE77
+		G97DCKP74
+		G97DCKMST100
+		G97DCKMST85
+		G97DCMBTB7
+		G97DCKS4
+		G97DCKTB142
+		G97DCKTB18
+		G97DCKMST10
+		G97DCKS30
+		G97DCMBFE91
+		G97DCKS27
+		U97DCKTB22
+		U97DCKMST135
+		U97DCEQS29
+		J97DCKFE339
+		J97DCMBTB4
+		J97DCKS22
+		J97DCMBS41
+		J97DCKTB147
+		J97DCKTB14
+		J97DCKS16
+		U97DCKMST120
+		H97DCEQTB1
+		H97DCEQTB80
+		H97DCKP63
+		H97DCKTB158
+		H97DCKS18
+		H97DCKS42
+		H97DCKTB176
+		H97DCKTB62
+		H97DCKCD2
+		H97DCKMST43
+		H97DCKTB140
+		H97DCKTB188
+		H97DCKS38
+		H97DCKS43
+		H97DCKTB32
+		H97DCKTB52
+	;
+END;
+BEGIN TREES;
+	TRANSLATE
+		1	A97DCA1EQTB52,
+		2	A97DCA1MBFE185,
+		3	A97DCA1MBS12,
+		4	A97DCA1MBS30,
+		5	A97DCA1SJDS17,
+		6	A97DCA1KCD9,
+		7	A97DCA1KTB185,
+		8	A97DCA1KFE58,
+		9	A97DCA1KP18,
+		10	A97DCA1KP28,
+		11	A97DCA1KP78,
+		12	A97DCA1MBS63,
+		13	A97DCA2KP82,
+		14	A97DCA2KP86,
+		15	A97DCA2MBCD5,
+		16	A97DCEQS1,
+		17	A97DCKFE326,
+		18	A97DCMBS32,
+		19	A97DCEQS18,
+		20	A97DCKP72,
+		21	A97DCEQS25,
+		22	A97DCKS34,
+		23	A97DCKS47,
+		24	A97DCKMST147,
+		25	A97DCKMST89,
+		26	A97DCKTB6,
+		27	A97DCEQTB44,
+		28	A97DCKDS85,
+		29	A97DCMBS26,
+		30	A97DCKTB79,
+		31	A97DCKTB118,
+		32	A97DCKTB132,
+		33	A97DCKCC4,
+		34	A97DCKP25,
+		35	A97DCKTB7,
+		36	A97DCMBS9,
+		37	A97DCKS36,
+		38	A97DCKTB37,
+		39	A97DCKCD6,
+		40	A97DCMBFE149,
+		41	A97DCMBFE247,
+		42	A97DCKFE198,
+		43	A97DCKP77,
+		44	A97DCKMST52,
+		45	A97DCKS7,
+		46	A97DCKTB36,
+		47	A97DCKP36,
+		48	A97DCKTB16,
+		49	A97DCMBS28,
+		50	A97DCMBTB54,
+		51	A97DCKP5,
+		52	A97DCMBFE155,
+		53	A97DCMBFE244,
+		54	A97DCMBFE78,
+		55	A97DCMBS7,
+		56	A97DCMBS341,
+		57	A97DCKTB157,
+		58	A97DCMBP2,
+		59	A97DCMBS4,
+		60	A97DCMBTB29,
+		61	A97DCMBDS17,
+		62	A97DCEQS45,
+		63	A97DCEQS49,
+		64	A97DCKS56,
+		65	A97DCEQTB14,
+		66	A97DCKTB20,
+		67	A97DCKTB44,
+		68	A97DCKCC2,
+		69	A97DCKCC3,
+		70	A97DCKTB13,
+		71	A97DCKMST140,
+		72	A97DCKMST121,
+		73	A97DCKP43,
+		74	A97DCKP79,
+		75	A97DCKS55,
+		76	A97DCKTB124,
+		77	A97DCKFE4,
+		78	A97DCKS10,
+		79	A97DCKTB48,
+		80	A97DCKFE288,
+		81	A97DCMBFE5,
+		82	A97DCSJFE26,
+		83	A97DCKMST50,
+		84	A97DCKS14,
+		85	U97DCKFE267,
+		86	U97DCKTB119,
+		87	U97DCMBFE250,
+		88	U97DCKMST91,
+		89	E97DCEQS21,
+		90	E97DCEQS5,
+		91	E97DCKP14,
+		92	E97DCEQTB60,
+		93	U97DCKFE45,
+		94	U97DCKTB49,
+		95	D97DCD1KCD4,
+		96	D97DCD1KMST126,
+		97	D97DCD1KS2,
+		98	D97DCKP54,
+		99	D97DCKP44,
+		100	D97DCKTB181,
+		101	D97DCMBS55,
+		102	D97DCMBS56,
+		103	D97DCD2KTB23,
+		104	D97DCKMST66,
+		105	D97DCKMST30,
+		106	D97DCKP1,
+		107	D97DCKS11,
+		108	D97DCKTB27,
+		109	D97DCKTB4,
+		110	D97DCMBS35,
+		111	D97DCD2KS26,
+		112	D97DCKS15,
+		113	D97DCKS39,
+		114	D97DCMBS342,
+		115	D97DCKMST144,
+		116	D97DCKS29,
+		117	D97DCKFE53,
+		118	C97DCKCD11,
+		119	C97DCKFE372,
+		120	C97DCMBFE92,
+		121	C97DCKTB110,
+		122	C97DCMBFE14,
+		123	C97DCMBFE300,
+		124	C97DCMBFE34,
+		125	C97DCMBS37,
+		126	C97DCMBFE61,
+		127	C97DCMBTB58,
+		128	C97DCMBTB11,
+		129	C97DCMBTB10,
+		130	C97DCSJFE59,
+		131	C97DCMBS20,
+		132	C97DCMBTB3,
+		133	C97DCMBS33,
+		134	C97DCMBS80,
+		135	C97DCMBTB13,
+		136	F97DCF1EQS16,
+		137	F97DCF1KP35,
+		138	F97DCF1KP40,
+		139	F97DCF1KTB136,
+		140	F97DCF1KTB50,
+		141	F97DCF1KS50,
+		142	F97DCF1KTB165,
+		143	F97DCF1MBFE183,
+		144	K97DCEQTB43,
+		145	K97DCKTB160,
+		146	K97DCKP13,
+		147	K97DCKTB111,
+		148	K97DCMBFE71,
+		149	K97DCKTB1,
+		150	U97DCKTB17,
+		151	U97DCEQS8,
+		152	G97DCKCC1,
+		153	G97DCKFE181,
+		154	G97DCKTB56,
+		155	G97DCKFE77,
+		156	G97DCKP74,
+		157	G97DCKMST100,
+		158	G97DCKMST85,
+		159	G97DCMBTB7,
+		160	G97DCKS4,
+		161	G97DCKTB142,
+		162	G97DCKTB18,
+		163	G97DCKMST10,
+		164	G97DCKS30,
+		165	G97DCMBFE91,
+		166	G97DCKS27,
+		167	U97DCKTB22,
+		168	U97DCKMST135,
+		169	U97DCEQS29,
+		170	J97DCKFE339,
+		171	J97DCMBTB4,
+		172	J97DCKS22,
+		173	J97DCMBS41,
+		174	J97DCKTB147,
+		175	J97DCKTB14,
+		176	J97DCKS16,
+		177	U97DCKMST120,
+		178	H97DCEQTB1,
+		179	H97DCEQTB80,
+		180	H97DCKP63,
+		181	H97DCKTB158,
+		182	H97DCKS18,
+		183	H97DCKS42,
+		184	H97DCKTB176,
+		185	H97DCKTB62,
+		186	H97DCKCD2,
+		187	H97DCKMST43,
+		188	H97DCKTB140,
+		189	H97DCKTB188,
+		190	H97DCKS38,
+		191	H97DCKS43,
+		192	H97DCKTB32,
+		193	H97DCKTB52
+	;
+	TREE * UNTITLED = [&R] (((((((((((1:182,2:182):1,(3:181,4:181):2):1,5:184):1,(6:180,7:180):5):1,8:186):1,(9:179,(10:178,(11:177,12:177):1):1):8):1,(((13:174,14:174):1,15:175):1,((((16:170,17:170):1,18:171):1,(19:169,20:169):3):1,((((((21:163,22:163):1,23:164):1,(24:162,(25:161,26:161):1):3):1,((((27:157,((28:155,29:155):1,30:156):1):1,(31:154,32:154):4):1,(33:153,(((34:150,35:150):1,36:151):1,(37:149,38:149):3):1):6):1,(((39:146,(40:145,41:145):1):1,((42:143,43:143):1,(44:142,45:142):2) [...]
+END;
diff --git a/doc/source/examples/ladderize.py b/doc/source/examples/ladderize.py
new file mode 100644
index 0000000..3991a9b
--- /dev/null
+++ b/doc/source/examples/ladderize.py
@@ -0,0 +1,22 @@
+#! /usr/bin/env python
+
+import dendropy
+
+tree_str = "[&R] ((A, (B, (C, (D, E)))),(F, (G, H)));"
+
+tree = dendropy.Tree.get(
+        data=tree_str,
+        schema="newick")
+
+print("Before:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+tree.ladderize(ascending=True)
+print("Ladderize, ascending=True:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+tree.ladderize(ascending=False)
+print("Ladderize, ascending=False:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+
diff --git a/doc/source/examples/ltt.py b/doc/source/examples/ltt.py
new file mode 100644
index 0000000..adbcb7b
--- /dev/null
+++ b/doc/source/examples/ltt.py
@@ -0,0 +1,36 @@
+import dendropy
+
+tree = dendropy.Tree.get(
+            path="hiv1.nexus",
+            schema="nexus")
+
+# Returns distance of node furthest from root, i.e., maximum time available on
+# tree
+total_time = tree.max_distance_from_root()
+
+# Divide time span into 10 steps
+step = float(total_time) / 10
+
+# To store tuples of (time, number of lineages)
+ltt = []
+
+# Start at first time step
+current_time = step
+while current_time <= total_time:
+    # Get number of lineages at current time
+    num_lineages = tree.num_lineages_at(current_time)
+    # Store it
+    ltt.append( (current_time, num_lineages) )
+    # Move to next time step
+    current_time += step
+
+# Get the final number of lineages
+# Note: may not be the same as the number of tips if the tree has extinct
+# tips/taxa; though, if this were the case, we would not be dealing with an
+# ultrametric tree.
+if current_time < total_time:
+    ltt.append( (total_time, tree.num_lineages_at(total_time)) )
+
+# Print results
+for t, num_lineages in ltt:
+    print("{:12.8f}\t{}".format(t, num_lineages))
diff --git a/doc/source/examples/majrule.py b/doc/source/examples/majrule.py
new file mode 100644
index 0000000..d1ade1b
--- /dev/null
+++ b/doc/source/examples/majrule.py
@@ -0,0 +1,15 @@
+#! /usr/bin/env python
+
+import dendropy
+
+trees = dendropy.TreeList()
+for tree_file in ['pythonidae.mb.run1.t',
+        'pythonidae.mb.run2.t',
+        'pythonidae.mb.run3.t',
+        'pythonidae.mb.run4.t']:
+    trees.read(
+            path=tree_file,
+            schema='nexus',
+            tree_offset=20)
+con_tree = trees.consensus(min_freq=0.95)
+print(con_tree.as_string(schema='newick'))
diff --git a/doc/source/examples/mcct.py b/doc/source/examples/mcct.py
new file mode 100644
index 0000000..d66d9d4
--- /dev/null
+++ b/doc/source/examples/mcct.py
@@ -0,0 +1,19 @@
+#! /usr/bin/env python
+
+import dendropy
+
+trees = dendropy.TreeList()
+for tree_file in ['pythonidae.mb.run1.t',
+        'pythonidae.mb.run2.t',
+        'pythonidae.mb.run3.t',
+        'pythonidae.mb.run4.t']:
+    trees.read(
+            path=tree_file,
+            schema='nexus',
+            tree_offset=20)
+
+mcct = trees.maximum_product_of_split_support_tree()
+print("\nTree {} maximizes the product of split support (log product = {}): {}".format(trees.index(mcct), mcct.log_product_of_split_support, mcct))
+
+msst = trees.maximum_sum_of_split_support_tree()
+print("\nTree {} maximizes the sum of split support (sum = {}): {}".format(trees.index(msst), msst.sum_of_split_support, msst))
diff --git a/doc/source/examples/mrca.py b/doc/source/examples/mrca.py
new file mode 100644
index 0000000..895d9c4
--- /dev/null
+++ b/doc/source/examples/mrca.py
@@ -0,0 +1,9 @@
+import dendropy
+
+tree = dendropy.Tree.get(path="pythonidae.mle.nex", schema="nexus")
+taxon_labels=['Python sebae',
+              'Python regius',
+              'Python curtus',
+              'Python molurus']
+mrca = tree.mrca(taxon_labels=taxon_labels)
+print(mrca.description())
diff --git a/doc/source/examples/mrca2.py b/doc/source/examples/mrca2.py
new file mode 100644
index 0000000..09990ea
--- /dev/null
+++ b/doc/source/examples/mrca2.py
@@ -0,0 +1,11 @@
+import dendropy
+from dendropy.calculate import treemeasure
+
+tree = dendropy.Tree.get(
+        path="pythonidae.mle.nex",
+        schema="nexus")
+pdm = treemeasure.PatristicDistanceMatrix(tree)
+for taxon1 in tree.taxon_namespace:
+    for taxon2 in tree.taxon_namespace:
+        mrca = pdm.mrca(taxon1, taxon2)
+        print("MRCA of '{}' and '{}' is: {}".format(taxon1.label, taxon2.label, mrca.description()))
diff --git a/doc/source/examples/node_ages1.py b/doc/source/examples/node_ages1.py
new file mode 100644
index 0000000..778944c
--- /dev/null
+++ b/doc/source/examples/node_ages1.py
@@ -0,0 +1,19 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.calculate import treemeasure
+
+trees = dendropy.TreeList.get(
+        path="pythonidae.beast-mcmc.trees",
+        schema="nexus",
+        tree_offset=200)
+maculosa_childreni_ages = []
+for idx, tree in enumerate(trees):
+    tree.calc_node_ages()
+    taxon_labels = ["Antaresia maculosa","Antaresia childreni"]
+    mrca = tree.mrca(taxon_labels=taxon_labels)
+    maculosa_childreni_ages.append(mrca.age)
+print("Mean age of MRCA of 'Antaresia maculosa' and 'Antaresia childreni': %s" \
+    % (float(sum(maculosa_childreni_ages))/len(maculosa_childreni_ages)))
+
+
diff --git a/doc/source/examples/orti1994.nex b/doc/source/examples/orti1994.nex
new file mode 100644
index 0000000..1ba2cf4
--- /dev/null
+++ b/doc/source/examples/orti1994.nex
@@ -0,0 +1,32 @@
+#NEXUS
+
+Begin data;
+    Dimensions ntax=23 nchar=747;
+    Format datatype=dna;
+    Matrix
+
+EPAC_AK1_E CGAAAAATATCCATCTCGCAGTGCTGAGCTAGACA    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_AK2_F CGAAAAGTATCCATCTTGCAGTGCTGAGCTAGACA    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_AK3_K CGCAAAGCACTTGCCCCATAGCGCTAAGCCGCGTT    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_AK4_L CGCAAAGCGCTTGCCCCATAACGCTAAGCCGCGTT    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_AK5_L CGCAAAGCGCTTGCCCCATAACGCTAAGCCGCGTT    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_BC1_A CGAAAAGTATCCATCTCGCAGTGCTGAGCTAGACA    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_BC2_I CGCGAAGCACTTGCCCCATAGCGCTAAGCCGCGTT    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_BC3_J CGCGTAGCACTTGCCCCATAGCGCTAAGCCGCGTT    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_BC4_K CGCAAAGCACTTGCCCCATAGCGCTAAGCCGCGTT    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_BC5_K CGCAAAGCACTTGCCCCATAGCGCTAAGCCGCGTT    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_BC6_L CGCAAAGCGCTTGCCCCATAACGCTAAGCCGCGTT    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_BC7_L CGCAAAGCGCTTGCCCCATAACGCTAAGCCGCGTT    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_BC8_L CGCAAAGCGCTTGCCCCATAACGCTAAGCCGCGTT    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+EPAC_BC9_L CGCAAAGCGCTTGCCCCATAACGCTAAGCCGCGTT    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+WPAC_JA1_A CGAAAAGTATCCATCTCGCAGTGCTGAGCTAGACA    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+WPAC_JA2_A CGAAAAGTATCCATCTCGCAGTGCTGAGCTAGACA    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+WPAC_JA3_A CGAAAAGTATCCATCTCGCAGTGCTGAGCTAGACA    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+WPAC_JA4_B CGAAAGGTATCCATCTCGCAGTGCTGAGTTAGACA    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+WPAC_JA5_C CAAAAAGTATCCATCTCGCAGTGCTGAGTTAGACA    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+WPAC_JA6_C CAAAAAGTATCCATCTCGCAGTGCTGAGTTAGACA    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+WPAC_JA7_D CGAAAAGTATCCATCTCGCAGTGCTGAACTAGACA    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+WPAC_JA8_G CGAAAAGTATCCATCTCGCAGTGCTAAGCTAGACA    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+WPAC_JA9_H TGAAAAGTATCCATCTCGCAGTGCTAAGCTAGACA    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA [...]
+    ;
+End;
diff --git a/doc/source/examples/paup_estimate_model.py b/doc/source/examples/paup_estimate_model.py
new file mode 100644
index 0000000..66320d7
--- /dev/null
+++ b/doc/source/examples/paup_estimate_model.py
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.interop import paup
+
+data = dendropy.DnaCharacterMatrix.get(
+    path="pythonidae.nex",
+    schema="nexus")
+tree = paup.estimate_tree(data,
+        tree_est_criterion='nj')
+est_tree, est_model = paup.estimate_model(data,
+        tree,
+        num_states=2,
+        unequal_base_freqs=True,
+        gamma_rates=False,
+        prop_invar=False)
+for k, v in est_model.items():
+    print("{}: {}".format(k, v))
diff --git a/doc/source/examples/paup_estimate_tree_ml.py b/doc/source/examples/paup_estimate_tree_ml.py
new file mode 100644
index 0000000..a64c4ff
--- /dev/null
+++ b/doc/source/examples/paup_estimate_tree_ml.py
@@ -0,0 +1,15 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.interop import paup
+
+data = dendropy.DnaCharacterMatrix.get(
+    path="pythonidae.nex",
+    schema="nexus")
+tree = paup.estimate_tree(data,
+        tree_est_criterion='likelihood',
+        num_states=2,
+        unequal_base_freqs=True,
+        gamma_rates=False,
+        prop_invar=False)
+print(tree.as_string(schema="newick"))
diff --git a/doc/source/examples/paup_estimate_tree_nj.py b/doc/source/examples/paup_estimate_tree_nj.py
new file mode 100644
index 0000000..c825abe
--- /dev/null
+++ b/doc/source/examples/paup_estimate_tree_nj.py
@@ -0,0 +1,11 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.interop import paup
+
+data = dendropy.DnaCharacterMatrix.get(
+    path="pythonidae.nex",
+    schema="nexus")
+tree = paup.estimate_tree(data,
+        tree_est_criterion='nj')
+print(tree.as_string(schema="newick"))
diff --git a/doc/source/examples/pbhg.py b/doc/source/examples/pbhg.py
new file mode 100644
index 0000000..13d8305
--- /dev/null
+++ b/doc/source/examples/pbhg.py
@@ -0,0 +1,16 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.calculate import treemeasure
+
+trees = dendropy.TreeList.get(
+        path="pythonidae.beast-mcmc.trees",
+        schema="nexus",
+        tree_offset=200)
+pbhg = []
+for idx, tree in enumerate(trees):
+    pbhg.append(treemeasure.pybus_harvey_gamma(tree))
+print("Mean Pybus-Harvey-Gamma: %s" \
+    % (float(sum(pbhg))/len(pbhg)))
+
+
diff --git a/doc/source/examples/pdm.py b/doc/source/examples/pdm.py
new file mode 100644
index 0000000..4ca8769
--- /dev/null
+++ b/doc/source/examples/pdm.py
@@ -0,0 +1,12 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.calculate import treemeasure
+
+tree = dendropy.Tree.get(
+    path="pythonidae.mle.nex",
+    schema="nexus")
+pdm = treemeasure.PatristicDistanceMatrix(tree)
+for i, t1 in enumerate(tree.taxon_namespace):
+    for t2 in tree.taxon_namespace[i+1:]:
+        print("Distance between '%s' and '%s': %s" % (t1.label, t2.label, pdm(t1, t2)))
diff --git a/doc/source/examples/pgstats1.py b/doc/source/examples/pgstats1.py
new file mode 100644
index 0000000..60af2fb
--- /dev/null
+++ b/doc/source/examples/pgstats1.py
@@ -0,0 +1,33 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.calculate import popgenstat
+
+seqs = dendropy.DnaCharacterMatrix.get(
+        path="orti1994.nex",
+        schema="nexus")
+p1 = []
+p2 = []
+for idx, t in enumerate(seqs.taxon_namespace):
+    if t.label.startswith('EPAC'):
+        p1.append(seqs[t])
+    else:
+        p2.append(seqs[t])
+pp = popgenstat.PopulationPairSummaryStatistics(p1, p2)
+
+print('Average number of pairwise differences (total): %s' \
+    % pp.average_number_of_pairwise_differences)
+print('Average number of pairwise differences (between populations): %s' \
+    % pp.average_number_of_pairwise_differences_between)
+print('Average number of pairwise differences (within populations): %s' \
+    % pp.average_number_of_pairwise_differences_within)
+print('Average number of pairwise differences (net): %s' \
+    % pp.average_number_of_pairwise_differences_net)
+print('Number of segregating sites: %s' \
+    % pp.num_segregating_sites)
+print("Watterson's theta: %s" \
+    % pp.wattersons_theta)
+print("Wakeley's Psi: %s" \
+    % pp.wakeleys_psi)
+print("Tajima's D: %s" \
+    % pp.tajimas_d)
diff --git a/doc/source/examples/pic1.py b/doc/source/examples/pic1.py
new file mode 100644
index 0000000..ab59179
--- /dev/null
+++ b/doc/source/examples/pic1.py
@@ -0,0 +1,35 @@
+import dendropy
+from dendropy.model import continuous
+
+taxa = dendropy.TaxonNamespace()
+tree = dendropy.Tree.get(
+        path="primates.cc.tre",
+        schema="newick",
+        taxon_namespace=taxa)
+chars = dendropy.ContinuousCharacterMatrix.get_from_path(
+        "primates.cc.nex",
+        "nexus",
+        taxon_namespace=taxa)
+pic = continuous.PhylogeneticIndependentConstrasts(
+        tree=tree,
+        char_matrix=chars)
+for cidx in range(chars.vector_size):
+    ctree1 = pic.contrasts_tree(character_index=cidx,
+            annotate_pic_statistics=True,
+            state_values_as_node_labels=True,
+            corrected_edge_lengths=False)
+    print(ctree1.as_string("newick",
+                suppress_leaf_taxon_labels=True,
+                suppress_leaf_node_labels=False,
+                suppress_internal_taxon_labels=True,
+                suppress_internal_node_labels=False))
+
+
+
+
+
+
+
+
+
+
diff --git a/doc/source/examples/pic2.py b/doc/source/examples/pic2.py
new file mode 100644
index 0000000..1b75562
--- /dev/null
+++ b/doc/source/examples/pic2.py
@@ -0,0 +1,7 @@
+import dendropy
+dataset = dendropy.DataSet.get(
+    path="primates.cc.combined.nex",
+    schema="nexus")
+tree = dataset.tree_lists[0][0]
+chars = dataset.char_matrices[0]
+
diff --git a/doc/source/examples/pic3.py b/doc/source/examples/pic3.py
new file mode 100644
index 0000000..199cf57
--- /dev/null
+++ b/doc/source/examples/pic3.py
@@ -0,0 +1,10 @@
+import dendropy
+taxa = dendropy.TaxonNamespace()
+tree = dendropy.Tree.get(
+    path="primates.cc.tre",
+    schema="newick",
+    taxon_namespace=taxa)
+chars = dendropy.ContinuousCharacterMatrix.get(
+    path="primates.cc.nex",
+    schema="nexus",
+    taxon_namespace=taxa)
diff --git a/doc/source/examples/pic4.py b/doc/source/examples/pic4.py
new file mode 100644
index 0000000..1ee1eb4
--- /dev/null
+++ b/doc/source/examples/pic4.py
@@ -0,0 +1,26 @@
+import dendropy
+from dendropy.model import continuous
+
+taxa = dendropy.TaxonNamespace()
+tree = dendropy.Tree.get(
+    path="primates.cc.tre",
+    schema="newick",
+    taxon_namespace=taxa)
+chars = dendropy.ContinuousCharacterMatrix.get(
+    path="primates.cc.nex",
+    schema="nexus",
+    taxon_namespace=taxa)
+pic = continuous.PhylogeneticIndependentConstrasts(tree=tree, char_matrix=chars)
+ctree1 = pic.contrasts_tree(character_index=0,
+    annotate_pic_statistics=True,
+    state_values_as_node_labels=False,
+    corrected_edge_lengths=False)
+for nd in ctree1.postorder_internal_node_iter():
+    row = [nd.pic_state_value,
+            nd.pic_state_variance,
+            nd.pic_contrast_raw,
+            nd.pic_edge_length_error]
+    row_str = [(("%10.8f") % i) for i in row]
+    row_str = "    ".join(row_str)
+    label = nd.label.ljust(6)
+    print("%s %s" % (label, row_str))
diff --git a/doc/source/examples/pic_annotated.py b/doc/source/examples/pic_annotated.py
new file mode 100644
index 0000000..6865458
--- /dev/null
+++ b/doc/source/examples/pic_annotated.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.model import continuous
+
+taxa = dendropy.TaxonNamespace()
+tree = dendropy.Tree.get(
+        path="primates.cc.tre",
+        schema="newick",
+        taxon_namespace=taxa)
+chars = dendropy.ContinuousCharacterMatrix.get(
+        path="primates.cc.nex",
+        schema="nexus",
+        taxon_namespace=taxa)
+pic = continuous.PhylogeneticIndependentConstrasts(
+        tree=tree,
+        char_matrix=chars)
+pic_trees = dendropy.TreeList(taxon_namespace=taxa)
+for cidx in range(chars.vector_size):
+    ctree1 = pic.contrasts_tree(character_index=cidx)
+    ctree1.label = "PIC %d" % (cidx+1)
+    pic_trees.append(ctree1)
+print(pic_trees.as_string(schema="nexus"))
+
+
+
+
+
+
+
+
+
diff --git a/doc/source/examples/preorder_filtered_edge_iteration.py b/doc/source/examples/preorder_filtered_edge_iteration.py
new file mode 100644
index 0000000..8803b1a
--- /dev/null
+++ b/doc/source/examples/preorder_filtered_edge_iteration.py
@@ -0,0 +1,10 @@
+#! /usr/bin/env python
+
+import dendropy
+
+mle = dendropy.Tree.get(
+    path='pythonidae.mle.nex',
+    schema='nexus')
+short = lambda edge: True if edge.length < 0.01 else False
+for edge in mle.preorder_edge_iter(short):
+    print(edge.length)
diff --git a/doc/source/examples/preorder_filtered_node_iteration.py b/doc/source/examples/preorder_filtered_node_iteration.py
new file mode 100644
index 0000000..b757824
--- /dev/null
+++ b/doc/source/examples/preorder_filtered_node_iteration.py
@@ -0,0 +1,10 @@
+#! /usr/bin/env python
+
+import dendropy
+
+mle = dendropy.Tree.get(
+    path='pythonidae.mle.nex',
+    schema='nexus')
+multifurcating = lambda x: True if len(x.child_nodes()) > 2 else False
+for nd in mle.preorder_node_iter(multifurcating):
+    print(nd.description(0))
diff --git a/doc/source/examples/primates.cc.combined.nex b/doc/source/examples/primates.cc.combined.nex
new file mode 100644
index 0000000..ef59ecb
--- /dev/null
+++ b/doc/source/examples/primates.cc.combined.nex
@@ -0,0 +1,17 @@
+#NEXUS
+
+BEGIN DATA;
+    DIMENSIONS  NTAX=5 NCHAR=2;
+    FORMAT DATATYPE = CONTINUOUS GAP = - MISSING = ?;
+    MATRIX
+        Homo      4.09434   4.74493
+        Pongo     3.61092   3.33220
+        Macaca    2.37024   3.36730
+        Ateles    2.02815   2.89037
+        Galago   -1.46968   2.30259
+    ;
+END;
+
+BEGIN TREES;
+    TREE 1 = [&R] ((((Homo:0.21,Pongo:0.21)HP:0.28,Macaca:0.49)HPM:0.13,Ateles:0.62)HPMA:0.38,Galago:1.00)Root:0.0;
+END;
diff --git a/doc/source/examples/primates.cc.nex b/doc/source/examples/primates.cc.nex
new file mode 100644
index 0000000..b46d2b3
--- /dev/null
+++ b/doc/source/examples/primates.cc.nex
@@ -0,0 +1,12 @@
+#NEXUS
+BEGIN DATA;
+    DIMENSIONS  NTAX=5 NCHAR=2;
+    FORMAT DATATYPE = CONTINUOUS GAP = - MISSING = ?;
+    MATRIX
+        Homo      4.09434   4.74493
+        Pongo     3.61092   3.33220
+        Macaca    2.37024   3.36730
+        Ateles    2.02815   2.89037
+        Galago   -1.46968   2.30259
+    ;
+END;
diff --git a/doc/source/examples/primates.cc.tre b/doc/source/examples/primates.cc.tre
new file mode 100644
index 0000000..8ed788c
--- /dev/null
+++ b/doc/source/examples/primates.cc.tre
@@ -0,0 +1 @@
+[&R] ((((Homo:0.21,Pongo:0.21)HP:0.28,Macaca:0.49)HPM:0.13,Ateles:0.62)HPMA:0.38,Galago:1.00)Root:0.0;
diff --git a/doc/source/examples/primates.chars.interleaved.nexus b/doc/source/examples/primates.chars.interleaved.nexus
new file mode 100644
index 0000000..78f465e
--- /dev/null
+++ b/doc/source/examples/primates.chars.interleaved.nexus
@@ -0,0 +1,228 @@
+#NEXUS 
+
+[Modified from example data distributed with PAUP* 4.01b.
+ Original Data from:
+	Hayasaka, K., T. Gojobori, and S. Horai. 1988. Molecular phylogeny
+		and evolution of primate mitochondrial DNA. Mol. Biol. Evol.
+		5:626-644.
+]
+
+BEGIN TAXA;
+	DIMENSIONS NTAX=12;
+	TAXLABELS
+        Lemur_catta        	
+        Homo_sapiens       	
+        Pan                	
+        Gorilla            	
+        Pongo              	
+        Hylobates          	
+        Macaca_fuscata     	
+        Macaca_mulatta     	
+        Macaca_fascicularis	
+        Macaca_sylvanus    	
+        Saimiri_sciureus   	
+        Tarsius_syrichta   	
+        ;	
+END;
+
+BEGIN CHARACTERS;
+	DIMENSIONS NCHAR=898;
+	FORMAT DATATYPE=DNA GAP=- INTERLEAVE=yes;
+	MATRIX
+	Lemur_catta         AAGCTTCATAGGAGCAACCATTCTAATAATCGCACATGGCCTTACATCATCCATATTATT
+	Homo_sapiens        AAGCTTCACCGGCGCAGTCATTCTCATAATCGCCCACGGGCTTACATCCTCATTACTATT
+	Pan                 AAGCTTCACCGGCGCAATTATCCTCATAATCGCCCACGGACTTACATCCTCATTATTATT
+	Gorilla             AAGCTTCACCGGCGCAGTTGTTCTTATAATTGCCCACGGACTTACATCATCATTATTATT
+	Pongo               AAGCTTCACCGGCGCAACCACCCTCATGATTGCCCATGGACTCACATCCTCCCTACTGTT
+	Hylobates           AAGCTTTACAGGTGCAACCGTCCTCATAATCGCCCACGGACTAACCTCTTCCCTGCTATT
+	Macaca_fuscata      AAGCTTTTCCGGCGCAACCATCCTTATGATCGCTCACGGACTCACCTCTTCCATATATTT
+	Macaca_mulatta      AAGCTTTTCTGGCGCAACCATCCTCATGATTGCTCACGGACTCACCTCTTCCATATATTT
+	Macaca_fascicularis AAGCTTCTCCGGCGCAACCACCCTTATAATCGCCCACGGGCTCACCTCTTCCATGTATTT
+	Macaca_sylvanus     AAGCTTCTCCGGTGCAACTATCCTTATAGTTGCCCATGGACTCACCTCTTCCATATACTT
+	Saimiri_sciureus    AAGCTTCACCGGCGCAATGATCCTAATAATCGCTCACGGGTTTACTTCGTCTATGCTATT
+	Tarsius_syrichta    AAGTTTCATTGGAGCCACCACTCTTATAATTGCCCATGGCCTCACCTCCTCCCTATTATT
+
+	Lemur_catta         CTGTCTAGCCAACTCTAACTACGAACGAATCCATAGCCGTACAATACTACTAGCACGAGG
+	Homo_sapiens        CTGCCTAGCAAACTCAAACTACGAACGCACTCACAGTCGCATCATAATCCTCTCTCAAGG
+	Pan                 CTGCCTAGCAAACTCAAATTATGAACGCACCCACAGTCGCATCATAATTCTCTCCCAAGG
+	Gorilla             CTGCCTAGCAAACTCAAACTACGAACGAACCCACAGCCGCATCATAATTCTCTCTCAAGG
+	Pongo               CTGCCTAGCAAACTCAAACTACGAACGAACCCACAGCCGCATCATAATCCTCTCTCAAGG
+	Hylobates           CTGCCTTGCAAACTCAAACTACGAACGAACTCACAGCCGCATCATAATCCTATCTCGAGG
+	Macaca_fuscata      CTGCCTAGCCAATTCAAACTATGAACGCACTCACAACCGTACCATACTACTGTCCCGAGG
+	Macaca_mulatta      CTGCCTAGCCAATTCAAACTATGAACGCACTCACAACCGTACCATACTACTGTCCCGGGG
+	Macaca_fascicularis CTGCTTGGCCAATTCAAACTATGAGCGCACTCATAACCGTACCATACTACTATCCCGAGG
+	Macaca_sylvanus     CTGCTTGGCCAACTCAAACTACGAACGCACCCACAGCCGCATCATACTACTATCCCGAGG
+	Saimiri_sciureus    CTGCCTAGCAAACTCAAATTACGAACGAATTCACAGCCGAACAATAACATTTACTCGAGG
+	Tarsius_syrichta    TTGCCTAGCAAATACAAACTACGAACGAGTCCACAGTCGAACAATAGCACTAGCCCGTGG
+
+	Lemur_catta         GATCCAAACCATTCTCCCTCTTATAGCCACCTGATGACTACTCGCCAGCCTAACTAACCT
+	Homo_sapiens        ACTTCAAACTCTACTCCCACTAATAGCTTTTTGATGACTTCTAGCAAGCCTCGCTAACCT
+	Pan                 ACTTCAAACTCTACTCCCACTAATAGCCTTTTGATGACTCCTAGCAAGCCTCGCTAACCT
+	Gorilla             ACTCCAAACCCTACTCCCACTAATAGCCCTTTGATGACTTCTGGCAAGCCTCGCCAACCT
+	Pongo               CCTTCAAACTCTACTCCCCCTAATAGCCCTCTGATGACTTCTAGCAAGCCTCACTAACCT
+	Hylobates           GCTCCAAGCCTTACTCCCACTGATAGCCTTCTGATGACTCGCAGCAAGCCTCGCTAACCT
+	Macaca_fuscata      ACTTCAAATCCTACTTCCACTAACAGCCTTTTGATGATTAACAGCAAGCCTTACTAACCT
+	Macaca_mulatta      ACTTCAAATCCTACTTCCACTAACAGCTTTCTGATGATTAACAGCAAGCCTTACTAACCT
+	Macaca_fascicularis ACTTCAAATTCTACTTCCATTGACAGCCTTCTGATGACTCACAGCAAGCCTTACTAACCT
+	Macaca_sylvanus     ACTCCAAATCCTACTCCCACTAACAGCCTTCTGATGATTCACAGCAAGCCTTACTAATCT
+	Saimiri_sciureus    GCTCCAAACACTATTCCCGCTTATAGGCCTCTGATGACTCCTAGCAAATCTCGCTAACCT
+	Tarsius_syrichta    CCTTCAAACCCTATTACCTCTTGCAGCAACATGATGACTCCTCGCCAGCTTAACCAACCT
+
+	Lemur_catta         AGCCCTACCCACCTCTATCAATTTAATTGGCGAACTATTCGTCACTATAGCATCCTTCTC
+	Homo_sapiens        CGCCTTACCCCCCACTATTAACCTACTGGGAGAACTCTCTGTGCTAGTAACCACGTTCTC
+	Pan                 CGCCCTACCCCCTACCATTAATCTCCTAGGGGAACTCTCCGTGCTAGTAACCTCATTCTC
+	Gorilla             CGCCTTACCCCCCACCATTAACCTACTAGGAGAGCTCTCCGTACTAGTAACCACATTCTC
+	Pongo               TGCCCTACCACCCACCATCAACCTTCTAGGAGAACTCTCCGTACTAATAGCCATATTCTC
+	Hylobates           CGCCCTACCCCCCACTATTAACCTCCTAGGTGAACTCTTCGTACTAATGGCCTCCTTCTC
+	Macaca_fuscata      TGCCCTACCCCCCACTATCAATCTACTAGGTGAACTCTTTGTAATCGCAACCTCATTCTC
+	Macaca_mulatta      TGCCCTACCCCCCACTATCAACCTACTAGGTGAACTCTTTGTAATCGCGACCTCATTCTC
+	Macaca_fascicularis TGCCCTACCCCCCACTATTAATCTACTAGGCGAACTCTTTGTAATCACAACTTCATTTTC
+	Macaca_sylvanus     TGCTCTACCCTCCACTATTAATCTACTGGGCGAACTCTTCGTAATCGCAACCTCATTTTC
+	Saimiri_sciureus    CGCCCTACCCACAGCTATTAATCTAGTAGGAGAATTACTCACAATCGTATCTTCCTTCTC
+	Tarsius_syrichta    GGCCCTTCCCCCAACAATTAATTTAATCGGTGAACTGTCCGTAATAATAGCAGCATTTTC
+
+	Lemur_catta         ATGATCAAACATTACAATTATCTTAATAGGCTTAAATATGCTCATCACCGCTCTCTATTC
+	Homo_sapiens        CTGATCAAATATCACTCTCCTACTTACAGGACTCAACATACTAGTCACAGCCCTATACTC
+	Pan                 CTGATCAAATACCACTCTCCTACTCACAGGATTCAACATACTAATCACAGCCCTGTACTC
+	Gorilla             CTGATCAAACACCACCCTTTTACTTACAGGATCTAACATACTAATTACAGCCCTGTACTC
+	Pongo               TTGATCTAACATCACCATCCTACTAACAGGACTCAACATACTAATCACAACCCTATACTC
+	Hylobates           CTGGGCAAACACTACTATTACACTCACCGGGCTCAACGTACTAATCACGGCCCTATACTC
+	Macaca_fuscata      CTGATCCCATATCACCATTATGCTAACAGGACTTAACATATTAATTACGGCCCTCTACTC
+	Macaca_mulatta      CTGGTCCCATATCACCATTATATTAACAGGATTTAACATACTAATTACGGCCCTCTACTC
+	Macaca_fascicularis CTGATCCCATATCACCATTGTGTTAACGGGCCTTAATATACTAATCACAGCCCTCTACTC
+	Macaca_sylvanus     CTGATCCCACATCACCATCATACTAACAGGACTGAACATACTAATTACAGCCCTCTACTC
+	Saimiri_sciureus    TTGATCCAACTTTACTATTATATTCACAGGACTTAATATACTAATTACAGCACTCTACTC
+	Tarsius_syrichta    ATGGTCACACCTAACTATTATCTTAGTAGGCCTTAACACCCTTATCACCGCCCTATATTC
+
+	Lemur_catta         CCTCTATATATTAACTACTACACAACGAGGAAAACTCACATATCATTCGCACAACCTAAA
+	Homo_sapiens        CCTCTACATATTTACCACAACACAATGGGGCTCACTCACCCACCACATTAACAACATAAA
+	Pan                 CCTCTACATGTTTACCACAACACAATGAGGCTCACTCACCCACCACATTAATAACATAAA
+	Gorilla             CCTTTATATATTTACCACAACACAATGAGGCCCACTCACACACCACATCACCAACATAAA
+	Pongo               TCTCTATATATTCACCACAACACAACGAGGTACACCCACACACCACATCAACAACATAAA
+	Hylobates           CCTTTACATATTTATCATAACACAACGAGGCACACTTACACACCACATTAAAAACATAAA
+	Macaca_fuscata      TCTCCACATATTCACTACAACACAACGAGGAACACTCACACATCACATAATCAACATAAA
+	Macaca_mulatta      CCTCCACATATTCACCACAACACAACGAGGAGCACTCACACATCACATAATCAACATAAA
+	Macaca_fascicularis TCTCCACATGTTCATTACAGTACAACGAGGAACACTCACACACCACATAATCAATATAAA
+	Macaca_sylvanus     TCTTCACATATTCACCACAACACAACGAGGAGCGCTCACACACCACATAATTAACATAAA
+	Saimiri_sciureus    ACTTCATATGTATGCCTCTACACAGCGAGGTCCACTTACATACAGCACCAGCAATATAAA
+	Tarsius_syrichta    CCTATATATACTAATCATAACTCAACGAGGAAAATACACATATCATATCAACAATATCAT
+
+	Lemur_catta         CCCATCCTTTACACGAGAAAACACCCTTATATCCATACACATACTCCCCCTTCTCCTATT
+	Homo_sapiens        ACCCTCATTCACACGAGAAAACACCCTCATGTTCATACACCTATCCCCCATTCTCCTCCT
+	Pan                 GCCCTCATTCACACGAGAAAATACTCTCATATTTTTACACCTATCCCCCATCCTCCTTCT
+	Gorilla             ACCCTCATTTACACGAGAAAACATCCTCATATTCATGCACCTATCCCCCATCCTCCTCCT
+	Pongo               ACCTTCTTTCACACGCGAAAATACCCTCATGCTCATACACCTATCCCCCATCCTCCTCTT
+	Hylobates           ACCCTCACTCACACGAGAAAACATATTAATACTTATGCACCTCTTCCCCCTCCTCCTCCT
+	Macaca_fuscata      GCCCCCCTTCACACGAGAAAACACATTAATATTCATACACCTCGCTCCAATTATCCTTCT
+	Macaca_mulatta      ACCCCCCTTCACACGAGAAAACATATTAATATTCATACACCTCGCTCCAATCATCCTCCT
+	Macaca_fascicularis ACCCCCCTTCACACGAGAAAACATATTAATATTCATACACCTCGCTCCAATTATCCTTCT
+	Macaca_sylvanus     ACCACCTTTCACACGAGAAAACATATTAATACTCATACACCTCGCTCCAATTATTCTTCT
+	Saimiri_sciureus    ACCAATATTTACACGAGAAAATACGCTAATATTTATACATATAACACCAATCCTCCTCCT
+	Tarsius_syrichta    GCCCCCTTTCACCCGAGAAAATACATTAATAATCATACACCTATTTCCCTTAATCCTACT
+
+	Lemur_catta         TACCTTAAACCCCAAAATTATTCTAGGACCCACGTACTGTAAATATAGTTTAAA-AAAAC
+	Homo_sapiens        ATCCCTCAACCCCGACATCATTACCGGGTTTTCCTCTTGTAAATATAGTTTAACCAAAAC
+	Pan                 ATCCCTCAATCCTGATATCATCACTGGATTCACCTCCTGTAAATATAGTTTAACCAAAAC
+	Gorilla             ATCCCTCAACCCCGATATTATCACCGGGTTCACCTCCTGTAAATATAGTTTAACCAAAAC
+	Pongo               ATCCCTCAACCCCAGCATCATCGCTGGGTTCGCCTACTGTAAATATAGTTTAACCAAAAC
+	Hylobates           AACCCTCAACCCTAACATCATTACTGGCTTTACTCCCTGTAAACATAGTTTAATCAAAAC
+	Macaca_fuscata      ATCCCTCAACCCCAACATCATCCTGGGGTTTACCTCCTGTAGATATAGTTTAACTAAAAC
+	Macaca_mulatta      ATCTCTCAACCCCAACATCATCCTGGGGTTTACTTCCTGTAGATATAGTTTAACTAAAAC
+	Macaca_fascicularis ATCTCTCAACCCCAACATCATCCTGGGGTTTACCTCCTGTAAATATAGTTTAACTAAAAC
+	Macaca_sylvanus     ATCTCTTAACCCCAACATCATTCTAGGATTTACTTCCTGTAAATATAGTTTAATTAAAAC
+	Saimiri_sciureus    TACCTTGAGCCCCAAGGTAATTATAGGACCCTCACCTTGTAATTATAGTTTAGCTAAAAC
+	Tarsius_syrichta    ATCTACCAACCCCAAAGTAATTATAGGAACCATGTACTGTAAATATAGTTTAAACAAAAC
+
+	Lemur_catta         ACTAGATTGTGAATCCAGAAATAGAAGCTCAAAC-CTTCTTATTTACCGAGAAAGTAATG
+	Homo_sapiens        ATCAGATTGTGAATCTGACAACAGAGGCTTA-CGACCCCTTATTTACCGAGAAAGCT-CA
+	Pan                 ATCAGATTGTGAATCTGACAACAGAGGCTCA-CGACCCCTTATTTACCGAGAAAGCT-TA
+	Gorilla             ATCAGATTGTGAATCTGATAACAGAGGCTCA-CAACCCCTTATTTACCGAGAAAGCT-CG
+	Pongo               ATTAGATTGTGAATCTAATAATAGGGCCCCA-CAACCCCTTATTTACCGAGAAAGCT-CA
+	Hylobates           ATTAGATTGTGAATCTAACAATAGAGGCTCG-AAACCTCTTGCTTACCGAGAAAGCC-CA
+	Macaca_fuscata      ACTAGATTGTGAATCTAACCATAGAGACTCA-CCACCTCTTATTTACCGAGAAAACT-CG
+	Macaca_mulatta      ATTAGATTGTGAATCTAACCATAGAGACTTA-CCACCTCTTATTTACCGAGAAAACT-CG
+	Macaca_fascicularis ATTAGATTGTGAATCTAACTATAGAGGCCTA-CCACTTCTTATTTACCGAGAAAACT-CG
+	Macaca_sylvanus     ATTAGACTGTGAATCTAACTATAGAAGCTTA-CCACTTCTTATTTACCGAGAAAACT-TG
+	Saimiri_sciureus    ATTAGATTGTGAATCTAATAATAGAAGAATA-TAACTTCTTAATTACCGAGAAAGTG-CG
+	Tarsius_syrichta    ATTAGATTGTGAGTCTAATAATAGAAGCCCAAAGATTTCTTATTTACCAAGAAAGTA-TG
+
+	Lemur_catta         TATGAACTGCTAACTCTGCACTCCGTATATAAAAATACGGCTATCTCAACTTTTAAAGGA
+	Homo_sapiens        CAAGAACTGCTAACTCATGCCCCCATGTCTAACAACATGGCTTTCTCAACTTTTAAAGGA
+	Pan                 TAAGAACTGCTAATTCATATCCCCATGCCTGACAACATGGCTTTCTCAACTTTTAAAGGA
+	Gorilla             TAAGAGCTGCTAACTCATACCCCCGTGCTTGACAACATGGCTTTCTCAACTTTTAAAGGA
+	Pongo               CAAGAACTGCTAACTCTCACT-CCATGTGTGACAACATGGCTTTCTCAGCTTTTAAAGGA
+	Hylobates           CAAGAACTGCTAACTCACTATCCCATGTATGACAACATGGCTTTCTCAACTTTTAAAGGA
+	Macaca_fuscata      CAAGGACTGCTAACCCATGTACCCGTACCTAAAATTACGGTTTTCTCAACTTTTAAAGGA
+	Macaca_mulatta      CGAGGACTGCTAACCCATGTATCCGTACCTAAAATTACGGTTTTCTCAACTTTTAAAGGA
+	Macaca_fascicularis CAAGGACTGCTAATCCATGCCTCCGTACTTAAAACTACGGTTTCCTCAACTTTTAAAGGA
+	Macaca_sylvanus     CAAGGACCGCTAATCCACACCTCCGTACTTAAAACTACGGTTTTCTCAACTTTTAAAGGA
+	Saimiri_sciureus    CAAGAACTGCTAATTCATGCTCCCAAGACTAACAACTTGGCTTCCTCAACTTTTAAAGGA
+	Tarsius_syrichta    CAAGAACTGCTAACTCATGCCTCCATATATAACAATGTGGCTTTCTT-ACTTTTAAAGGA
+
+	Lemur_catta         TAGAAGTAATCCATTGGCCTTAGGAGCCAAAAA-ATTGGTGCAACTCCAAATAAAAGTAA
+	Homo_sapiens        TAACAGCTATCCATTGGTCTTAGGCCCCAAAAATTTTGGTGCAACTCCAAATAAAAGTAA
+	Pan                 TAACAGCCATCCGTTGGTCTTAGGCCCCAAAAATTTTGGTGCAACTCCAAATAAAAGTAA
+	Gorilla             TAACAGCTATCCATTGGTCTTAGGACCCAAAAATTTTGGTGCAACTCCAAATAAAAGTAA
+	Pongo               TAACAGCTATCCCTTGGTCTTAGGATCCAAAAATTTTGGTGCAACTCCAAATAAAAGTAA
+	Hylobates           TAACAGCTATCCATTGGTCTTAGGACCCAAAAATTTTGGTGCAACTCCAAATAAAAGTAA
+	Macaca_fuscata      TAACAGCTATCCATTGACCTTAGGAGTCAAAAACATTGGTGCAACTCCAAATAAAAGTAA
+	Macaca_mulatta      TAACAGCTATCCATTGACCTTAGGAGTCAAAAATATTGGTGCAACTCCAAATAAAAGTAA
+	Macaca_fascicularis TAACAGCTATCCATTGACCTTAGGAGTCAAAAACATTGGTGCAACTCCAAATAAAAGTAA
+	Macaca_sylvanus     TAACAGCTATCCATTGGCCTTAGGAGTCAAAAATATTGGTGCAACTCCAAATAAAAGTAA
+	Saimiri_sciureus    TAGTAGTTATCCATTGGTCTTAGGAGCCAAAAACATTGGTGCAACTCCAAATAAAAGTAA
+	Tarsius_syrichta    TAGAAGTAATCCATCGGTCTTAGGAACCGAAAA-ATTGGTGCAACTCCAAATAAAAGTAA
+
+	Lemur_catta         TAAATCTATTATCCTCTTTCACCCTTGTCACACTGATTATCCTAACTTTACCTATCATTA
+	Homo_sapiens        TAACCATGCACACTACTATAACCACCCTAACCCTGACTTCCCTAATTCCCCCCATCCTTA
+	Pan                 TAACCATGTATACTACCATAACCACCTTAACCCTAACTCCCTTAATTCTCCCCATCCTCA
+	Gorilla             TAACTATGTACGCTACCATAACCACCTTAGCCCTAACTTCCTTAATTCCCCCTATCCTTA
+	Pongo               CAGCCATGTTTACCACCATAACTGCCCTCACCTTAACTTCCCTAATCCCCCCCATTACCG
+	Hylobates           TAGCAATGTACACCACCATAGCCATTCTAACGCTAACCTCCCTAATTCCCCCCATTACAG
+	Macaca_fuscata      TAATCATGCACACCCCCATCATTATAACAACCCTTATCTCCCTAACTCTCCCAATTTTTG
+	Macaca_mulatta      TAATCATGCACACCCCTATCATAATAACAACCCTTATCTCCCTAACTCTCCCAATTTTTG
+	Macaca_fascicularis TAATCATGCACACCCCCATCATAATAACAACCCTCATCTCCCTGACCCTTCCAATTTTTG
+	Macaca_sylvanus     TAATCATGTATACCCCCATCATAATAACAACTCTCATCTCCCTAACTCTTCCAATTTTCG
+	Saimiri_sciureus    TA---ATACACTTCTCCATCACTCTAATAACACTAATTAGCCTACTAGCGCCAATCCTAG
+	Tarsius_syrichta    TAAATTTATTTTCATCCTCCATTTTACTATCACTTACACTCTTAATTACCCCATTTATTA
+
+	Lemur_catta         TAAACGTTACAAACATATACAAAAACTACCCCTATGCACCATACGTAAAATCTTCTATTG
+	Homo_sapiens        CCACCCTCGTTAACCCTAACAAAAAAAACTCATACCCCCATTATGTAAAATCCATTGTCG
+	Pan                 CCACCCTCATTAACCCTAACAAAAAAAACTCATATCCCCATTATGTGAAATCCATTATCG
+	Gorilla             CCACCTTCATCAATCCTAACAAAAAAAGCTCATACCCCCATTACGTAAAATCTATCGTCG
+	Pongo               CTACCCTCATTAACCCCAACAAAAAAAACCCATACCCCCACTATGTAAAAACGGCCATCG
+	Hylobates           CCACCCTTATTAACCCCAATAAAAAGAACTTATACCCGCACTACGTAAAAATGACCATTG
+	Macaca_fuscata      CCACCCTCATCAACCCTTACAAAAAACGTCCATACCCAGATTACGTAAAAACAACCGTAA
+	Macaca_mulatta      CCACCCTCATCAACCCTTACAAAAAACGTCCATACCCAGATTACGTAAAAACAACCGTAA
+	Macaca_fascicularis CCACCCTCACCAACCCCTATAAAAAACGTTCATACCCAGACTACGTAAAAACAACCGTAA
+	Macaca_sylvanus     CTACCCTTATCAACCCCAACAAAAAACACCTATATCCAAACTACGTAAAAACAGCCGTAA
+	Saimiri_sciureus    CTACCCTCATTAACCCTAACAAAAGCACACTATACCCGTACTACGTAAAACTAGCCATCA
+	Tarsius_syrichta    TTACAACAACTAAAAAATATGAAACACATGCATACCCTTACTACGTAAAAAACTCTATCG
+
+	Lemur_catta         CATGTGCCTTCATCACTAGCCTCATCCCAACTATATTATTTATCTCCTCAGGACAAGAAA
+	Homo_sapiens        CATCCACCTTTATTATCAGTCTCTTCCCCACAACAATATTCATGTGCCTAGACCAAGAAG
+	Pan                 CGTCCACCTTTATCATTAGCCTTTTCCCCACAACAATATTCATATGCCTAGACCAAGAAG
+	Gorilla             CATCCACCTTTATCATCAGCCTCTTCCCCACAACAATATTTCTATGCCTAGACCAAGAAG
+	Pongo               CATCCGCCTTTACTATCAGCCTTATCCCAACAACAATATTTATCTGCCTAGGACAAGAAA
+	Hylobates           CCTCTACCTTTATAATCAGCCTATTTCCCACAATAATATTCATGTGCACAGACCAAGAAA
+	Macaca_fuscata      TATATGCTTTCATCATCAGCCTCCCCTCAACAACTTTATTCATCTTCTCAAACCAAGAAA
+	Macaca_mulatta      TATATGCTTTCATCATCAGCCTCCCCTCAACAACTTTATTCATCTTCTCAAACCAAGAAA
+	Macaca_fascicularis TATATGCTTTTATTACCAGTCTCCCCTCAACAACCCTATTCATCCTCTCAAACCAAGAAA
+	Macaca_sylvanus     TATATGCTTTCATTACCAGCCTCTCTTCAACAACTTTATATATATTCTTAAACCAAGAAA
+	Saimiri_sciureus    TCTACGCCCTCATTACCAGTACCTTATCTATAATATTCTTTATCCTTACAGGCCAAGAAT
+	Tarsius_syrichta    CCTGCGCATTTATAACAAGCCTAGTCCCAATGCTCATATTTCTATACACAAATCAAGAAA
+
+	Lemur_catta         CAATCATTTCCAACTGACATTGAATAACAATCCAAACCCTAAAACTATCTATTAGCTT
+	Homo_sapiens        TTATTATCTCGAACTGACACTGAGCCACAACCCAAACAACCCAGCTCTCCCTAAGCTT
+	Pan                 CTATTATCTCAAACTGGCACTGAGCAACAACCCAAACAACCCAGCTCTCCCTAAGCTT
+	Gorilla             CTATTATCTCAAGCTGACACTGAGCAACAACCCAAACAATTCAACTCTCCCTAAGCTT
+	Pongo               CCATCGTCACAAACTGATGCTGAACAACCACCCAGACACTACAACTCTCACTAAGCTT
+	Hylobates           CCATTATTTCAAACTGACACTGAACTGCAACCCAAACGCTAGAACTCTCCCTAAGCTT
+	Macaca_fuscata      CAACCATTTGGAGCTGACATTGAATAATGACCCAAACACTAGACCTAACGCTAAGCTT
+	Macaca_mulatta      CAACCATTTGAAGCTGACATTGAATAATAACCCAAACACTAGACCTAACACTAAGCTT
+	Macaca_fascicularis CAACCATTTGGAGTTGACATTGAATAACAACCCAAACATTAGACCTAACACTAAGCTT
+	Macaca_sylvanus     CAATCATCTGAAGCTGGCACTGAATAATAACCCAAACACTAAGCCTAACATTAAGCTT
+	Saimiri_sciureus    CAATAATTTCAAACTGACACTGAATAACTATCCAAACCATCAAACTATCCCTAAGCTT
+	Tarsius_syrichta    TAATCATTTCCAACTGACATTGAATAACGATTCATACTATCAAATTATGCCTAAGCTT
+		;
+		
+END;
diff --git a/doc/source/examples/primates.chars.nexus b/doc/source/examples/primates.chars.nexus
new file mode 100644
index 0000000..75f4722
--- /dev/null
+++ b/doc/source/examples/primates.chars.nexus
@@ -0,0 +1,45 @@
+#NEXUS 
+
+[Modified from example data distributed with PAUP* 4.01b.
+ Original Data from:
+	Hayasaka, K., T. Gojobori, and S. Horai. 1988. Molecular phylogeny
+		and evolution of primate mitochondrial DNA. Mol. Biol. Evol.
+		5:626-644.
+]
+
+BEGIN TAXA;
+	DIMENSIONS NTAX=12;
+	TAXLABELS
+        Lemur_catta        	
+        Homo_sapiens       	
+        Pan                	
+        Gorilla            	
+        Pongo              	
+        Hylobates          	
+        Macaca_fuscata     	
+        Macaca_mulatta     	
+        Macaca_fascicularis	
+        Macaca_sylvanus    	
+        Saimiri_sciureus   	
+        Tarsius_syrichta   	
+	;	
+END;
+
+BEGIN CHARACTERS;
+	DIMENSIONS NCHAR=898;
+	FORMAT DATATYPE=DNA GAP=-;
+	MATRIX
+Lemur_catta      AAGCTTCATAGGAGCAACCATTCTAATAATCGCACATGGCCTTACATCATCCATATTATTCTGTCTAGCCAACTCTAACTACGAACGAATCCATAGCCGTACAATACTACTAGCACGAGGGATCCAAACCATTCTCCCTCTTATAGCCACCTGATGACTACTCGCCAGCCTAACTAACCTAGCCCTACCCACCTCTATCAATTTAATTGGCGAACTATTCGTCACTATAGCATCCTTCTCATGATCAAACATTACAATTATCTTAATAGGCTTAAATATGCTCATCACCGCTCTCTATTCCCTCTATATATTAACTACTACACAACGAGGAAAACTCACATATCATTCGCACAACCTAAACCCATCCTTTACACGAGAAAACACCCTTATATCCATACACATACTCCCCCTTCTCCTATTTACCTTAAACCCCAAAATTATTCTAGGACCCACGTACTGTAAATATAGTTTAAA- [...]
+Homo_sapiens     AAGCTTCACCGGCGCAGTCATTCTCATAATCGCCCACGGGCTTACATCCTCATTACTATTCTGCCTAGCAAACTCAAACTACGAACGCACTCACAGTCGCATCATAATCCTCTCTCAAGGACTTCAAACTCTACTCCCACTAATAGCTTTTTGATGACTTCTAGCAAGCCTCGCTAACCTCGCCTTACCCCCCACTATTAACCTACTGGGAGAACTCTCTGTGCTAGTAACCACGTTCTCCTGATCAAATATCACTCTCCTACTTACAGGACTCAACATACTAGTCACAGCCCTATACTCCCTCTACATATTTACCACAACACAATGGGGCTCACTCACCCACCACATTAACAACATAAAACCCTCATTCACACGAGAAAACACCCTCATGTTCATACACCTATCCCCCATTCTCCTCCTATCCCTCAACCCCGACATCATTACCGGGTTTTCCTCTTGTAAATATAGTTTAACC [...]
+Pan              AAGCTTCACCGGCGCAATTATCCTCATAATCGCCCACGGACTTACATCCTCATTATTATTCTGCCTAGCAAACTCAAATTATGAACGCACCCACAGTCGCATCATAATTCTCTCCCAAGGACTTCAAACTCTACTCCCACTAATAGCCTTTTGATGACTCCTAGCAAGCCTCGCTAACCTCGCCCTACCCCCTACCATTAATCTCCTAGGGGAACTCTCCGTGCTAGTAACCTCATTCTCCTGATCAAATACCACTCTCCTACTCACAGGATTCAACATACTAATCACAGCCCTGTACTCCCTCTACATGTTTACCACAACACAATGAGGCTCACTCACCCACCACATTAATAACATAAAGCCCTCATTCACACGAGAAAATACTCTCATATTTTTACACCTATCCCCCATCCTCCTTCTATCCCTCAATCCTGATATCATCACTGGATTCACCTCCTGTAAATATAGTTTAACC [...]
+Gorilla          AAGCTTCACCGGCGCAGTTGTTCTTATAATTGCCCACGGACTTACATCATCATTATTATTCTGCCTAGCAAACTCAAACTACGAACGAACCCACAGCCGCATCATAATTCTCTCTCAAGGACTCCAAACCCTACTCCCACTAATAGCCCTTTGATGACTTCTGGCAAGCCTCGCCAACCTCGCCTTACCCCCCACCATTAACCTACTAGGAGAGCTCTCCGTACTAGTAACCACATTCTCCTGATCAAACACCACCCTTTTACTTACAGGATCTAACATACTAATTACAGCCCTGTACTCCCTTTATATATTTACCACAACACAATGAGGCCCACTCACACACCACATCACCAACATAAAACCCTCATTTACACGAGAAAACATCCTCATATTCATGCACCTATCCCCCATCCTCCTCCTATCCCTCAACCCCGATATTATCACCGGGTTCACCTCCTGTAAATATAGTTTAACC [...]
+Pongo            AAGCTTCACCGGCGCAACCACCCTCATGATTGCCCATGGACTCACATCCTCCCTACTGTTCTGCCTAGCAAACTCAAACTACGAACGAACCCACAGCCGCATCATAATCCTCTCTCAAGGCCTTCAAACTCTACTCCCCCTAATAGCCCTCTGATGACTTCTAGCAAGCCTCACTAACCTTGCCCTACCACCCACCATCAACCTTCTAGGAGAACTCTCCGTACTAATAGCCATATTCTCTTGATCTAACATCACCATCCTACTAACAGGACTCAACATACTAATCACAACCCTATACTCTCTCTATATATTCACCACAACACAACGAGGTACACCCACACACCACATCAACAACATAAAACCTTCTTTCACACGCGAAAATACCCTCATGCTCATACACCTATCCCCCATCCTCCTCTTATCCCTCAACCCCAGCATCATCGCTGGGTTCGCCTACTGTAAATATAGTTTAACC [...]
+Hylobates        AAGCTTTACAGGTGCAACCGTCCTCATAATCGCCCACGGACTAACCTCTTCCCTGCTATTCTGCCTTGCAAACTCAAACTACGAACGAACTCACAGCCGCATCATAATCCTATCTCGAGGGCTCCAAGCCTTACTCCCACTGATAGCCTTCTGATGACTCGCAGCAAGCCTCGCTAACCTCGCCCTACCCCCCACTATTAACCTCCTAGGTGAACTCTTCGTACTAATGGCCTCCTTCTCCTGGGCAAACACTACTATTACACTCACCGGGCTCAACGTACTAATCACGGCCCTATACTCCCTTTACATATTTATCATAACACAACGAGGCACACTTACACACCACATTAAAAACATAAAACCCTCACTCACACGAGAAAACATATTAATACTTATGCACCTCTTCCCCCTCCTCCTCCTAACCCTCAACCCTAACATCATTACTGGCTTTACTCCCTGTAAACATAGTTTAATC [...]
+Macaca_fuscata   AAGCTTTTCCGGCGCAACCATCCTTATGATCGCTCACGGACTCACCTCTTCCATATATTTCTGCCTAGCCAATTCAAACTATGAACGCACTCACAACCGTACCATACTACTGTCCCGAGGACTTCAAATCCTACTTCCACTAACAGCCTTTTGATGATTAACAGCAAGCCTTACTAACCTTGCCCTACCCCCCACTATCAATCTACTAGGTGAACTCTTTGTAATCGCAACCTCATTCTCCTGATCCCATATCACCATTATGCTAACAGGACTTAACATATTAATTACGGCCCTCTACTCTCTCCACATATTCACTACAACACAACGAGGAACACTCACACATCACATAATCAACATAAAGCCCCCCTTCACACGAGAAAACACATTAATATTCATACACCTCGCTCCAATTATCCTTCTATCCCTCAACCCCAACATCATCCTGGGGTTTACCTCCTGTAGATATAGTTTAACT [...]
+Macaca_mulatta       AAGCTTTTCTGGCGCAACCATCCTCATGATTGCTCACGGACTCACCTCTTCCATATATTTCTGCCTAGCCAATTCAAACTATGAACGCACTCACAACCGTACCATACTACTGTCCCGGGGACTTCAAATCCTACTTCCACTAACAGCTTTCTGATGATTAACAGCAAGCCTTACTAACCTTGCCCTACCCCCCACTATCAACCTACTAGGTGAACTCTTTGTAATCGCGACCTCATTCTCCTGGTCCCATATCACCATTATATTAACAGGATTTAACATACTAATTACGGCCCTCTACTCCCTCCACATATTCACCACAACACAACGAGGAGCACTCACACATCACATAATCAACATAAAACCCCCCTTCACACGAGAAAACATATTAATATTCATACACCTCGCTCCAATCATCCTCCTATCTCTCAACCCCAACATCATCCTGGGGTTTACTTCCTGTAGATATAGTTT [...]
+Macaca_fascicularis  AAGCTTCTCCGGCGCAACCACCCTTATAATCGCCCACGGGCTCACCTCTTCCATGTATTTCTGCTTGGCCAATTCAAACTATGAGCGCACTCATAACCGTACCATACTACTATCCCGAGGACTTCAAATTCTACTTCCATTGACAGCCTTCTGATGACTCACAGCAAGCCTTACTAACCTTGCCCTACCCCCCACTATTAATCTACTAGGCGAACTCTTTGTAATCACAACTTCATTTTCCTGATCCCATATCACCATTGTGTTAACGGGCCTTAATATACTAATCACAGCCCTCTACTCTCTCCACATGTTCATTACAGTACAACGAGGAACACTCACACACCACATAATCAATATAAAACCCCCCTTCACACGAGAAAACATATTAATATTCATACACCTCGCTCCAATTATCCTTCTATCTCTCAACCCCAACATCATCCTGGGGTTTACCTCCTGTAAATATAGTTT [...]
+Macaca_sylvanus      AAGCTTCTCCGGTGCAACTATCCTTATAGTTGCCCATGGACTCACCTCTTCCATATACTTCTGCTTGGCCAACTCAAACTACGAACGCACCCACAGCCGCATCATACTACTATCCCGAGGACTCCAAATCCTACTCCCACTAACAGCCTTCTGATGATTCACAGCAAGCCTTACTAATCTTGCTCTACCCTCCACTATTAATCTACTGGGCGAACTCTTCGTAATCGCAACCTCATTTTCCTGATCCCACATCACCATCATACTAACAGGACTGAACATACTAATTACAGCCCTCTACTCTCTTCACATATTCACCACAACACAACGAGGAGCGCTCACACACCACATAATTAACATAAAACCACCTTTCACACGAGAAAACATATTAATACTCATACACCTCGCTCCAATTATTCTTCTATCTCTTAACCCCAACATCATTCTAGGATTTACTTCCTGTAAATATAGTTT [...]
+Saimiri_sciureus AAGCTTCACCGGCGCAATGATCCTAATAATCGCTCACGGGTTTACTTCGTCTATGCTATTCTGCCTAGCAAACTCAAATTACGAACGAATTCACAGCCGAACAATAACATTTACTCGAGGGCTCCAAACACTATTCCCGCTTATAGGCCTCTGATGACTCCTAGCAAATCTCGCTAACCTCGCCCTACCCACAGCTATTAATCTAGTAGGAGAATTACTCACAATCGTATCTTCCTTCTCTTGATCCAACTTTACTATTATATTCACAGGACTTAATATACTAATTACAGCACTCTACTCACTTCATATGTATGCCTCTACACAGCGAGGTCCACTTACATACAGCACCAGCAATATAAAACCAATATTTACACGAGAAAATACGCTAATATTTATACATATAACACCAATCCTCCTCCTTACCTTGAGCCCCAAGGTAATTATAGGACCCTCACCTTGTAATTATAGTTTAGCT [...]
+Tarsius_syrichta AAGTTTCATTGGAGCCACCACTCTTATAATTGCCCATGGCCTCACCTCCTCCCTATTATTTTGCCTAGCAAATACAAACTACGAACGAGTCCACAGTCGAACAATAGCACTAGCCCGTGGCCTTCAAACCCTATTACCTCTTGCAGCAACATGATGACTCCTCGCCAGCTTAACCAACCTGGCCCTTCCCCCAACAATTAATTTAATCGGTGAACTGTCCGTAATAATAGCAGCATTTTCATGGTCACACCTAACTATTATCTTAGTAGGCCTTAACACCCTTATCACCGCCCTATATTCCCTATATATACTAATCATAACTCAACGAGGAAAATACACATATCATATCAACAATATCATGCCCCCTTTCACCCGAGAAAATACATTAATAATCATACACCTATTTCCCTTAATCCTACTATCTACCAACCCCAAAGTAATTATAGGAACCATGTACTGTAAATATAGTTTAAAC [...]
+	;
+END;
diff --git a/doc/source/examples/primates.chars.simple.interleaved.nexus b/doc/source/examples/primates.chars.simple.interleaved.nexus
new file mode 100644
index 0000000..cfe248a
--- /dev/null
+++ b/doc/source/examples/primates.chars.simple.interleaved.nexus
@@ -0,0 +1,202 @@
+#NEXUS 
+
+Begin data;
+	Dimensions ntax=12 nchar=898;
+	Format datatype=dna interleave gap=-;
+	Matrix
+Lemur_catta         AAGCTTCATAGGAGCAACCATTCTAATAATCGCACATGGCCTTACATCATCCATATTATT
+Homo_sapiens        AAGCTTCACCGGCGCAGTCATTCTCATAATCGCCCACGGGCTTACATCCTCATTACTATT
+Pan                 AAGCTTCACCGGCGCAATTATCCTCATAATCGCCCACGGACTTACATCCTCATTATTATT
+Gorilla             AAGCTTCACCGGCGCAGTTGTTCTTATAATTGCCCACGGACTTACATCATCATTATTATT
+Pongo               AAGCTTCACCGGCGCAACCACCCTCATGATTGCCCATGGACTCACATCCTCCCTACTGTT
+Hylobates           AAGCTTTACAGGTGCAACCGTCCTCATAATCGCCCACGGACTAACCTCTTCCCTGCTATT
+Macaca_fuscata      AAGCTTTTCCGGCGCAACCATCCTTATGATCGCTCACGGACTCACCTCTTCCATATATTT
+Macaca_mulatta      AAGCTTTTCTGGCGCAACCATCCTCATGATTGCTCACGGACTCACCTCTTCCATATATTT
+Macaca_fascicularis AAGCTTCTCCGGCGCAACCACCCTTATAATCGCCCACGGGCTCACCTCTTCCATGTATTT
+Macaca_sylvanus     AAGCTTCTCCGGTGCAACTATCCTTATAGTTGCCCATGGACTCACCTCTTCCATATACTT
+Saimiri_sciureus    AAGCTTCACCGGCGCAATGATCCTAATAATCGCTCACGGGTTTACTTCGTCTATGCTATT
+Tarsius_syrichta    AAGTTTCATTGGAGCCACCACTCTTATAATTGCCCATGGCCTCACCTCCTCCCTATTATT
+
+Lemur_catta         CTGTCTAGCCAACTCTAACTACGAACGAATCCATAGCCGTACAATACTACTAGCACGAGG
+Homo_sapiens        CTGCCTAGCAAACTCAAACTACGAACGCACTCACAGTCGCATCATAATCCTCTCTCAAGG
+Pan                 CTGCCTAGCAAACTCAAATTATGAACGCACCCACAGTCGCATCATAATTCTCTCCCAAGG
+Gorilla             CTGCCTAGCAAACTCAAACTACGAACGAACCCACAGCCGCATCATAATTCTCTCTCAAGG
+Pongo               CTGCCTAGCAAACTCAAACTACGAACGAACCCACAGCCGCATCATAATCCTCTCTCAAGG
+Hylobates           CTGCCTTGCAAACTCAAACTACGAACGAACTCACAGCCGCATCATAATCCTATCTCGAGG
+Macaca_fuscata      CTGCCTAGCCAATTCAAACTATGAACGCACTCACAACCGTACCATACTACTGTCCCGAGG
+Macaca_mulatta      CTGCCTAGCCAATTCAAACTATGAACGCACTCACAACCGTACCATACTACTGTCCCGGGG
+Macaca_fascicularis CTGCTTGGCCAATTCAAACTATGAGCGCACTCATAACCGTACCATACTACTATCCCGAGG
+Macaca_sylvanus     CTGCTTGGCCAACTCAAACTACGAACGCACCCACAGCCGCATCATACTACTATCCCGAGG
+Saimiri_sciureus    CTGCCTAGCAAACTCAAATTACGAACGAATTCACAGCCGAACAATAACATTTACTCGAGG
+Tarsius_syrichta    TTGCCTAGCAAATACAAACTACGAACGAGTCCACAGTCGAACAATAGCACTAGCCCGTGG
+
+Lemur_catta         GATCCAAACCATTCTCCCTCTTATAGCCACCTGATGACTACTCGCCAGCCTAACTAACCT
+Homo_sapiens        ACTTCAAACTCTACTCCCACTAATAGCTTTTTGATGACTTCTAGCAAGCCTCGCTAACCT
+Pan                 ACTTCAAACTCTACTCCCACTAATAGCCTTTTGATGACTCCTAGCAAGCCTCGCTAACCT
+Gorilla             ACTCCAAACCCTACTCCCACTAATAGCCCTTTGATGACTTCTGGCAAGCCTCGCCAACCT
+Pongo               CCTTCAAACTCTACTCCCCCTAATAGCCCTCTGATGACTTCTAGCAAGCCTCACTAACCT
+Hylobates           GCTCCAAGCCTTACTCCCACTGATAGCCTTCTGATGACTCGCAGCAAGCCTCGCTAACCT
+Macaca_fuscata      ACTTCAAATCCTACTTCCACTAACAGCCTTTTGATGATTAACAGCAAGCCTTACTAACCT
+Macaca_mulatta      ACTTCAAATCCTACTTCCACTAACAGCTTTCTGATGATTAACAGCAAGCCTTACTAACCT
+Macaca_fascicularis ACTTCAAATTCTACTTCCATTGACAGCCTTCTGATGACTCACAGCAAGCCTTACTAACCT
+Macaca_sylvanus     ACTCCAAATCCTACTCCCACTAACAGCCTTCTGATGATTCACAGCAAGCCTTACTAATCT
+Saimiri_sciureus    GCTCCAAACACTATTCCCGCTTATAGGCCTCTGATGACTCCTAGCAAATCTCGCTAACCT
+Tarsius_syrichta    CCTTCAAACCCTATTACCTCTTGCAGCAACATGATGACTCCTCGCCAGCTTAACCAACCT
+
+Lemur_catta         AGCCCTACCCACCTCTATCAATTTAATTGGCGAACTATTCGTCACTATAGCATCCTTCTC
+Homo_sapiens        CGCCTTACCCCCCACTATTAACCTACTGGGAGAACTCTCTGTGCTAGTAACCACGTTCTC
+Pan                 CGCCCTACCCCCTACCATTAATCTCCTAGGGGAACTCTCCGTGCTAGTAACCTCATTCTC
+Gorilla             CGCCTTACCCCCCACCATTAACCTACTAGGAGAGCTCTCCGTACTAGTAACCACATTCTC
+Pongo               TGCCCTACCACCCACCATCAACCTTCTAGGAGAACTCTCCGTACTAATAGCCATATTCTC
+Hylobates           CGCCCTACCCCCCACTATTAACCTCCTAGGTGAACTCTTCGTACTAATGGCCTCCTTCTC
+Macaca_fuscata      TGCCCTACCCCCCACTATCAATCTACTAGGTGAACTCTTTGTAATCGCAACCTCATTCTC
+Macaca_mulatta      TGCCCTACCCCCCACTATCAACCTACTAGGTGAACTCTTTGTAATCGCGACCTCATTCTC
+Macaca_fascicularis TGCCCTACCCCCCACTATTAATCTACTAGGCGAACTCTTTGTAATCACAACTTCATTTTC
+Macaca_sylvanus     TGCTCTACCCTCCACTATTAATCTACTGGGCGAACTCTTCGTAATCGCAACCTCATTTTC
+Saimiri_sciureus    CGCCCTACCCACAGCTATTAATCTAGTAGGAGAATTACTCACAATCGTATCTTCCTTCTC
+Tarsius_syrichta    GGCCCTTCCCCCAACAATTAATTTAATCGGTGAACTGTCCGTAATAATAGCAGCATTTTC
+
+Lemur_catta         ATGATCAAACATTACAATTATCTTAATAGGCTTAAATATGCTCATCACCGCTCTCTATTC
+Homo_sapiens        CTGATCAAATATCACTCTCCTACTTACAGGACTCAACATACTAGTCACAGCCCTATACTC
+Pan                 CTGATCAAATACCACTCTCCTACTCACAGGATTCAACATACTAATCACAGCCCTGTACTC
+Gorilla             CTGATCAAACACCACCCTTTTACTTACAGGATCTAACATACTAATTACAGCCCTGTACTC
+Pongo               TTGATCTAACATCACCATCCTACTAACAGGACTCAACATACTAATCACAACCCTATACTC
+Hylobates           CTGGGCAAACACTACTATTACACTCACCGGGCTCAACGTACTAATCACGGCCCTATACTC
+Macaca_fuscata      CTGATCCCATATCACCATTATGCTAACAGGACTTAACATATTAATTACGGCCCTCTACTC
+Macaca_mulatta      CTGGTCCCATATCACCATTATATTAACAGGATTTAACATACTAATTACGGCCCTCTACTC
+Macaca_fascicularis CTGATCCCATATCACCATTGTGTTAACGGGCCTTAATATACTAATCACAGCCCTCTACTC
+Macaca_sylvanus     CTGATCCCACATCACCATCATACTAACAGGACTGAACATACTAATTACAGCCCTCTACTC
+Saimiri_sciureus    TTGATCCAACTTTACTATTATATTCACAGGACTTAATATACTAATTACAGCACTCTACTC
+Tarsius_syrichta    ATGGTCACACCTAACTATTATCTTAGTAGGCCTTAACACCCTTATCACCGCCCTATATTC
+
+Lemur_catta         CCTCTATATATTAACTACTACACAACGAGGAAAACTCACATATCATTCGCACAACCTAAA
+Homo_sapiens        CCTCTACATATTTACCACAACACAATGGGGCTCACTCACCCACCACATTAACAACATAAA
+Pan                 CCTCTACATGTTTACCACAACACAATGAGGCTCACTCACCCACCACATTAATAACATAAA
+Gorilla             CCTTTATATATTTACCACAACACAATGAGGCCCACTCACACACCACATCACCAACATAAA
+Pongo               TCTCTATATATTCACCACAACACAACGAGGTACACCCACACACCACATCAACAACATAAA
+Hylobates           CCTTTACATATTTATCATAACACAACGAGGCACACTTACACACCACATTAAAAACATAAA
+Macaca_fuscata      TCTCCACATATTCACTACAACACAACGAGGAACACTCACACATCACATAATCAACATAAA
+Macaca_mulatta      CCTCCACATATTCACCACAACACAACGAGGAGCACTCACACATCACATAATCAACATAAA
+Macaca_fascicularis TCTCCACATGTTCATTACAGTACAACGAGGAACACTCACACACCACATAATCAATATAAA
+Macaca_sylvanus     TCTTCACATATTCACCACAACACAACGAGGAGCGCTCACACACCACATAATTAACATAAA
+Saimiri_sciureus    ACTTCATATGTATGCCTCTACACAGCGAGGTCCACTTACATACAGCACCAGCAATATAAA
+Tarsius_syrichta    CCTATATATACTAATCATAACTCAACGAGGAAAATACACATATCATATCAACAATATCAT
+
+Lemur_catta         CCCATCCTTTACACGAGAAAACACCCTTATATCCATACACATACTCCCCCTTCTCCTATT
+Homo_sapiens        ACCCTCATTCACACGAGAAAACACCCTCATGTTCATACACCTATCCCCCATTCTCCTCCT
+Pan                 GCCCTCATTCACACGAGAAAATACTCTCATATTTTTACACCTATCCCCCATCCTCCTTCT
+Gorilla             ACCCTCATTTACACGAGAAAACATCCTCATATTCATGCACCTATCCCCCATCCTCCTCCT
+Pongo               ACCTTCTTTCACACGCGAAAATACCCTCATGCTCATACACCTATCCCCCATCCTCCTCTT
+Hylobates           ACCCTCACTCACACGAGAAAACATATTAATACTTATGCACCTCTTCCCCCTCCTCCTCCT
+Macaca_fuscata      GCCCCCCTTCACACGAGAAAACACATTAATATTCATACACCTCGCTCCAATTATCCTTCT
+Macaca_mulatta      ACCCCCCTTCACACGAGAAAACATATTAATATTCATACACCTCGCTCCAATCATCCTCCT
+Macaca_fascicularis ACCCCCCTTCACACGAGAAAACATATTAATATTCATACACCTCGCTCCAATTATCCTTCT
+Macaca_sylvanus     ACCACCTTTCACACGAGAAAACATATTAATACTCATACACCTCGCTCCAATTATTCTTCT
+Saimiri_sciureus    ACCAATATTTACACGAGAAAATACGCTAATATTTATACATATAACACCAATCCTCCTCCT
+Tarsius_syrichta    GCCCCCTTTCACCCGAGAAAATACATTAATAATCATACACCTATTTCCCTTAATCCTACT
+
+Lemur_catta         TACCTTAAACCCCAAAATTATTCTAGGACCCACGTACTGTAAATATAGTTTAAA-AAAAC
+Homo_sapiens        ATCCCTCAACCCCGACATCATTACCGGGTTTTCCTCTTGTAAATATAGTTTAACCAAAAC
+Pan                 ATCCCTCAATCCTGATATCATCACTGGATTCACCTCCTGTAAATATAGTTTAACCAAAAC
+Gorilla             ATCCCTCAACCCCGATATTATCACCGGGTTCACCTCCTGTAAATATAGTTTAACCAAAAC
+Pongo               ATCCCTCAACCCCAGCATCATCGCTGGGTTCGCCTACTGTAAATATAGTTTAACCAAAAC
+Hylobates           AACCCTCAACCCTAACATCATTACTGGCTTTACTCCCTGTAAACATAGTTTAATCAAAAC
+Macaca_fuscata      ATCCCTCAACCCCAACATCATCCTGGGGTTTACCTCCTGTAGATATAGTTTAACTAAAAC
+Macaca_mulatta      ATCTCTCAACCCCAACATCATCCTGGGGTTTACTTCCTGTAGATATAGTTTAACTAAAAC
+Macaca_fascicularis ATCTCTCAACCCCAACATCATCCTGGGGTTTACCTCCTGTAAATATAGTTTAACTAAAAC
+Macaca_sylvanus     ATCTCTTAACCCCAACATCATTCTAGGATTTACTTCCTGTAAATATAGTTTAATTAAAAC
+Saimiri_sciureus    TACCTTGAGCCCCAAGGTAATTATAGGACCCTCACCTTGTAATTATAGTTTAGCTAAAAC
+Tarsius_syrichta    ATCTACCAACCCCAAAGTAATTATAGGAACCATGTACTGTAAATATAGTTTAAACAAAAC
+
+Lemur_catta         ACTAGATTGTGAATCCAGAAATAGAAGCTCAAAC-CTTCTTATTTACCGAGAAAGTAATG
+Homo_sapiens        ATCAGATTGTGAATCTGACAACAGAGGCTTA-CGACCCCTTATTTACCGAGAAAGCT-CA
+Pan                 ATCAGATTGTGAATCTGACAACAGAGGCTCA-CGACCCCTTATTTACCGAGAAAGCT-TA
+Gorilla             ATCAGATTGTGAATCTGATAACAGAGGCTCA-CAACCCCTTATTTACCGAGAAAGCT-CG
+Pongo               ATTAGATTGTGAATCTAATAATAGGGCCCCA-CAACCCCTTATTTACCGAGAAAGCT-CA
+Hylobates           ATTAGATTGTGAATCTAACAATAGAGGCTCG-AAACCTCTTGCTTACCGAGAAAGCC-CA
+Macaca_fuscata      ACTAGATTGTGAATCTAACCATAGAGACTCA-CCACCTCTTATTTACCGAGAAAACT-CG
+Macaca_mulatta      ATTAGATTGTGAATCTAACCATAGAGACTTA-CCACCTCTTATTTACCGAGAAAACT-CG
+Macaca_fascicularis ATTAGATTGTGAATCTAACTATAGAGGCCTA-CCACTTCTTATTTACCGAGAAAACT-CG
+Macaca_sylvanus     ATTAGACTGTGAATCTAACTATAGAAGCTTA-CCACTTCTTATTTACCGAGAAAACT-TG
+Saimiri_sciureus    ATTAGATTGTGAATCTAATAATAGAAGAATA-TAACTTCTTAATTACCGAGAAAGTG-CG
+Tarsius_syrichta    ATTAGATTGTGAGTCTAATAATAGAAGCCCAAAGATTTCTTATTTACCAAGAAAGTA-TG
+
+Lemur_catta         TATGAACTGCTAACTCTGCACTCCGTATATAAAAATACGGCTATCTCAACTTTTAAAGGA
+Homo_sapiens        CAAGAACTGCTAACTCATGCCCCCATGTCTAACAACATGGCTTTCTCAACTTTTAAAGGA
+Pan                 TAAGAACTGCTAATTCATATCCCCATGCCTGACAACATGGCTTTCTCAACTTTTAAAGGA
+Gorilla             TAAGAGCTGCTAACTCATACCCCCGTGCTTGACAACATGGCTTTCTCAACTTTTAAAGGA
+Pongo               CAAGAACTGCTAACTCTCACT-CCATGTGTGACAACATGGCTTTCTCAGCTTTTAAAGGA
+Hylobates           CAAGAACTGCTAACTCACTATCCCATGTATGACAACATGGCTTTCTCAACTTTTAAAGGA
+Macaca_fuscata      CAAGGACTGCTAACCCATGTACCCGTACCTAAAATTACGGTTTTCTCAACTTTTAAAGGA
+Macaca_mulatta      CGAGGACTGCTAACCCATGTATCCGTACCTAAAATTACGGTTTTCTCAACTTTTAAAGGA
+Macaca_fascicularis CAAGGACTGCTAATCCATGCCTCCGTACTTAAAACTACGGTTTCCTCAACTTTTAAAGGA
+Macaca_sylvanus     CAAGGACCGCTAATCCACACCTCCGTACTTAAAACTACGGTTTTCTCAACTTTTAAAGGA
+Saimiri_sciureus    CAAGAACTGCTAATTCATGCTCCCAAGACTAACAACTTGGCTTCCTCAACTTTTAAAGGA
+Tarsius_syrichta    CAAGAACTGCTAACTCATGCCTCCATATATAACAATGTGGCTTTCTT-ACTTTTAAAGGA
+
+Lemur_catta         TAGAAGTAATCCATTGGCCTTAGGAGCCAAAAA-ATTGGTGCAACTCCAAATAAAAGTAA
+Homo_sapiens        TAACAGCTATCCATTGGTCTTAGGCCCCAAAAATTTTGGTGCAACTCCAAATAAAAGTAA
+Pan                 TAACAGCCATCCGTTGGTCTTAGGCCCCAAAAATTTTGGTGCAACTCCAAATAAAAGTAA
+Gorilla             TAACAGCTATCCATTGGTCTTAGGACCCAAAAATTTTGGTGCAACTCCAAATAAAAGTAA
+Pongo               TAACAGCTATCCCTTGGTCTTAGGATCCAAAAATTTTGGTGCAACTCCAAATAAAAGTAA
+Hylobates           TAACAGCTATCCATTGGTCTTAGGACCCAAAAATTTTGGTGCAACTCCAAATAAAAGTAA
+Macaca_fuscata      TAACAGCTATCCATTGACCTTAGGAGTCAAAAACATTGGTGCAACTCCAAATAAAAGTAA
+Macaca_mulatta      TAACAGCTATCCATTGACCTTAGGAGTCAAAAATATTGGTGCAACTCCAAATAAAAGTAA
+Macaca_fascicularis TAACAGCTATCCATTGACCTTAGGAGTCAAAAACATTGGTGCAACTCCAAATAAAAGTAA
+Macaca_sylvanus     TAACAGCTATCCATTGGCCTTAGGAGTCAAAAATATTGGTGCAACTCCAAATAAAAGTAA
+Saimiri_sciureus    TAGTAGTTATCCATTGGTCTTAGGAGCCAAAAACATTGGTGCAACTCCAAATAAAAGTAA
+Tarsius_syrichta    TAGAAGTAATCCATCGGTCTTAGGAACCGAAAA-ATTGGTGCAACTCCAAATAAAAGTAA
+
+Lemur_catta         TAAATCTATTATCCTCTTTCACCCTTGTCACACTGATTATCCTAACTTTACCTATCATTA
+Homo_sapiens        TAACCATGCACACTACTATAACCACCCTAACCCTGACTTCCCTAATTCCCCCCATCCTTA
+Pan                 TAACCATGTATACTACCATAACCACCTTAACCCTAACTCCCTTAATTCTCCCCATCCTCA
+Gorilla             TAACTATGTACGCTACCATAACCACCTTAGCCCTAACTTCCTTAATTCCCCCTATCCTTA
+Pongo               CAGCCATGTTTACCACCATAACTGCCCTCACCTTAACTTCCCTAATCCCCCCCATTACCG
+Hylobates           TAGCAATGTACACCACCATAGCCATTCTAACGCTAACCTCCCTAATTCCCCCCATTACAG
+Macaca_fuscata      TAATCATGCACACCCCCATCATTATAACAACCCTTATCTCCCTAACTCTCCCAATTTTTG
+Macaca_mulatta      TAATCATGCACACCCCTATCATAATAACAACCCTTATCTCCCTAACTCTCCCAATTTTTG
+Macaca_fascicularis TAATCATGCACACCCCCATCATAATAACAACCCTCATCTCCCTGACCCTTCCAATTTTTG
+Macaca_sylvanus     TAATCATGTATACCCCCATCATAATAACAACTCTCATCTCCCTAACTCTTCCAATTTTCG
+Saimiri_sciureus    TA---ATACACTTCTCCATCACTCTAATAACACTAATTAGCCTACTAGCGCCAATCCTAG
+Tarsius_syrichta    TAAATTTATTTTCATCCTCCATTTTACTATCACTTACACTCTTAATTACCCCATTTATTA
+
+Lemur_catta         TAAACGTTACAAACATATACAAAAACTACCCCTATGCACCATACGTAAAATCTTCTATTG
+Homo_sapiens        CCACCCTCGTTAACCCTAACAAAAAAAACTCATACCCCCATTATGTAAAATCCATTGTCG
+Pan                 CCACCCTCATTAACCCTAACAAAAAAAACTCATATCCCCATTATGTGAAATCCATTATCG
+Gorilla             CCACCTTCATCAATCCTAACAAAAAAAGCTCATACCCCCATTACGTAAAATCTATCGTCG
+Pongo               CTACCCTCATTAACCCCAACAAAAAAAACCCATACCCCCACTATGTAAAAACGGCCATCG
+Hylobates           CCACCCTTATTAACCCCAATAAAAAGAACTTATACCCGCACTACGTAAAAATGACCATTG
+Macaca_fuscata      CCACCCTCATCAACCCTTACAAAAAACGTCCATACCCAGATTACGTAAAAACAACCGTAA
+Macaca_mulatta      CCACCCTCATCAACCCTTACAAAAAACGTCCATACCCAGATTACGTAAAAACAACCGTAA
+Macaca_fascicularis CCACCCTCACCAACCCCTATAAAAAACGTTCATACCCAGACTACGTAAAAACAACCGTAA
+Macaca_sylvanus     CTACCCTTATCAACCCCAACAAAAAACACCTATATCCAAACTACGTAAAAACAGCCGTAA
+Saimiri_sciureus    CTACCCTCATTAACCCTAACAAAAGCACACTATACCCGTACTACGTAAAACTAGCCATCA
+Tarsius_syrichta    TTACAACAACTAAAAAATATGAAACACATGCATACCCTTACTACGTAAAAAACTCTATCG
+
+Lemur_catta         CATGTGCCTTCATCACTAGCCTCATCCCAACTATATTATTTATCTCCTCAGGACAAGAAA
+Homo_sapiens        CATCCACCTTTATTATCAGTCTCTTCCCCACAACAATATTCATGTGCCTAGACCAAGAAG
+Pan                 CGTCCACCTTTATCATTAGCCTTTTCCCCACAACAATATTCATATGCCTAGACCAAGAAG
+Gorilla             CATCCACCTTTATCATCAGCCTCTTCCCCACAACAATATTTCTATGCCTAGACCAAGAAG
+Pongo               CATCCGCCTTTACTATCAGCCTTATCCCAACAACAATATTTATCTGCCTAGGACAAGAAA
+Hylobates           CCTCTACCTTTATAATCAGCCTATTTCCCACAATAATATTCATGTGCACAGACCAAGAAA
+Macaca_fuscata      TATATGCTTTCATCATCAGCCTCCCCTCAACAACTTTATTCATCTTCTCAAACCAAGAAA
+Macaca_mulatta      TATATGCTTTCATCATCAGCCTCCCCTCAACAACTTTATTCATCTTCTCAAACCAAGAAA
+Macaca_fascicularis TATATGCTTTTATTACCAGTCTCCCCTCAACAACCCTATTCATCCTCTCAAACCAAGAAA
+Macaca_sylvanus     TATATGCTTTCATTACCAGCCTCTCTTCAACAACTTTATATATATTCTTAAACCAAGAAA
+Saimiri_sciureus    TCTACGCCCTCATTACCAGTACCTTATCTATAATATTCTTTATCCTTACAGGCCAAGAAT
+Tarsius_syrichta    CCTGCGCATTTATAACAAGCCTAGTCCCAATGCTCATATTTCTATACACAAATCAAGAAA
+
+Lemur_catta         CAATCATTTCCAACTGACATTGAATAACAATCCAAACCCTAAAACTATCTATTAGCTT
+Homo_sapiens        TTATTATCTCGAACTGACACTGAGCCACAACCCAAACAACCCAGCTCTCCCTAAGCTT
+Pan                 CTATTATCTCAAACTGGCACTGAGCAACAACCCAAACAACCCAGCTCTCCCTAAGCTT
+Gorilla             CTATTATCTCAAGCTGACACTGAGCAACAACCCAAACAATTCAACTCTCCCTAAGCTT
+Pongo               CCATCGTCACAAACTGATGCTGAACAACCACCCAGACACTACAACTCTCACTAAGCTT
+Hylobates           CCATTATTTCAAACTGACACTGAACTGCAACCCAAACGCTAGAACTCTCCCTAAGCTT
+Macaca_fuscata      CAACCATTTGGAGCTGACATTGAATAATGACCCAAACACTAGACCTAACGCTAAGCTT
+Macaca_mulatta      CAACCATTTGAAGCTGACATTGAATAATAACCCAAACACTAGACCTAACACTAAGCTT
+Macaca_fascicularis CAACCATTTGGAGTTGACATTGAATAACAACCCAAACATTAGACCTAACACTAAGCTT
+Macaca_sylvanus     CAATCATCTGAAGCTGGCACTGAATAATAACCCAAACACTAAGCCTAACATTAAGCTT
+Saimiri_sciureus    CAATAATTTCAAACTGACACTGAATAACTATCCAAACCATCAAACTATCCCTAAGCTT
+Tarsius_syrichta    TAATCATTTCCAACTGACATTGAATAACGATTCATACTATCAAATTATGCCTAAGCTT
+	;
+End;
diff --git a/doc/source/examples/primates.chars.simple.nexus b/doc/source/examples/primates.chars.simple.nexus
new file mode 100644
index 0000000..668762b
--- /dev/null
+++ b/doc/source/examples/primates.chars.simple.nexus
@@ -0,0 +1,27 @@
+#NEXUS 
+
+[Modified from example data distributed with PAUP* 4.01b.
+ Original Data from:
+	Hayasaka, K., T. Gojobori, and S. Horai. 1988. Molecular phylogeny
+		and evolution of primate mitochondrial DNA. Mol. Biol. Evol.
+		5:626-644.
+]
+
+BEGIN DATA;
+	DIMENSIONS NTAX=12 NCHAR=898;
+	FORMAT DATATYPE=DNA GAP=-;
+	MATRIX
+Lemur_catta      AAGCTTCATAGGAGCAACCATTCTAATAATCGCACATGGCCTTACATCATCCATATTATTCTGTCTAGCCAACTCTAACTACGAACGAATCCATAGCCGTACAATACTACTAGCACGAGGGATCCAAACCATTCTCCCTCTTATAGCCACCTGATGACTACTCGCCAGCCTAACTAACCTAGCCCTACCCACCTCTATCAATTTAATTGGCGAACTATTCGTCACTATAGCATCCTTCTCATGATCAAACATTACAATTATCTTAATAGGCTTAAATATGCTCATCACCGCTCTCTATTCCCTCTATATATTAACTACTACACAACGAGGAAAACTCACATATCATTCGCACAACCTAAACCCATCCTTTACACGAGAAAACACCCTTATATCCATACACATACTCCCCCTTCTCCTATTTACCTTAAACCCCAAAATTATTCTAGGACCCACGTACTGTAAATATAGTTTAAA- [...]
+Homo_sapiens     AAGCTTCACCGGCGCAGTCATTCTCATAATCGCCCACGGGCTTACATCCTCATTACTATTCTGCCTAGCAAACTCAAACTACGAACGCACTCACAGTCGCATCATAATCCTCTCTCAAGGACTTCAAACTCTACTCCCACTAATAGCTTTTTGATGACTTCTAGCAAGCCTCGCTAACCTCGCCTTACCCCCCACTATTAACCTACTGGGAGAACTCTCTGTGCTAGTAACCACGTTCTCCTGATCAAATATCACTCTCCTACTTACAGGACTCAACATACTAGTCACAGCCCTATACTCCCTCTACATATTTACCACAACACAATGGGGCTCACTCACCCACCACATTAACAACATAAAACCCTCATTCACACGAGAAAACACCCTCATGTTCATACACCTATCCCCCATTCTCCTCCTATCCCTCAACCCCGACATCATTACCGGGTTTTCCTCTTGTAAATATAGTTTAACC [...]
+Pan              AAGCTTCACCGGCGCAATTATCCTCATAATCGCCCACGGACTTACATCCTCATTATTATTCTGCCTAGCAAACTCAAATTATGAACGCACCCACAGTCGCATCATAATTCTCTCCCAAGGACTTCAAACTCTACTCCCACTAATAGCCTTTTGATGACTCCTAGCAAGCCTCGCTAACCTCGCCCTACCCCCTACCATTAATCTCCTAGGGGAACTCTCCGTGCTAGTAACCTCATTCTCCTGATCAAATACCACTCTCCTACTCACAGGATTCAACATACTAATCACAGCCCTGTACTCCCTCTACATGTTTACCACAACACAATGAGGCTCACTCACCCACCACATTAATAACATAAAGCCCTCATTCACACGAGAAAATACTCTCATATTTTTACACCTATCCCCCATCCTCCTTCTATCCCTCAATCCTGATATCATCACTGGATTCACCTCCTGTAAATATAGTTTAACC [...]
+Gorilla          AAGCTTCACCGGCGCAGTTGTTCTTATAATTGCCCACGGACTTACATCATCATTATTATTCTGCCTAGCAAACTCAAACTACGAACGAACCCACAGCCGCATCATAATTCTCTCTCAAGGACTCCAAACCCTACTCCCACTAATAGCCCTTTGATGACTTCTGGCAAGCCTCGCCAACCTCGCCTTACCCCCCACCATTAACCTACTAGGAGAGCTCTCCGTACTAGTAACCACATTCTCCTGATCAAACACCACCCTTTTACTTACAGGATCTAACATACTAATTACAGCCCTGTACTCCCTTTATATATTTACCACAACACAATGAGGCCCACTCACACACCACATCACCAACATAAAACCCTCATTTACACGAGAAAACATCCTCATATTCATGCACCTATCCCCCATCCTCCTCCTATCCCTCAACCCCGATATTATCACCGGGTTCACCTCCTGTAAATATAGTTTAACC [...]
+Pongo            AAGCTTCACCGGCGCAACCACCCTCATGATTGCCCATGGACTCACATCCTCCCTACTGTTCTGCCTAGCAAACTCAAACTACGAACGAACCCACAGCCGCATCATAATCCTCTCTCAAGGCCTTCAAACTCTACTCCCCCTAATAGCCCTCTGATGACTTCTAGCAAGCCTCACTAACCTTGCCCTACCACCCACCATCAACCTTCTAGGAGAACTCTCCGTACTAATAGCCATATTCTCTTGATCTAACATCACCATCCTACTAACAGGACTCAACATACTAATCACAACCCTATACTCTCTCTATATATTCACCACAACACAACGAGGTACACCCACACACCACATCAACAACATAAAACCTTCTTTCACACGCGAAAATACCCTCATGCTCATACACCTATCCCCCATCCTCCTCTTATCCCTCAACCCCAGCATCATCGCTGGGTTCGCCTACTGTAAATATAGTTTAACC [...]
+Hylobates        AAGCTTTACAGGTGCAACCGTCCTCATAATCGCCCACGGACTAACCTCTTCCCTGCTATTCTGCCTTGCAAACTCAAACTACGAACGAACTCACAGCCGCATCATAATCCTATCTCGAGGGCTCCAAGCCTTACTCCCACTGATAGCCTTCTGATGACTCGCAGCAAGCCTCGCTAACCTCGCCCTACCCCCCACTATTAACCTCCTAGGTGAACTCTTCGTACTAATGGCCTCCTTCTCCTGGGCAAACACTACTATTACACTCACCGGGCTCAACGTACTAATCACGGCCCTATACTCCCTTTACATATTTATCATAACACAACGAGGCACACTTACACACCACATTAAAAACATAAAACCCTCACTCACACGAGAAAACATATTAATACTTATGCACCTCTTCCCCCTCCTCCTCCTAACCCTCAACCCTAACATCATTACTGGCTTTACTCCCTGTAAACATAGTTTAATC [...]
+Macaca_fuscata   AAGCTTTTCCGGCGCAACCATCCTTATGATCGCTCACGGACTCACCTCTTCCATATATTTCTGCCTAGCCAATTCAAACTATGAACGCACTCACAACCGTACCATACTACTGTCCCGAGGACTTCAAATCCTACTTCCACTAACAGCCTTTTGATGATTAACAGCAAGCCTTACTAACCTTGCCCTACCCCCCACTATCAATCTACTAGGTGAACTCTTTGTAATCGCAACCTCATTCTCCTGATCCCATATCACCATTATGCTAACAGGACTTAACATATTAATTACGGCCCTCTACTCTCTCCACATATTCACTACAACACAACGAGGAACACTCACACATCACATAATCAACATAAAGCCCCCCTTCACACGAGAAAACACATTAATATTCATACACCTCGCTCCAATTATCCTTCTATCCCTCAACCCCAACATCATCCTGGGGTTTACCTCCTGTAGATATAGTTTAACT [...]
+Macaca_mulatta       AAGCTTTTCTGGCGCAACCATCCTCATGATTGCTCACGGACTCACCTCTTCCATATATTTCTGCCTAGCCAATTCAAACTATGAACGCACTCACAACCGTACCATACTACTGTCCCGGGGACTTCAAATCCTACTTCCACTAACAGCTTTCTGATGATTAACAGCAAGCCTTACTAACCTTGCCCTACCCCCCACTATCAACCTACTAGGTGAACTCTTTGTAATCGCGACCTCATTCTCCTGGTCCCATATCACCATTATATTAACAGGATTTAACATACTAATTACGGCCCTCTACTCCCTCCACATATTCACCACAACACAACGAGGAGCACTCACACATCACATAATCAACATAAAACCCCCCTTCACACGAGAAAACATATTAATATTCATACACCTCGCTCCAATCATCCTCCTATCTCTCAACCCCAACATCATCCTGGGGTTTACTTCCTGTAGATATAGTTT [...]
+Macaca_fascicularis  AAGCTTCTCCGGCGCAACCACCCTTATAATCGCCCACGGGCTCACCTCTTCCATGTATTTCTGCTTGGCCAATTCAAACTATGAGCGCACTCATAACCGTACCATACTACTATCCCGAGGACTTCAAATTCTACTTCCATTGACAGCCTTCTGATGACTCACAGCAAGCCTTACTAACCTTGCCCTACCCCCCACTATTAATCTACTAGGCGAACTCTTTGTAATCACAACTTCATTTTCCTGATCCCATATCACCATTGTGTTAACGGGCCTTAATATACTAATCACAGCCCTCTACTCTCTCCACATGTTCATTACAGTACAACGAGGAACACTCACACACCACATAATCAATATAAAACCCCCCTTCACACGAGAAAACATATTAATATTCATACACCTCGCTCCAATTATCCTTCTATCTCTCAACCCCAACATCATCCTGGGGTTTACCTCCTGTAAATATAGTTT [...]
+Macaca_sylvanus      AAGCTTCTCCGGTGCAACTATCCTTATAGTTGCCCATGGACTCACCTCTTCCATATACTTCTGCTTGGCCAACTCAAACTACGAACGCACCCACAGCCGCATCATACTACTATCCCGAGGACTCCAAATCCTACTCCCACTAACAGCCTTCTGATGATTCACAGCAAGCCTTACTAATCTTGCTCTACCCTCCACTATTAATCTACTGGGCGAACTCTTCGTAATCGCAACCTCATTTTCCTGATCCCACATCACCATCATACTAACAGGACTGAACATACTAATTACAGCCCTCTACTCTCTTCACATATTCACCACAACACAACGAGGAGCGCTCACACACCACATAATTAACATAAAACCACCTTTCACACGAGAAAACATATTAATACTCATACACCTCGCTCCAATTATTCTTCTATCTCTTAACCCCAACATCATTCTAGGATTTACTTCCTGTAAATATAGTTT [...]
+Saimiri_sciureus AAGCTTCACCGGCGCAATGATCCTAATAATCGCTCACGGGTTTACTTCGTCTATGCTATTCTGCCTAGCAAACTCAAATTACGAACGAATTCACAGCCGAACAATAACATTTACTCGAGGGCTCCAAACACTATTCCCGCTTATAGGCCTCTGATGACTCCTAGCAAATCTCGCTAACCTCGCCCTACCCACAGCTATTAATCTAGTAGGAGAATTACTCACAATCGTATCTTCCTTCTCTTGATCCAACTTTACTATTATATTCACAGGACTTAATATACTAATTACAGCACTCTACTCACTTCATATGTATGCCTCTACACAGCGAGGTCCACTTACATACAGCACCAGCAATATAAAACCAATATTTACACGAGAAAATACGCTAATATTTATACATATAACACCAATCCTCCTCCTTACCTTGAGCCCCAAGGTAATTATAGGACCCTCACCTTGTAATTATAGTTTAGCT [...]
+Tarsius_syrichta AAGTTTCATTGGAGCCACCACTCTTATAATTGCCCATGGCCTCACCTCCTCCCTATTATTTTGCCTAGCAAATACAAACTACGAACGAGTCCACAGTCGAACAATAGCACTAGCCCGTGGCCTTCAAACCCTATTACCTCTTGCAGCAACATGATGACTCCTCGCCAGCTTAACCAACCTGGCCCTTCCCCCAACAATTAATTTAATCGGTGAACTGTCCGTAATAATAGCAGCATTTTCATGGTCACACCTAACTATTATCTTAGTAGGCCTTAACACCCTTATCACCGCCCTATATTCCCTATATATACTAATCATAACTCAACGAGGAAAATACACATATCATATCAACAATATCATGCCCCCTTTCACCCGAGAAAATACATTAATAATCATACACCTATTTCCCTTAATCCTACTATCTACCAACCCCAAAGTAATTATAGGAACCATGTACTGTAAATATAGTTTAAAC [...]
+	;
+END;
diff --git a/doc/source/examples/primates.chars.subsets-1stpos.nexus b/doc/source/examples/primates.chars.subsets-1stpos.nexus
new file mode 100644
index 0000000..7b3e247
--- /dev/null
+++ b/doc/source/examples/primates.chars.subsets-1stpos.nexus
@@ -0,0 +1,20 @@
+#NEXUS 
+
+Begin data;
+	Dimensions ntax=12 nchar=231;
+	Format datatype=dna gap=-;
+	Matrix
+Lemur_catta         ATAGGAACAAGCGCATTATTTCGATATGCACACAACCGCGACAACCCAGATTCCGACAACGCCATAATAGGCTGAAGTTTTTAAAAATAGTAACAAGCTTCTATAAACCGACATCTCACACTTACGAACATACACCCCCTATACAAACGCATAACTTTTACGACAACATCAAAAGAAATAATCTGCTGATTAGTGTAAACACAATTATTGCGAAATATCTAAACACACTAA
+Homo_sapiens        ATAGGGACAAGCGCATTTCTTCGATATGCACACAAACTCGCCACCCCAGTTTCCGACGACGTCCAAACCGGCTGCGAATTTTAAACCCAGCAACGAGCTTCTATAAACTGTCACCAAAAACTTACGAACATACCTCACCCTCACGAAAGTTTAAACAAAAACACATCACCACAACGACAAAATTCCTGATAGGTATAAACTCAAATATCGCGGAATATCTGAACAACCTCA
+Pan                 ATAGGAACAAGCGCATTTTTTCGATATGCACACAAACTCGCCACCCCAGTTTCCGACGACGCCCAAACCGGCTGCGATTTTTAAACCCAGTAACAAGCTTCTATAAACTGTCACCAAAAACTTACGAACATTCCTCACCCTCACGAAAGTATAAATAAAAATACACTACCACAACAACAAAATTCCTGATAAGTATAAACTCAAATATCGCGGAATATCTGAACAACCTCA
+Gorilla             ATAGGGGCAAGCGCATTTTTTCGATATGCACACAAACTCGCCACCCCAGCTTCCGACGACGTCCAAACCGGCTGCGAATTTTAAACTCAGTAACAAGCTTCTATAAACTGCCACCAAAAACTTACGAACATACCTCACCCTCACGAAAGTATAAATGAAAATGCATTACCACAATAACAAAATTCCTGATAGGTATAAACTCAAATCTCGCGGAATATCTGAACAACCTCA
+Pongo               ATAGGAACAAGCGCATTCCTTCGATATGCACACAAACTCGCCACCCCAGCTTCCGACAACGCCCAAACCGGCTGCAGATTTTAAAACCAGCAACAAACTTCTATAAACCGACACCAAAAACTTACGAACACACCTCACCTTCACAAAGGTGTAGATAAAAGCATATCACCAAGACAACAAAACTCCTGAAGAGTGTAAACACAAATATCGCGAAGAATTTAAACACCCTCA
+Hylobates           ATAGGAGCAAGCGCATTCCTTCGATATGCACACAAACTCGCCGTCCCAGTTTCGGACGACGCCCAAACCGGCTGCAGTTTTGAAAAACAGCAGCAAGCTTCTATAAACCGACACCAAAAACTCACGAATACACCTCCCCCACACAAAAGTACAGATAAAGACACATCACCAAGACAACAAAATTCCTGAAAAGTATAAACTCAAATATAGCGAAATATCTAGACACGCTCA
+Macaca_fuscata      ATTGGAACAAGCGCATTATTTCGATATGCACACAACCTCGCCACCCCAGTTTTAGACAACGCCCAAACCGGCTGAGATTTTTCAAAACAGCAATAAGCTTCCATAAACCGACACCAAAAACCTACGAATATACCGCAACCTCACAAACGTATAAACACAAAAACATCACCATGACAACTAACCTCGTGAAAGATGTAAACCTAATTATTACGAAATATCTAAACACGCACA
+Macaca_mulatta      ATTGGAACAAGCGCATTATTTCGATATGCACACAACCTCGCCACCCCAGTTTTAGACAACGCCCAAACCGGCTGAGATTTTTCAAAATAGTAACAAGCTTCCATAAACCGGCACCAAAAACCTACGAATATACCGCAACCTCACAAACGTATAAACACAAAAACATCACCATGACAACTAACCTCGTGAAAGATGTAAACCTAATTATTACGAAATATCTAAACACGCACA
+Macaca_fascicularis ATTGGAACAAGCGCATTATTTTGATATGCACACAACCTCGCCACCCTAGTTTCAGACAACGCCCAAACCGGCTGAAATTTTTCAAAGTAGCAACAAGCTTCCATAAGCCGACACCAAAAACCTACGAATATACCGCAACCTCACAAACGTATAAACACAAAAACATCACCATGACAACTAACTTCGTGAAAGATGTAAACCTAACTACTACGAAATATCTAAACATGCACA
+Macaca_sylvanus     ATTGGAACAGGCGCATTATTTTGATATGCACACAACCTCGCCACCCCAGTTTTAGACAACGCCTAAACCGGCTGAGATTTTTCAAAACAGCAACAAGCTTCCATAAACCGGCACCAAAAACCTACGAATACACCGCAACCTCACAAACGTATAAATACAAAAACATCACCATGACAACAAACCTCATGAAGGATGTAAACTTAATTATTACGAAATATCTAAACACACATA
+Saimiri_sciureus    ATAGGAACAAGCGTATTACTTCGATATGCACACAAATACGCCACTCCAGCTTCCGACGACGCCAGAACGGGTCAAGTTTTTTATAAATAGCAACAAGCTTCCATGTACCGCCATAAAAAACATACGAACATACAACACCCATACAGAAGCTCA-ACTTAACAACAACCGCACGACAACAAAACTCTTGACGAATGCAAAATTAATTACAGCGTAATATCTAAACAAACTCA
+Tarsius_syrichta    ATAGGAACAAGCGCATTCTTTCGAAATGCGCACAAGCGCGCCACTCCGGATTCCGATAACGCCCAAATAGGCTGAAGGTTTTCCAAATGGCAACAAGCTTCTACAAACCGATATCAAAAACCTACGAATAAACCTCTACCTAACAGAAGAATAATTTTTATCTCACTAACTAAAAAAATGACGTCTTGAATAGTGTAAACGCACATCTAACGAAATATCTAAACAAATTCA
+	;
+End;
diff --git a/doc/source/examples/primates.chars.subsets-2ndpos.nexus b/doc/source/examples/primates.chars.subsets-2ndpos.nexus
new file mode 100644
index 0000000..c173d81
--- /dev/null
+++ b/doc/source/examples/primates.chars.subsets-2ndpos.nexus
@@ -0,0 +1,20 @@
+#NEXUS 
+
+Begin data;
+	Dimensions ntax=12 nchar=231;
+	Format datatype=dna gap=-;
+	Matrix
+Lemur_catta         GTTGCCTTTTCAGTCCCTTTGTCACAAAGTAGGCTTTCGGTACTTCTTCCGGTTCGTCATCTCCCTATTGATTTCTCCTCGCATCTTTTGTATTTCCTACTATTCCCAGGATCAACAATACCTCGAACTTCTATTCTTTTCTACATTTGCCATATTCCTCTTCTTTTCTCTTTATCATAAAACACCATACCTCGCTTCGTTCCTTTTCCGAACTTCAGAGTCTACTATCTG
+Homo_sapiens        GTCGCTTTTTCAGTCCCTTTGTCACAAAGCAGGTTTTCAGTACTTCTTCTGGTTCGTCATCTCCCTATTGATCTTTCCTCGCATCTTTCGTATTTCCTACTATTCCCAGGCTCAATAATACCTCGAACTTTTATCCTTTTCTACATTCGTCCTCTACCTCCTCTCCTTCCTTCCTTACAAAACACAATACTTCCCTTTGTTCCCTTTGTAAATTTCAGAGCCCACCATCTG
+Pan                 GTCGCTTTTTCAGTCCCTTTGTCACAAAGCAGGTTTTCAGTACTTCTTCTGGTTCGTCATCTCCCTATTGATCTTTCCTCGCACCTTTCGTATTTCCTACTATTCCCAGGCTCAATAATACCTCGAACTTTTATCCTTTTCTACATTCGTCCTCTACCTCCTCTCCTTTCTTCCTTACAAAACACAATACTTCCCTTTGTTCCCTTTGTAAACTTCAGAGCCCACCATCTG
+Gorilla             GTCGCTTTTTCAGTCCCTTTGTCACAAAGCAGGTTTTCAGTACTTCTTCTGGTTCGTCATCTCCCTATTGATCTTTCCTCGCACCTTTCGCATTTCCTACTATTCCCAGGCTCAATCATACCTCGAATTTTTATCCTTTTCTACATTCGTCCTCTACCTCCTCTCCTTCCTTCCTTACAAAGCACAATACTTCCCTTTGTTCCCTTTGTAAACTTCGGAGCCCACTATCTG
+Pongo               GTCGCCCTTTCAGTCCCTTTGTCACAAAGCAGGTTTTCAGTACTTCTTCTGGTTCGTCATCTCCCTATTGATCTTTCTTCGCATCTTTCGTATTTCCTACTATTCCCAGGCCCAATAATACCTCGAACTTTTATCCTTTTCTACGTTCGTCACCTTCCTCCTCTCCTTCCTCCCTTACAAAACACAATACCTCCCTCTGTTCCCTTTGTGAACTTCAGGGCCCACTATCTG
+Hylobates           GTCGCCTTTTCAGTCCCTTTGTCACAAAGCAGGTTTTCGGTACTTCTTCTGGTCCGTCATCTCCCTATTGATTTTTCCTCGCACCTCTCGTATTTCCTACTATTTTCAGGCTCAATAATACCTCGAATTTTTATTCTTTTCTACATTCGTCCTCTACCTCTTCTCCTTCCTCCCTTACAAAATACAATATCTCCCTTTGTTCCTTTTGCAAACTTCAGAGCCCACTATCTG
+Macaca_fuscata      GTCGCCTTTTCAGTCCCTATGTCACAAAGCAAGCTTTCGGTATTTCTCCTGGTCCGTCATCTCCCTATTGATTTTCCCTCGCATCTTTCGTATTTCCTACTATTCCCAGGCTCAATTATACCTCGAACTTTTATCCTTTTCTACATTTGTCCTTTACCTTTCCTTCTCTCTTCCTTACAAAGCACAATACCTTACTTTGTCCCCTTTTCAAACCTGGGAGTTCACTATCTG
+Macaca_mulatta      GTCGCCTTTTCAGTCCCTATGTCACAAAGCAAGCTTTCGGTATTTCTCCTGGTCCGTCATCTCCCTATTGATTTTCCCTCGCATCTTTCGTATTTCCTACTATTCCCAGGCTCAATTATACCTCGAATTTTTATCCTTTTCTACATTTGTCCTTTACCTTTCCTTCTCTCTTCCTTACAAAGCACAATACCTTACTTTGTCCCCTTTTCAAACCTGGGAGTTCACTATCTG
+Macaca_fascicularis GTCGCCCTTTCAGTCCCTATGTCACAAAGCAAGCTTTCGGTATTTCTCCTGGTCCGTCATCTCCCTATTGATTTTCCCTCGCATCTTTCGTATTTCCTACTATTTCTAGGCTCAATTATACCTCGAATTTTTATCCTTTTCTACATTTGTCCTTTACCTTTCCTTCTCTCTTCCTCACAAAGCACAATACCTTACTTCGTCCCCTTTTCAAACCTGGGAGTCCACTATCTG
+Macaca_sylvanus     GTCGCCTTTTCAGTCCCTATGTCACAAAGCAGGTTTTCGGTATTTCTCCTGGTCCGTCATCTCCCTATTGATTTTCCCTCGCATCTTTCGTATTTCCTACTATTCCCAGGCTCAATTATACCTCGAATTTTTATCCTTTTCTACATTTGTCCTTTACCTTTCCTTCTCTCTTCCTTACAAAATACAATACCTTACTTCGTCCCCTATTTAAACTTGGGAGTTCACTGTCTG
+Saimiri_sciureus    GTCGCTTTTTCAGTCCCTTTGTCACAAAGTAGGCTCTCGGTACTTCTTGTGGTTCATCATCTCCCTATTGATTCTTCCTCGCATCTTTCGTATTTCCTACTATACCCAGGCTCAGCGATACTTCGAACTTTTATCCTTTTCTGCATTTGCCCT-TATCTCTTCTTGTTCCTTCCTTACAAGCTACAATATCTTACTTCGCTCTTTTTTCGAACTTCAGAGTCTACTATCTG
+Tarsius_syrichta    GTTGCCCTTTCAGTCCCTTTGTCACAAAGTAGGCTCTCGGTACTTCTCCCGGTTCGTCATCTCCCTATTGATCTTTCCTCGCATCTTTTGTACTTCCTACTATTTTCAGGAACAATAATTCCTCGAACTTTTATTCTTTTCCACATTTGCTATATTCCCTTTCTCTTTCCTTTCCCAAAACACACAATAACTCGCTTCGTTCTTTTTACAAATTTCAGAGTCTACTATGTG
+	;
+End;
diff --git a/doc/source/examples/primates.chars.subsets-3rdpos.nexus b/doc/source/examples/primates.chars.subsets-3rdpos.nexus
new file mode 100644
index 0000000..c0934b1
--- /dev/null
+++ b/doc/source/examples/primates.chars.subsets-3rdpos.nexus
@@ -0,0 +1,20 @@
+#NEXUS 
+
+Begin data;
+	Dimensions ntax=12 nchar=231;
+	Format datatype=dna gap=-;
+	Matrix
+Lemur_catta         CCAAACTAACATCTAACAACTACCTCCAACTCTAAAAAAGCACTCTTACCAAACCCATCACACCTCTATCAACCTAACCAAACTATCAACATGCCCTCTCCTAATTAAAAACATTGCCACACTAAACCTACACACCTCATCACCATTAACGCATAACTCCTCAGTCATATCTACTACACACCCTAACAATTTATCCCTCCCATAATCCAAAAACTCCATAAACACAAATTC
+Homo_sapiens        CCCCACTCACCCGTACAAACCAACACCACTCTCCACCTAATATACAAATTAATAACCTCCCACCTTCAGAACTGAACGCCAATCTCATAACCAACACACCCCATCAAAGCACCCCTCCAACACAAACCCGCACACCTCCACCCCCCTCGTCTACGCTTACCACGTCATCCCTCCCTCTCAACACCTTAACTCACCTTCTCCCAAACGCACAATTCGCACACACAACGCCAC
+Pan                 CCCCATCCACCCATACAAACCAACATTACCCTCCATCCAATATACAAACTAACAACCTCCCACTCTTCAGACCGAACACCAATCTCACAACCAACACGCCCCGTCAAAACACCCCTTCAGCACAAATTCATACACCCCTACCTTTCCTACCCACGTTCACCACATCATCCCCCCCTCTCAACATCTTGACTCGCCTCTCTCCAAACACACAATTCACGCAAACAACGCCAC
+Gorilla             CCCCATTTATCCATAAAAACCAACACCAACCCCCATCTAACACACAAACTAATGACCCCCCACCCTCAAAGCCAAACACCAACCCTATAATCAATACGCCTTATCAAAACACACCCCCAACATAAACCCACGCACCCCCACCCCTTCCGCCCATGCTCACCACATCATCTCTCCCCTTCAACACCTCAATCCACCTCCCCCCAAATACACAATTCACACAAACAATACCAC
+Pongo               CCCCACCCGTCTACACCAGCCAACACCAACCCCCACCTACTATACCAACCAATAACCTCTCAACCCCTAAACCAAACACTATCCCCAAAACCAACACACTCTACCAAAATACACCCCCAATTCACATCCGCACACCCCCACCCCCCCTGCCCACGTCCATCCCATCACCCTCTCCTCCCAACACCCTAAGCCACCTTCCTCAAAATCCAAAACCCACACAACCGAAACAAC
+Hylobates           CTATACCCACCCAACTCGACCTACACCAATCCCCACATAGCACACAGACCAACAACCTCCCACCTTCCATACCAAGCCCCGACTTTACCGCCAACGCACCTCATCAAAACATACCTACAACACAAACAAATGCCCCCCCACCCTCCTTCTTCAAGCCCACTAGACCATCCTACCTTCCTAGCACGCCAAGCTCTCTACCATCAAACGCACAACTTACACATACAGAACCAC
+Macaca_fuscata      CTCCACCTGCTCACCTCATCCACTACTACTCCTCAAGCAATACATAAACTAAAAACTTCTCACCTCTAATACTACACACCACTCCTGAAATCAATGCCCTCCACTAAAAAACATCACCAGCCCAAACAAACACCTATCTACCCCCCCGGTCCACGCCCCTAACTCCATCATTCCCCCTCAATACATCAAACAATTCCCCCCAATACCCACAAACTGCATAAGCAAACAGAC
+Macaca_mulatta      CTTCACCCGTTCACCTCATCCACTACTACTCCTCAAGCGATACATAAATCAAAAACTTCTCACCTCCAATACTACGCACCGCTCCTAAAATCAATGCCCCCCACCAAAAAACATCACCAACCCAAACAAACACCTACCCATCCCCCCGGTTCACGCCTCAAACTCCATCATTCCCCCTCAATACATCAAACAATTCCCCCCAATACCCACAAACTACATAAACAAACAAAC
+Macaca_fascicularis CCCCACCTACCCGCCTCGTCCGCTACTGCTTCTCAAACAATATATAGACCAACAACTTCTCACCTTTAACACTACATATCACTCCTGAGCTTAACACCCTCCGCTAAAAAACACCACTAACCCAAACAAACACCTATCTATCCCCCCGGTCCACGCCCCAAACCCCGCTATTCCCCCCTAATACACCAAACAATTTTCTCCAACACCCACAAACTGTATAAACAAACAAAC
+Macaca_sylvanus     CCCTATCTATCTACCTCACCCGCCACCACCCCCCAAACAACACACAAACCAACAACTTTTTACCTTTAGCACCACACATCACCCCCAAAAGCAATACCCTTCACCAAAAAGCACCATCAAATCAAACAAACACCTATTTATTCCCCTAATTCACGTCCCAAATCCCATTATCTCTCCCCAACATACCAAACAATTCTCCCTAATATACACAAACCACGCAAACAAACAAAC
+Saimiri_sciureus    CCCCAGCAACTCGTTGTGACCAACATCAATCCAAAATTAGCAAACGTACCAACAATCTCCCACATTTAAAAACACATCCTACCTTTACAATTAATAACCATTGTCTAGATATACCCCTAAAATAAATGAATATAAACCCTCGCCGATAACATA-ACCCCTAAAATCAAGACATCCTCTCACAACGCCAAACCCCCCTCTCATAACTCTACAAAATACACAATCACCAACAC
+Tarsius_syrichta    TCTACCTTATCTCCCCCAATCAATACCAACCTAAAAACTCTACAATTAAAAACCCCACCGCTCAATTACTAGCAAAAATAGACATTCAACTCCTCCCATCATAACATAAAACATTCCTCGCTCCAATAAACACATCACAATCCCAATAACGCATATACCTAAATACATCATTTAATAATAATACTCCAACTCCCATAACACAGCATACATAAACTCCATAAGTTTCAACAC
+	;
+End;
diff --git a/doc/source/examples/primates.chars.subsets-all.nexus b/doc/source/examples/primates.chars.subsets-all.nexus
new file mode 100644
index 0000000..6505d4f
--- /dev/null
+++ b/doc/source/examples/primates.chars.subsets-all.nexus
@@ -0,0 +1,72 @@
+#NEXUS
+
+[Modified from example data distributed with PAUP* 4.01b.
+ Original Data from:
+	Hayasaka, K., T. Gojobori, and S. Horai. 1988. Molecular phylogeny
+		and evolution of primate mitochondrial DNA. Mol. Biol. Evol.
+		5:626-644.
+]
+
+BEGIN TAXA;
+	DIMENSIONS NTAX=12;
+	TAXLABELS
+        Lemur_catta
+        Homo_sapiens
+        Pan
+        Gorilla
+        Pongo
+        Hylobates
+        Macaca_fuscata
+        Macaca_mulatta
+        Macaca_fascicularis
+        Macaca_sylvanus
+        Saimiri_sciureus
+        Tarsius_syrichta
+	;
+END;
+
+BEGIN CHARACTERS;
+	DIMENSIONS NCHAR=898;
+	FORMAT DATATYPE=DNA GAP=-;
+	MATRIX
+Lemur_catta      AAGCTTCATAGGAGCAACCATTCTAATAATCGCACATGGCCTTACATCATCCATATTATTCTGTCTAGCCAACTCTAACTACGAACGAATCCATAGCCGTACAATACTACTAGCACGAGGGATCCAAACCATTCTCCCTCTTATAGCCACCTGATGACTACTCGCCAGCCTAACTAACCTAGCCCTACCCACCTCTATCAATTTAATTGGCGAACTATTCGTCACTATAGCATCCTTCTCATGATCAAACATTACAATTATCTTAATAGGCTTAAATATGCTCATCACCGCTCTCTATTCCCTCTATATATTAACTACTACACAACGAGGAAAACTCACATATCATTCGCACAACCTAAACCCATCCTTTACACGAGAAAACACCCTTATATCCATACACATACTCCCCCTTCTCCTATTTACCTTAAACCCCAAAATTATTCTAGGACCCACGTACTGTAAATATAGTTTAAA- [...]
+Homo_sapiens     AAGCTTCACCGGCGCAGTCATTCTCATAATCGCCCACGGGCTTACATCCTCATTACTATTCTGCCTAGCAAACTCAAACTACGAACGCACTCACAGTCGCATCATAATCCTCTCTCAAGGACTTCAAACTCTACTCCCACTAATAGCTTTTTGATGACTTCTAGCAAGCCTCGCTAACCTCGCCTTACCCCCCACTATTAACCTACTGGGAGAACTCTCTGTGCTAGTAACCACGTTCTCCTGATCAAATATCACTCTCCTACTTACAGGACTCAACATACTAGTCACAGCCCTATACTCCCTCTACATATTTACCACAACACAATGGGGCTCACTCACCCACCACATTAACAACATAAAACCCTCATTCACACGAGAAAACACCCTCATGTTCATACACCTATCCCCCATTCTCCTCCTATCCCTCAACCCCGACATCATTACCGGGTTTTCCTCTTGTAAATATAGTTTAACC [...]
+Pan              AAGCTTCACCGGCGCAATTATCCTCATAATCGCCCACGGACTTACATCCTCATTATTATTCTGCCTAGCAAACTCAAATTATGAACGCACCCACAGTCGCATCATAATTCTCTCCCAAGGACTTCAAACTCTACTCCCACTAATAGCCTTTTGATGACTCCTAGCAAGCCTCGCTAACCTCGCCCTACCCCCTACCATTAATCTCCTAGGGGAACTCTCCGTGCTAGTAACCTCATTCTCCTGATCAAATACCACTCTCCTACTCACAGGATTCAACATACTAATCACAGCCCTGTACTCCCTCTACATGTTTACCACAACACAATGAGGCTCACTCACCCACCACATTAATAACATAAAGCCCTCATTCACACGAGAAAATACTCTCATATTTTTACACCTATCCCCCATCCTCCTTCTATCCCTCAATCCTGATATCATCACTGGATTCACCTCCTGTAAATATAGTTTAACC [...]
+Gorilla          AAGCTTCACCGGCGCAGTTGTTCTTATAATTGCCCACGGACTTACATCATCATTATTATTCTGCCTAGCAAACTCAAACTACGAACGAACCCACAGCCGCATCATAATTCTCTCTCAAGGACTCCAAACCCTACTCCCACTAATAGCCCTTTGATGACTTCTGGCAAGCCTCGCCAACCTCGCCTTACCCCCCACCATTAACCTACTAGGAGAGCTCTCCGTACTAGTAACCACATTCTCCTGATCAAACACCACCCTTTTACTTACAGGATCTAACATACTAATTACAGCCCTGTACTCCCTTTATATATTTACCACAACACAATGAGGCCCACTCACACACCACATCACCAACATAAAACCCTCATTTACACGAGAAAACATCCTCATATTCATGCACCTATCCCCCATCCTCCTCCTATCCCTCAACCCCGATATTATCACCGGGTTCACCTCCTGTAAATATAGTTTAACC [...]
+Pongo            AAGCTTCACCGGCGCAACCACCCTCATGATTGCCCATGGACTCACATCCTCCCTACTGTTCTGCCTAGCAAACTCAAACTACGAACGAACCCACAGCCGCATCATAATCCTCTCTCAAGGCCTTCAAACTCTACTCCCCCTAATAGCCCTCTGATGACTTCTAGCAAGCCTCACTAACCTTGCCCTACCACCCACCATCAACCTTCTAGGAGAACTCTCCGTACTAATAGCCATATTCTCTTGATCTAACATCACCATCCTACTAACAGGACTCAACATACTAATCACAACCCTATACTCTCTCTATATATTCACCACAACACAACGAGGTACACCCACACACCACATCAACAACATAAAACCTTCTTTCACACGCGAAAATACCCTCATGCTCATACACCTATCCCCCATCCTCCTCTTATCCCTCAACCCCAGCATCATCGCTGGGTTCGCCTACTGTAAATATAGTTTAACC [...]
+Hylobates        AAGCTTTACAGGTGCAACCGTCCTCATAATCGCCCACGGACTAACCTCTTCCCTGCTATTCTGCCTTGCAAACTCAAACTACGAACGAACTCACAGCCGCATCATAATCCTATCTCGAGGGCTCCAAGCCTTACTCCCACTGATAGCCTTCTGATGACTCGCAGCAAGCCTCGCTAACCTCGCCCTACCCCCCACTATTAACCTCCTAGGTGAACTCTTCGTACTAATGGCCTCCTTCTCCTGGGCAAACACTACTATTACACTCACCGGGCTCAACGTACTAATCACGGCCCTATACTCCCTTTACATATTTATCATAACACAACGAGGCACACTTACACACCACATTAAAAACATAAAACCCTCACTCACACGAGAAAACATATTAATACTTATGCACCTCTTCCCCCTCCTCCTCCTAACCCTCAACCCTAACATCATTACTGGCTTTACTCCCTGTAAACATAGTTTAATC [...]
+Macaca_fuscata   AAGCTTTTCCGGCGCAACCATCCTTATGATCGCTCACGGACTCACCTCTTCCATATATTTCTGCCTAGCCAATTCAAACTATGAACGCACTCACAACCGTACCATACTACTGTCCCGAGGACTTCAAATCCTACTTCCACTAACAGCCTTTTGATGATTAACAGCAAGCCTTACTAACCTTGCCCTACCCCCCACTATCAATCTACTAGGTGAACTCTTTGTAATCGCAACCTCATTCTCCTGATCCCATATCACCATTATGCTAACAGGACTTAACATATTAATTACGGCCCTCTACTCTCTCCACATATTCACTACAACACAACGAGGAACACTCACACATCACATAATCAACATAAAGCCCCCCTTCACACGAGAAAACACATTAATATTCATACACCTCGCTCCAATTATCCTTCTATCCCTCAACCCCAACATCATCCTGGGGTTTACCTCCTGTAGATATAGTTTAACT [...]
+Macaca_mulatta       AAGCTTTTCTGGCGCAACCATCCTCATGATTGCTCACGGACTCACCTCTTCCATATATTTCTGCCTAGCCAATTCAAACTATGAACGCACTCACAACCGTACCATACTACTGTCCCGGGGACTTCAAATCCTACTTCCACTAACAGCTTTCTGATGATTAACAGCAAGCCTTACTAACCTTGCCCTACCCCCCACTATCAACCTACTAGGTGAACTCTTTGTAATCGCGACCTCATTCTCCTGGTCCCATATCACCATTATATTAACAGGATTTAACATACTAATTACGGCCCTCTACTCCCTCCACATATTCACCACAACACAACGAGGAGCACTCACACATCACATAATCAACATAAAACCCCCCTTCACACGAGAAAACATATTAATATTCATACACCTCGCTCCAATCATCCTCCTATCTCTCAACCCCAACATCATCCTGGGGTTTACTTCCTGTAGATATAGTTT [...]
+Macaca_fascicularis  AAGCTTCTCCGGCGCAACCACCCTTATAATCGCCCACGGGCTCACCTCTTCCATGTATTTCTGCTTGGCCAATTCAAACTATGAGCGCACTCATAACCGTACCATACTACTATCCCGAGGACTTCAAATTCTACTTCCATTGACAGCCTTCTGATGACTCACAGCAAGCCTTACTAACCTTGCCCTACCCCCCACTATTAATCTACTAGGCGAACTCTTTGTAATCACAACTTCATTTTCCTGATCCCATATCACCATTGTGTTAACGGGCCTTAATATACTAATCACAGCCCTCTACTCTCTCCACATGTTCATTACAGTACAACGAGGAACACTCACACACCACATAATCAATATAAAACCCCCCTTCACACGAGAAAACATATTAATATTCATACACCTCGCTCCAATTATCCTTCTATCTCTCAACCCCAACATCATCCTGGGGTTTACCTCCTGTAAATATAGTTT [...]
+Macaca_sylvanus      AAGCTTCTCCGGTGCAACTATCCTTATAGTTGCCCATGGACTCACCTCTTCCATATACTTCTGCTTGGCCAACTCAAACTACGAACGCACCCACAGCCGCATCATACTACTATCCCGAGGACTCCAAATCCTACTCCCACTAACAGCCTTCTGATGATTCACAGCAAGCCTTACTAATCTTGCTCTACCCTCCACTATTAATCTACTGGGCGAACTCTTCGTAATCGCAACCTCATTTTCCTGATCCCACATCACCATCATACTAACAGGACTGAACATACTAATTACAGCCCTCTACTCTCTTCACATATTCACCACAACACAACGAGGAGCGCTCACACACCACATAATTAACATAAAACCACCTTTCACACGAGAAAACATATTAATACTCATACACCTCGCTCCAATTATTCTTCTATCTCTTAACCCCAACATCATTCTAGGATTTACTTCCTGTAAATATAGTTT [...]
+Saimiri_sciureus AAGCTTCACCGGCGCAATGATCCTAATAATCGCTCACGGGTTTACTTCGTCTATGCTATTCTGCCTAGCAAACTCAAATTACGAACGAATTCACAGCCGAACAATAACATTTACTCGAGGGCTCCAAACACTATTCCCGCTTATAGGCCTCTGATGACTCCTAGCAAATCTCGCTAACCTCGCCCTACCCACAGCTATTAATCTAGTAGGAGAATTACTCACAATCGTATCTTCCTTCTCTTGATCCAACTTTACTATTATATTCACAGGACTTAATATACTAATTACAGCACTCTACTCACTTCATATGTATGCCTCTACACAGCGAGGTCCACTTACATACAGCACCAGCAATATAAAACCAATATTTACACGAGAAAATACGCTAATATTTATACATATAACACCAATCCTCCTCCTTACCTTGAGCCCCAAGGTAATTATAGGACCCTCACCTTGTAATTATAGTTTAGCT [...]
+Tarsius_syrichta AAGTTTCATTGGAGCCACCACTCTTATAATTGCCCATGGCCTCACCTCCTCCCTATTATTTTGCCTAGCAAATACAAACTACGAACGAGTCCACAGTCGAACAATAGCACTAGCCCGTGGCCTTCAAACCCTATTACCTCTTGCAGCAACATGATGACTCCTCGCCAGCTTAACCAACCTGGCCCTTCCCCCAACAATTAATTTAATCGGTGAACTGTCCGTAATAATAGCAGCATTTTCATGGTCACACCTAACTATTATCTTAGTAGGCCTTAACACCCTTATCACCGCCCTATATTCCCTATATATACTAATCATAACTCAACGAGGAAAATACACATATCATATCAACAATATCATGCCCCCTTTCACCCGAGAAAATACATTAATAATCATACACCTATTTCCCTTAATCCTACTATCTACCAACCCCAAAGTAATTATAGGAACCATGTACTGTAAATATAGTTTAAAC [...]
+	;
+END;
+
+begin sets;
+	charset coding = 2-457 660-896;
+	charset noncoding = 1 458-659 897-898;
+	charset 1stpos = 2-457\3 660-896\3;
+	charset 2ndpos = 3-457\3 661-896\3;
+	charset 3rdpos = 4-457\3 662-.\3;
+end;
+
+begin paup;
+    exclude all;
+    include coding;
+    export file=primates.chars.subsets-coding.nexus format=nexus replace=yes;
+    exclude all;
+    include noncoding;
+    export file=primates.chars.subsets-noncoding.nexus format=nexus replace=yes;
+    exclude all;
+    include 1stpos;
+    export file=primates.chars.subsets-1stpos.nexus format=nexus replace=yes;
+    exclude all;
+    include 2ndpos;
+    export file=primates.chars.subsets-2ndpos.nexus format=nexus replace=yes;
+    exclude all;
+    include 3rdpos;
+    export file=primates.chars.subsets-3rdpos.nexus format=nexus replace=yes;
+end;
+
diff --git a/doc/source/examples/primates.chars.subsets-coding.nexus b/doc/source/examples/primates.chars.subsets-coding.nexus
new file mode 100644
index 0000000..df0dbcf
--- /dev/null
+++ b/doc/source/examples/primates.chars.subsets-coding.nexus
@@ -0,0 +1,20 @@
+#NEXUS 
+
+Begin data;
+	Dimensions ntax=12 nchar=693;
+	Format datatype=dna gap=-;
+	Matrix
+Lemur_catta         AGCTTCATAGGAGCAACCATTCTAATAATCGCACATGGCCTTACATCATCCATATTATTCTGTCTAGCCAACTCTAACTACGAACGAATCCATAGCCGTACAATACTACTAGCACGAGGGATCCAAACCATTCTCCCTCTTATAGCCACCTGATGACTACTCGCCAGCCTAACTAACCTAGCCCTACCCACCTCTATCAATTTAATTGGCGAACTATTCGTCACTATAGCATCCTTCTCATGATCAAACATTACAATTATCTTAATAGGCTTAAATATGCTCATCACCGCTCTCTATTCCCTCTATATATTAACTACTACACAACGAGGAAAACTCACATATCATTCGCACAACCTAAACCCATCCTTTACACGAGAAAACACCCTTATATCCATACACATACTCCCCCTTCTCCTATTTACCTTAAACCCCAAAATTATTCTAGGACCCACGTACATAAATCTATTATCCT [...]
+Homo_sapiens        AGCTTCACCGGCGCAGTCATTCTCATAATCGCCCACGGGCTTACATCCTCATTACTATTCTGCCTAGCAAACTCAAACTACGAACGCACTCACAGTCGCATCATAATCCTCTCTCAAGGACTTCAAACTCTACTCCCACTAATAGCTTTTTGATGACTTCTAGCAAGCCTCGCTAACCTCGCCTTACCCCCCACTATTAACCTACTGGGAGAACTCTCTGTGCTAGTAACCACGTTCTCCTGATCAAATATCACTCTCCTACTTACAGGACTCAACATACTAGTCACAGCCCTATACTCCCTCTACATATTTACCACAACACAATGGGGCTCACTCACCCACCACATTAACAACATAAAACCCTCATTCACACGAGAAAACACCCTCATGTTCATACACCTATCCCCCATTCTCCTCCTATCCCTCAACCCCGACATCATTACCGGGTTTTCCTCTATAACCATGCACACTA [...]
+Pan                 AGCTTCACCGGCGCAATTATCCTCATAATCGCCCACGGACTTACATCCTCATTATTATTCTGCCTAGCAAACTCAAATTATGAACGCACCCACAGTCGCATCATAATTCTCTCCCAAGGACTTCAAACTCTACTCCCACTAATAGCCTTTTGATGACTCCTAGCAAGCCTCGCTAACCTCGCCCTACCCCCTACCATTAATCTCCTAGGGGAACTCTCCGTGCTAGTAACCTCATTCTCCTGATCAAATACCACTCTCCTACTCACAGGATTCAACATACTAATCACAGCCCTGTACTCCCTCTACATGTTTACCACAACACAATGAGGCTCACTCACCCACCACATTAATAACATAAAGCCCTCATTCACACGAGAAAATACTCTCATATTTTTACACCTATCCCCCATCCTCCTTCTATCCCTCAATCCTGATATCATCACTGGATTCACCTCCATAACCATGTATACTA [...]
+Gorilla             AGCTTCACCGGCGCAGTTGTTCTTATAATTGCCCACGGACTTACATCATCATTATTATTCTGCCTAGCAAACTCAAACTACGAACGAACCCACAGCCGCATCATAATTCTCTCTCAAGGACTCCAAACCCTACTCCCACTAATAGCCCTTTGATGACTTCTGGCAAGCCTCGCCAACCTCGCCTTACCCCCCACCATTAACCTACTAGGAGAGCTCTCCGTACTAGTAACCACATTCTCCTGATCAAACACCACCCTTTTACTTACAGGATCTAACATACTAATTACAGCCCTGTACTCCCTTTATATATTTACCACAACACAATGAGGCCCACTCACACACCACATCACCAACATAAAACCCTCATTTACACGAGAAAACATCCTCATATTCATGCACCTATCCCCCATCCTCCTCCTATCCCTCAACCCCGATATTATCACCGGGTTCACCTCCATAACTATGTACGCTA [...]
+Pongo               AGCTTCACCGGCGCAACCACCCTCATGATTGCCCATGGACTCACATCCTCCCTACTGTTCTGCCTAGCAAACTCAAACTACGAACGAACCCACAGCCGCATCATAATCCTCTCTCAAGGCCTTCAAACTCTACTCCCCCTAATAGCCCTCTGATGACTTCTAGCAAGCCTCACTAACCTTGCCCTACCACCCACCATCAACCTTCTAGGAGAACTCTCCGTACTAATAGCCATATTCTCTTGATCTAACATCACCATCCTACTAACAGGACTCAACATACTAATCACAACCCTATACTCTCTCTATATATTCACCACAACACAACGAGGTACACCCACACACCACATCAACAACATAAAACCTTCTTTCACACGCGAAAATACCCTCATGCTCATACACCTATCCCCCATCCTCCTCTTATCCCTCAACCCCAGCATCATCGCTGGGTTCGCCTACACAGCCATGTTTACCA [...]
+Hylobates           AGCTTTACAGGTGCAACCGTCCTCATAATCGCCCACGGACTAACCTCTTCCCTGCTATTCTGCCTTGCAAACTCAAACTACGAACGAACTCACAGCCGCATCATAATCCTATCTCGAGGGCTCCAAGCCTTACTCCCACTGATAGCCTTCTGATGACTCGCAGCAAGCCTCGCTAACCTCGCCCTACCCCCCACTATTAACCTCCTAGGTGAACTCTTCGTACTAATGGCCTCCTTCTCCTGGGCAAACACTACTATTACACTCACCGGGCTCAACGTACTAATCACGGCCCTATACTCCCTTTACATATTTATCATAACACAACGAGGCACACTTACACACCACATTAAAAACATAAAACCCTCACTCACACGAGAAAACATATTAATACTTATGCACCTCTTCCCCCTCCTCCTCCTAACCCTCAACCCTAACATCATTACTGGCTTTACTCCCATAGCAATGTACACCA [...]
+Macaca_fuscata      AGCTTTTCCGGCGCAACCATCCTTATGATCGCTCACGGACTCACCTCTTCCATATATTTCTGCCTAGCCAATTCAAACTATGAACGCACTCACAACCGTACCATACTACTGTCCCGAGGACTTCAAATCCTACTTCCACTAACAGCCTTTTGATGATTAACAGCAAGCCTTACTAACCTTGCCCTACCCCCCACTATCAATCTACTAGGTGAACTCTTTGTAATCGCAACCTCATTCTCCTGATCCCATATCACCATTATGCTAACAGGACTTAACATATTAATTACGGCCCTCTACTCTCTCCACATATTCACTACAACACAACGAGGAACACTCACACATCACATAATCAACATAAAGCCCCCCTTCACACGAGAAAACACATTAATATTCATACACCTCGCTCCAATTATCCTTCTATCCCTCAACCCCAACATCATCCTGGGGTTTACCTCCATAATCATGCACACCC [...]
+Macaca_mulatta      AGCTTTTCTGGCGCAACCATCCTCATGATTGCTCACGGACTCACCTCTTCCATATATTTCTGCCTAGCCAATTCAAACTATGAACGCACTCACAACCGTACCATACTACTGTCCCGGGGACTTCAAATCCTACTTCCACTAACAGCTTTCTGATGATTAACAGCAAGCCTTACTAACCTTGCCCTACCCCCCACTATCAACCTACTAGGTGAACTCTTTGTAATCGCGACCTCATTCTCCTGGTCCCATATCACCATTATATTAACAGGATTTAACATACTAATTACGGCCCTCTACTCCCTCCACATATTCACCACAACACAACGAGGAGCACTCACACATCACATAATCAACATAAAACCCCCCTTCACACGAGAAAACATATTAATATTCATACACCTCGCTCCAATCATCCTCCTATCTCTCAACCCCAACATCATCCTGGGGTTTACTTCCATAATCATGCACACCC [...]
+Macaca_fascicularis AGCTTCTCCGGCGCAACCACCCTTATAATCGCCCACGGGCTCACCTCTTCCATGTATTTCTGCTTGGCCAATTCAAACTATGAGCGCACTCATAACCGTACCATACTACTATCCCGAGGACTTCAAATTCTACTTCCATTGACAGCCTTCTGATGACTCACAGCAAGCCTTACTAACCTTGCCCTACCCCCCACTATTAATCTACTAGGCGAACTCTTTGTAATCACAACTTCATTTTCCTGATCCCATATCACCATTGTGTTAACGGGCCTTAATATACTAATCACAGCCCTCTACTCTCTCCACATGTTCATTACAGTACAACGAGGAACACTCACACACCACATAATCAATATAAAACCCCCCTTCACACGAGAAAACATATTAATATTCATACACCTCGCTCCAATTATCCTTCTATCTCTCAACCCCAACATCATCCTGGGGTTTACCTCCATAATCATGCACACCC [...]
+Macaca_sylvanus     AGCTTCTCCGGTGCAACTATCCTTATAGTTGCCCATGGACTCACCTCTTCCATATACTTCTGCTTGGCCAACTCAAACTACGAACGCACCCACAGCCGCATCATACTACTATCCCGAGGACTCCAAATCCTACTCCCACTAACAGCCTTCTGATGATTCACAGCAAGCCTTACTAATCTTGCTCTACCCTCCACTATTAATCTACTGGGCGAACTCTTCGTAATCGCAACCTCATTTTCCTGATCCCACATCACCATCATACTAACAGGACTGAACATACTAATTACAGCCCTCTACTCTCTTCACATATTCACCACAACACAACGAGGAGCGCTCACACACCACATAATTAACATAAAACCACCTTTCACACGAGAAAACATATTAATACTCATACACCTCGCTCCAATTATTCTTCTATCTCTTAACCCCAACATCATTCTAGGATTTACTTCCATAATCATGTATACCC [...]
+Saimiri_sciureus    AGCTTCACCGGCGCAATGATCCTAATAATCGCTCACGGGTTTACTTCGTCTATGCTATTCTGCCTAGCAAACTCAAATTACGAACGAATTCACAGCCGAACAATAACATTTACTCGAGGGCTCCAAACACTATTCCCGCTTATAGGCCTCTGATGACTCCTAGCAAATCTCGCTAACCTCGCCCTACCCACAGCTATTAATCTAGTAGGAGAATTACTCACAATCGTATCTTCCTTCTCTTGATCCAACTTTACTATTATATTCACAGGACTTAATATACTAATTACAGCACTCTACTCACTTCATATGTATGCCTCTACACAGCGAGGTCCACTTACATACAGCACCAGCAATATAAAACCAATATTTACACGAGAAAATACGCTAATATTTATACATATAACACCAATCCTCCTCCTTACCTTGAGCCCCAAGGTAATTATAGGACCCTCACCTATA---ATACACTTCT [...]
+Tarsius_syrichta    AGTTTCATTGGAGCCACCACTCTTATAATTGCCCATGGCCTCACCTCCTCCCTATTATTTTGCCTAGCAAATACAAACTACGAACGAGTCCACAGTCGAACAATAGCACTAGCCCGTGGCCTTCAAACCCTATTACCTCTTGCAGCAACATGATGACTCCTCGCCAGCTTAACCAACCTGGCCCTTCCCCCAACAATTAATTTAATCGGTGAACTGTCCGTAATAATAGCAGCATTTTCATGGTCACACCTAACTATTATCTTAGTAGGCCTTAACACCCTTATCACCGCCCTATATTCCCTATATATACTAATCATAACTCAACGAGGAAAATACACATATCATATCAACAATATCATGCCCCCTTTCACCCGAGAAAATACATTAATAATCATACACCTATTTCCCTTAATCCTACTATCTACCAACCCCAAAGTAATTATAGGAACCATGTACATAAATTTATTTTCAT [...]
+	;
+End;
diff --git a/doc/source/examples/primates.chars.subsets-noncoding.nexus b/doc/source/examples/primates.chars.subsets-noncoding.nexus
new file mode 100644
index 0000000..b43cac2
--- /dev/null
+++ b/doc/source/examples/primates.chars.subsets-noncoding.nexus
@@ -0,0 +1,20 @@
+#NEXUS 
+
+Begin data;
+	Dimensions ntax=12 nchar=205;
+	Format datatype=dna gap=-;
+	Matrix
+Lemur_catta         ATGTAAATATAGTTTAAA-AAAACACTAGATTGTGAATCCAGAAATAGAAGCTCAAAC-CTTCTTATTTACCGAGAAAGTAATGTATGAACTGCTAACTCTGCACTCCGTATATAAAAATACGGCTATCTCAACTTTTAAAGGATAGAAGTAATCCATTGGCCTTAGGAGCCAAAAA-ATTGGTGCAACTCCAAATAAAAGTATT
+Homo_sapiens        ATGTAAATATAGTTTAACCAAAACATCAGATTGTGAATCTGACAACAGAGGCTTA-CGACCCCTTATTTACCGAGAAAGCT-CACAAGAACTGCTAACTCATGCCCCCATGTCTAACAACATGGCTTTCTCAACTTTTAAAGGATAACAGCTATCCATTGGTCTTAGGCCCCAAAAATTTTGGTGCAACTCCAAATAAAAGTATT
+Pan                 ATGTAAATATAGTTTAACCAAAACATCAGATTGTGAATCTGACAACAGAGGCTCA-CGACCCCTTATTTACCGAGAAAGCT-TATAAGAACTGCTAATTCATATCCCCATGCCTGACAACATGGCTTTCTCAACTTTTAAAGGATAACAGCCATCCGTTGGTCTTAGGCCCCAAAAATTTTGGTGCAACTCCAAATAAAAGTATT
+Gorilla             ATGTAAATATAGTTTAACCAAAACATCAGATTGTGAATCTGATAACAGAGGCTCA-CAACCCCTTATTTACCGAGAAAGCT-CGTAAGAGCTGCTAACTCATACCCCCGTGCTTGACAACATGGCTTTCTCAACTTTTAAAGGATAACAGCTATCCATTGGTCTTAGGACCCAAAAATTTTGGTGCAACTCCAAATAAAAGTATT
+Pongo               ATGTAAATATAGTTTAACCAAAACATTAGATTGTGAATCTAATAATAGGGCCCCA-CAACCCCTTATTTACCGAGAAAGCT-CACAAGAACTGCTAACTCTCACT-CCATGTGTGACAACATGGCTTTCTCAGCTTTTAAAGGATAACAGCTATCCCTTGGTCTTAGGATCCAAAAATTTTGGTGCAACTCCAAATAAAAGTATT
+Hylobates           ATGTAAACATAGTTTAATCAAAACATTAGATTGTGAATCTAACAATAGAGGCTCG-AAACCTCTTGCTTACCGAGAAAGCC-CACAAGAACTGCTAACTCACTATCCCATGTATGACAACATGGCTTTCTCAACTTTTAAAGGATAACAGCTATCCATTGGTCTTAGGACCCAAAAATTTTGGTGCAACTCCAAATAAAAGTATT
+Macaca_fuscata      ATGTAGATATAGTTTAACTAAAACACTAGATTGTGAATCTAACCATAGAGACTCA-CCACCTCTTATTTACCGAGAAAACT-CGCAAGGACTGCTAACCCATGTACCCGTACCTAAAATTACGGTTTTCTCAACTTTTAAAGGATAACAGCTATCCATTGACCTTAGGAGTCAAAAACATTGGTGCAACTCCAAATAAAAGTATT
+Macaca_mulatta      ATGTAGATATAGTTTAACTAAAACATTAGATTGTGAATCTAACCATAGAGACTTA-CCACCTCTTATTTACCGAGAAAACT-CGCGAGGACTGCTAACCCATGTATCCGTACCTAAAATTACGGTTTTCTCAACTTTTAAAGGATAACAGCTATCCATTGACCTTAGGAGTCAAAAATATTGGTGCAACTCCAAATAAAAGTATT
+Macaca_fascicularis ATGTAAATATAGTTTAACTAAAACATTAGATTGTGAATCTAACTATAGAGGCCTA-CCACTTCTTATTTACCGAGAAAACT-CGCAAGGACTGCTAATCCATGCCTCCGTACTTAAAACTACGGTTTCCTCAACTTTTAAAGGATAACAGCTATCCATTGACCTTAGGAGTCAAAAACATTGGTGCAACTCCAAATAAAAGTATT
+Macaca_sylvanus     ATGTAAATATAGTTTAATTAAAACATTAGACTGTGAATCTAACTATAGAAGCTTA-CCACTTCTTATTTACCGAGAAAACT-TGCAAGGACCGCTAATCCACACCTCCGTACTTAAAACTACGGTTTTCTCAACTTTTAAAGGATAACAGCTATCCATTGGCCTTAGGAGTCAAAAATATTGGTGCAACTCCAAATAAAAGTATT
+Saimiri_sciureus    ATGTAATTATAGTTTAGCTAAAACATTAGATTGTGAATCTAATAATAGAAGAATA-TAACTTCTTAATTACCGAGAAAGTG-CGCAAGAACTGCTAATTCATGCTCCCAAGACTAACAACTTGGCTTCCTCAACTTTTAAAGGATAGTAGTTATCCATTGGTCTTAGGAGCCAAAAACATTGGTGCAACTCCAAATAAAAGTATT
+Tarsius_syrichta    ATGTAAATATAGTTTAAACAAAACATTAGATTGTGAGTCTAATAATAGAAGCCCAAAGATTTCTTATTTACCAAGAAAGTA-TGCAAGAACTGCTAACTCATGCCTCCATATATAACAATGTGGCTTTCTT-ACTTTTAAAGGATAGAAGTAATCCATCGGTCTTAGGAACCGAAAA-ATTGGTGCAACTCCAAATAAAAGTATT
+	;
+End;
diff --git a/doc/source/examples/prune_taxa_with_labels.py b/doc/source/examples/prune_taxa_with_labels.py
new file mode 100644
index 0000000..713ccd8
--- /dev/null
+++ b/doc/source/examples/prune_taxa_with_labels.py
@@ -0,0 +1,17 @@
+#! /usr/bin/env python
+
+import dendropy
+
+tree_str = "[&R] ((A, (B, (C, (D, E)))),(F, (G, H)));"
+
+tree = dendropy.Tree.get(
+        data=tree_str,
+        schema="newick")
+print("Before:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+tree.prune_taxa_with_labels(["A", "C", "G"])
+print("After:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+
diff --git a/doc/source/examples/pure_kingman1.py b/doc/source/examples/pure_kingman1.py
new file mode 100644
index 0000000..51c535b
--- /dev/null
+++ b/doc/source/examples/pure_kingman1.py
@@ -0,0 +1,11 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.simulate import treesim
+
+taxa = dendropy.TaxonNamespace(["z1", "z2", "z3", "z4", "z5", "z6", "z7", "z8"])
+tree = treesim.pure_kingman_tree(
+        taxon_namespace=taxa,
+        pop_size=10000)
+print(tree.as_string(schema="newick"))
+print(tree.as_ascii_plot())
diff --git a/doc/source/examples/pythonidae.beast-mcmc.trees b/doc/source/examples/pythonidae.beast-mcmc.trees
new file mode 100644
index 0000000..8f8bcb5
--- /dev/null
+++ b/doc/source/examples/pythonidae.beast-mcmc.trees
@@ -0,0 +1,1079 @@
+#NEXUS
+
+Begin taxa;
+	Dimensions ntax=33;
+	Taxlabels
+		'Antaresia childreni'
+		'Antaresia maculosa'
+		'Antaresia melanocephalus'
+		'Antaresia perthensis'
+		'Antaresia ramsayi'
+		'Antaresia stimsoni'
+		'Apodora papuana'
+		'Bothrochilus boa'
+		'Candoia aspera'
+		'Liasis albertisii'
+		'Liasis fuscus'
+		'Liasis mackloti'
+		'Liasis olivaceus'
+		'Loxocemus bicolor'
+		'Morelia amethistina'
+		'Morelia boeleni'
+		'Morelia bredli'
+		'Morelia carinata'
+		'Morelia clastolepis'
+		'Morelia kinghorni'
+		'Morelia nauta'
+		'Morelia oenpelliensis'
+		'Morelia spilota'
+		'Morelia tracyae'
+		'Morelia viridisN'
+		'Morelia viridisS'
+		'Python curtus'
+		'Python molurus'
+		'Python regius'
+		'Python reticulatus'
+		'Python sebae'
+		'Python timoriensis'
+		'Xenopeltis unicolor'
+		;
+End;
+
+Begin trees;
+	Translate
+		1 'Antaresia childreni',
+		2 'Antaresia maculosa',
+		3 'Antaresia melanocephalus',
+		4 'Antaresia perthensis',
+		5 'Antaresia ramsayi',
+		6 'Antaresia stimsoni',
+		7 'Apodora papuana',
+		8 'Bothrochilus boa',
+		9 'Candoia aspera',
+		10 'Liasis albertisii',
+		11 'Liasis fuscus',
+		12 'Liasis mackloti',
+		13 'Liasis olivaceus',
+		14 'Loxocemus bicolor',
+		15 'Morelia amethistina',
+		16 'Morelia boeleni',
+		17 'Morelia bredli',
+		18 'Morelia carinata',
+		19 'Morelia clastolepis',
+		20 'Morelia kinghorni',
+		21 'Morelia nauta',
+		22 'Morelia oenpelliensis',
+		23 'Morelia spilota',
+		24 'Morelia tracyae',
+		25 'Morelia viridisN',
+		26 'Morelia viridisS',
+		27 'Python curtus',
+		28 'Python molurus',
+		29 'Python regius',
+		30 'Python reticulatus',
+		31 'Python sebae',
+		32 'Python timoriensis',
+		33 'Xenopeltis unicolor'
+		;
+tree STATE_0 [&lnP=-44525.18904166426] = [&R] (((((((((1:[&rate=7.813313919960342E-4]3.821032667225258,(19:[&rate=8.156721432760262E-4]1.0980939899093431,14:[&rate=8.342959841736593E-4]1.0980939899093431):[&rate=0.0010008786783363328]2.7229386773159145):[&rate=0.0010048187869511436]10.479510512577459,((((3:[&rate=8.478081834140964E-4]1.4764984794753326,4:[&rate=8.586904710144556E-4]1.4764984794753326):[&rate=0.001008789743818317]3.5696431175846692,(32:[&rate=8.679506839704271E-4]1.496536 [...]
+tree STATE_10000 [&lnP=-23798.475647749827] = [&R] ((9:[&rate=0.002587292552327461]81.29742222462428,(((((15:[&rate=0.0026602804977152025]21.090100877929007,(23:[&rate=0.0023075599540264783]17.7035654612959,(22:[&rate=0.0034480138517963745]15.079093688400201,17:[&rate=0.0022730306200335632]15.079093688400201):[&rate=0.002623546715670542]2.6244717728956974):[&rate=0.0029800247491623974]3.3865354166331088):[&rate=0.00220359811325661]15.11803368628745,((18:[&rate=0.0019081053726881265]27.79 [...]
+tree STATE_20000 [&lnP=-22805.468029114094] = [&R] (9:[&rate=0.001973721613397192]109.37266383291771,(((((((((11:[&rate=0.0021268438991882033]10.994337153481313,12:[&rate=0.0019615055367387954]10.994337153481313):[&rate=0.002029219389099665]18.93621301462366,(7:[&rate=0.0028332893029337394]21.376539476192505,13:[&rate=0.0018900333750611837]21.376539476192505):[&rate=0.002007926065619389]8.554010691912467):[&rate=0.0018900333750611837]4.827729539394369,(3:[&rate=0.002203311346256093]16.96 [...]
+tree STATE_30000 [&lnP=-22443.221371319945] = [&R] (9:[&rate=0.0015841470945638424]157.75007677339667,((14:[&rate=0.002696739850443594]93.18994023904179,((((((((11:[&rate=0.0021932262963005583]8.15449187544126,12:[&rate=0.002077775503850595]8.15449187544126):[&rate=0.001658757926633238]24.679296470612563,(7:[&rate=0.0025654122270598614]24.30442032890907,13:[&rate=0.0018115850694915225]24.30442032890907):[&rate=0.0017899345784732046]8.529368017144751):[&rate=0.0016894390842042315]6.393092 [...]
+tree STATE_40000 [&lnP=-22403.34423422354] = [&R] (9:[&rate=0.0029644229355470656]117.4054827309376,(33:[&rate=0.0024521766219901156]90.5537014031759,(((((16:[&rate=0.002465536986188385]36.549848474152796,((((11:[&rate=0.0026721988245245205]6.292961520267456,12:[&rate=0.002465536986188385]6.292961520267456):[&rate=0.0023963574361747205]18.791164043705834,(7:[&rate=0.003337811797269114]20.432466700417887,13:[&rate=0.0025175543361175747]20.432466700417887):[&rate=0.002645610758205654]4.651 [...]
+tree STATE_50000 [&lnP=-22327.71929086986] = [&R] (9:[&rate=0.0022395150296089634]126.15974930714476,((14:[&rate=0.00297167403399309]76.2142576718422,((((16:[&rate=0.0023626625528740523]36.511915421269514,(((3:[&rate=0.0023125645374332615]15.107432952090326,5:[&rate=0.0027654458481722903]15.107432952090326):[&rate=0.0023626625528740523]17.772328432043395,((11:[&rate=0.0028253548135480956]7.571134709204523,12:[&rate=0.002378781470657024]7.571134709204523):[&rate=0.002277176979159128]20.82 [...]
+tree STATE_60000 [&lnP=-22287.153051092297] = [&R] (9:[&rate=0.002958496987838825]124.56418651612026,((((((((23:[&rate=0.0027979144068101137]11.31861283681363,17:[&rate=0.002246654649477568]11.31861283681363):[&rate=0.0025076268331979405]12.019116965760515,(22:[&rate=0.003230282280223132]21.08035600936676,(24:[&rate=0.003438794701529451]14.15015821850061,((21:[&rate=0.00256578632521623]6.117300729913947,(20:[&rate=0.003277549629679835]4.043558769224742,19:[&rate=0.002642894978876736]4.04 [...]
+tree STATE_70000 [&lnP=-22270.30664008203] = [&R] (9:[&rate=0.003818171114823852]95.85024416463715,(33:[&rate=0.0034854440834527233]73.97607967127202,(14:[&rate=0.0037225099672688606]64.44656368938307,(((((((23:[&rate=0.0034204555907127463]9.027207085785982,17:[&rate=0.0035364192336117275]9.027207085785982):[&rate=0.0034068815507448695]8.292645171130186,(24:[&rate=0.0036112575162542547]10.61821612579086,((19:[&rate=0.003699625917873131]2.867174600669231,(21:[&rate=0.0035903197550050023]2 [...]
+tree STATE_80000 [&lnP=-22212.920566991284] = [&R] (9:[&rate=0.0028906072961344055]126.14794341902932,((((((16:[&rate=0.002412795402659326]34.687861353341006,((8:[&rate=0.003361719184092607]21.882910305785682,10:[&rate=0.002727895617947268]21.882910305785682):[&rate=0.0036893445213264263]10.568537280705389,((3:[&rate=0.0033935558822111482]13.158822872488981,5:[&rate=0.0028300085271494484]13.158822872488981):[&rate=0.0025738227948206606]17.17577242082676,((11:[&rate=0.00230655037338062]6. [...]
+tree STATE_90000 [&lnP=-22218.230643840416] = [&R] (9:[&rate=0.002291647318917423]146.77005485244942,((14:[&rate=0.0031147493945619213]83.31355518895859,((((((18:[&rate=0.002742967031089654]25.145415445218514,(26:[&rate=0.0026932594630090434]14.566531774989539,25:[&rate=0.0030334672814004214]14.566531774989539):[&rate=0.002291647318917423]10.578883670228976):[&rate=0.0029598539208833996]4.822327361432123,(2:[&rate=0.003396961437084882]25.38769454697333,((1:[&rate=0.0024882949215091505]6. [...]
+tree STATE_100000 [&lnP=-22220.174921451224] = [&R] ((33:[&rate=0.0014618717701735822]140.08162797082414,(14:[&rate=0.0018892865434001645]114.05816691455738,((((((18:[&rate=0.0018636115370228115]31.02351575016931,(26:[&rate=0.002322477661202314]18.258036888417074,25:[&rate=0.002322477661202314]18.258036888417074):[&rate=0.0021709810795611066]12.765478861752236):[&rate=0.0023619984850281207]7.533117521350263,(2:[&rate=0.002322477661202314]34.37105456782485,((1:[&rate=0.002184048863692479] [...]
+tree STATE_110000 [&lnP=-22198.760340159693] = [&R] ((33:[&rate=0.0025528866552113733]102.71407937517,(14:[&rate=0.003172456564236317]82.00707886367954,(((((16:[&rate=0.0033975804778201666]25.613925925255685,(8:[&rate=0.004736045929507425]12.654517902274618,10:[&rate=0.004736045929507425]12.654517902274618):[&rate=0.003899245588771143]12.959408022981068):[&rate=0.0027013839600628716]3.4944065444440007,((3:[&rate=0.004074090808244449]8.95680158358202,5:[&rate=0.004074090808244449]8.956801 [...]
+tree STATE_120000 [&lnP=-22216.694826504354] = [&R] ((14:[&rate=0.002650015386118206]100.53265986885305,(((((16:[&rate=0.0025359822328770284]32.87664411010843,(((3:[&rate=0.002820059227303037]12.501431608986481,5:[&rate=0.0025359822328770284]12.501431608986481):[&rate=0.0032967550944477623]16.248508011185045,((11:[&rate=0.003246317862247342]6.444982595258955,12:[&rate=0.0029104857078926464]6.444982595258955):[&rate=0.0028717248115064766]18.145897620068034,(7:[&rate=0.0031448708585633143] [...]
+tree STATE_130000 [&lnP=-22217.334277745606] = [&R] ((33:[&rate=0.0022310795574612616]105.98594335127632,(14:[&rate=0.0023946087928769714]94.4599047783933,((((16:[&rate=0.002441172504916555]41.98484555515172,(((3:[&rate=0.0019971580683051772]16.379130047822176,5:[&rate=0.0025869843014017907]16.379130047822176):[&rate=0.0023184785600080552]19.462609513385853,((11:[&rate=0.002403892854476761]8.898199353229726,12:[&rate=0.0024991684016875478]8.898199353229726):[&rate=0.0022310795574612616]2 [...]
+tree STATE_140000 [&lnP=-22215.793737380256] = [&R] ((33:[&rate=0.002152908460442245]140.42905849380938,(14:[&rate=0.002541742198356325]112.33806805755354,((((((23:[&rate=0.002343316185209707]12.55615280526506,17:[&rate=0.002285698729925499]12.55615280526506):[&rate=0.0026491323974814013]13.909496783482872,(22:[&rate=0.0025655206200484847]24.73497628492939,(24:[&rate=0.0027212930835672253]16.828448477272133,(15:[&rate=0.001973542480901908]10.24392963899461,((20:[&rate=0.00245927645889955 [...]
+tree STATE_150000 [&lnP=-22211.141648074907] = [&R] ((33:[&rate=0.0033162264082199537]90.8645967106229,((((((8:[&rate=0.0033976131216234143]18.84690504987591,10:[&rate=0.003358661800434368]18.84690504987591):[&rate=0.0032553887152133305]11.288489800060233,(((3:[&rate=0.003358661800434368]11.283659415735181,5:[&rate=0.003262744325320284]11.283659415735181):[&rate=0.00350929969528222]15.758435838742743,((11:[&rate=0.0033082742015016833]6.240097358599602,12:[&rate=0.0031075628559107523]6.24 [...]
+tree STATE_160000 [&lnP=-22213.320025278495] = [&R] ((33:[&rate=0.0035157021892190626]73.215861171187,(14:[&rate=0.0033915399954290563]63.93668825484818,((((((8:[&rate=0.003753124753766342]16.100925145807775,10:[&rate=0.003148380896688705]16.100925145807775):[&rate=0.003602530517694449]11.973645062696296,((3:[&rate=0.003476816697825334]11.225621756137112,5:[&rate=0.003591002642078195]11.225621756137112):[&rate=0.0030168808096415495]15.217145995974338,((11:[&rate=0.0033915399954290563]6.5 [...]
+tree STATE_170000 [&lnP=-22218.592639736507] = [&R] ((33:[&rate=0.0023187563631270262]114.89217777296408,(((((((3:[&rate=0.002003058627496098]19.14518783743251,5:[&rate=0.0022737265336566998]19.14518783743251):[&rate=0.0023804682146822513]19.733808134042633,(7:[&rate=0.0026422709084548347]32.43925970416962,((11:[&rate=0.0024802713339847774]7.0150536164718496,12:[&rate=0.002902244072853671]7.0150536164718496):[&rate=0.0024627755807631897]17.785657198523644,13:[&rate=0.002229058996143988]2 [...]
+tree STATE_180000 [&lnP=-22225.6102525927] = [&R] (((33:[&rate=0.002195589525828242]119.14030017001468,((((((23:[&rate=0.0020857390487689915]13.273416862511308,17:[&rate=0.002240985142773187]13.273416862511308):[&rate=0.002234334735310431]15.890057220370501,((24:[&rate=0.0021690944636781192]18.38022718515574,(((20:[&rate=0.0022444427787333683]5.005839367994753,21:[&rate=0.0022311209790925246]5.005839367994753):[&rate=0.002141863629659682]1.316627933829741,19:[&rate=0.0021595467418212655] [...]
+tree STATE_190000 [&lnP=-22216.99059106544] = [&R] ((33:[&rate=0.0024121176594219303]113.71400761406589,(((((((23:[&rate=0.002443851173668381]12.82141288206584,17:[&rate=0.0023272135815478085]12.82141288206584):[&rate=0.0022586480453117936]12.117379194373122,((24:[&rate=0.0029009207763769904]14.638246550911228,(((20:[&rate=0.0021620776173843772]3.1112892263896312,21:[&rate=0.0022336495199034336]3.1112892263896312):[&rate=0.002454548965982883]2.4566025626178747,19:[&rate=0.002162077617384 [...]
+tree STATE_200000 [&lnP=-22217.073009285024] = [&R] (((14:[&rate=0.0027706900202125236]106.36674712300545,((((((23:[&rate=0.0025308683034001313]12.44314281455364,17:[&rate=0.0018072296804894348]12.44314281455364):[&rate=0.0019907435814274553]15.164300014136641,(22:[&rate=0.0023791752279916866]24.34694866173409,(15:[&rate=0.002358715743541939]15.280795569988147,(24:[&rate=0.0028375789258609915]13.4585160399611,(19:[&rate=0.0019676505805759476]3.982879009141336,(21:[&rate=0.002602981067108 [...]
+tree STATE_210000 [&lnP=-22227.808543064253] = [&R] (((((((((24:[&rate=0.0020679897535962276]17.470379122482715,((19:[&rate=0.0018451493023379428]3.922283998514288,(20:[&rate=0.001955583363794081]3.4547810108574217,21:[&rate=0.002103693426565171]3.4547810108574217):[&rate=0.002481727332572326]0.46750298765686615):[&rate=0.002291569391000023]9.289873522801024,15:[&rate=0.0022012298989772397]13.212157521315312):[&rate=0.002156312418152963]4.258221601167403):[&rate=0.002085983735712439]10.3 [...]
+tree STATE_220000 [&lnP=-22205.14956628116] = [&R] (((33:[&rate=0.0038503444481631528]61.24709780323468,(((((((3:[&rate=0.0035719146800461655]11.348244884147578,5:[&rate=0.0034600964033550505]11.348244884147578):[&rate=0.004424559690266329]12.084043567703839,((11:[&rate=0.0048945007310201415]2.968214390845333,12:[&rate=0.00541900240744392]2.968214390845333):[&rate=0.004001894254330058]15.764089744118522,(7:[&rate=0.004754544824518477]16.494160670985604,13:[&rate=0.002825133100086872]16.4 [...]
+tree STATE_230000 [&lnP=-22227.848892135095] = [&R] ((((((((8:[&rate=0.0025566017656347344]28.364390903195783,10:[&rate=0.0025529989535899985]28.364390903195783):[&rate=0.0025482484398040186]10.44128259602639,(((3:[&rate=0.002557515945345144]12.641393650167082,5:[&rate=0.00254644298166747]12.641393650167082):[&rate=0.0025497239261802174]19.929815765422312,((11:[&rate=0.002557843653828441]6.892755795007335,12:[&rate=0.0025585454359803132]6.892755795007335):[&rate=0.0025534838075066037]20. [...]
+tree STATE_240000 [&lnP=-22215.295930019507] = [&R] ((((((((((3:[&rate=0.0024114063560463405]17.04313222413649,5:[&rate=0.002314912144449185]17.04313222413649):[&rate=0.002314912144449185]20.53670255795878,((11:[&rate=0.003419120465196053]7.08263489411465,12:[&rate=0.0026376856664669735]7.08263489411465):[&rate=0.002427590331965292]24.545146111715486,(7:[&rate=0.002657628610687249]24.67608774254433,13:[&rate=0.0021617383759205346]24.67608774254433):[&rate=0.00279350616569777]6.9516932632 [...]
+tree STATE_250000 [&lnP=-22215.42462803647] = [&R] (9:[&rate=0.0037594364968165134]102.58619971390505,((14:[&rate=0.004329151487339889]62.82939207060808,(((((((23:[&rate=0.004355780862092784]7.514405813878033,17:[&rate=0.004942989786608167]7.514405813878033):[&rate=0.0033259137978919745]8.94168171931234,(22:[&rate=0.0038123812805459056]14.901384025606454,(24:[&rate=0.004277285738925216]8.583552048603366,((19:[&rate=0.003426779378461903]3.342888277429036,(20:[&rate=0.003888325786996666]3. [...]
+tree STATE_260000 [&lnP=-22208.660538139393] = [&R] (9:[&rate=0.0032243909543156948]105.77043363970056,(33:[&rate=0.0032243909543156948]82.36790888224718,(14:[&rate=0.003135233909393846]68.5075171171226,(((((22:[&rate=0.0030924285864175147]22.215670869550944,((23:[&rate=0.0028212521496309424]10.529896149794087,17:[&rate=0.0029676109952780933]10.529896149794087):[&rate=0.0030088815146496707]8.178757129435287,((15:[&rate=0.0030924285864175147]7.142244997564739,(21:[&rate=0.0028637966382108 [...]
+tree STATE_270000 [&lnP=-22224.654863471307] = [&R] (9:[&rate=0.002497175474698335]149.84617197431035,(33:[&rate=0.0022964553379171684]110.53645729691729,(14:[&rate=0.002658391754078189]86.68106008943766,((((16:[&rate=0.002394991942733317]44.80628835418556,(((3:[&rate=0.0028709369474891845]16.433667891950908,5:[&rate=0.0022213089707617283]16.433667891950908):[&rate=0.0026132633230010072]19.097943195172082,(7:[&rate=0.0026351550972565415]29.93849831551541,((11:[&rate=0.0025340937009354467 [...]
+tree STATE_280000 [&lnP=-22223.196763987675] = [&R] (9:[&rate=0.002935476125732373]114.19893119565387,(33:[&rate=0.0026610468462899555]90.20522789897831,(14:[&rate=0.002629448695287838]80.26453395765546,((((16:[&rate=0.0024348120979521656]37.133462880284746,(((3:[&rate=0.0019049645078592346]12.88801933004509,5:[&rate=0.0027453379394398697]12.88801933004509):[&rate=0.002763441356150118]20.75957436291793,(8:[&rate=0.0028013425355075273]18.2940370647684,10:[&rate=0.0036706562430703993]18.29 [...]
+tree STATE_290000 [&lnP=-22228.211042286388] = [&R] (9:[&rate=0.002210344295549751]153.71553276599016,((14:[&rate=0.001980382633813809]107.76173767551964,((((16:[&rate=0.0018364035640045645]49.30945458475419,(((3:[&rate=0.0022341619763982757]17.61421799212239,5:[&rate=0.002170058752612417]17.61421799212239):[&rate=0.0020903771133866096]23.469639032680718,((11:[&rate=0.0017619363839648514]7.922832617635707,12:[&rate=0.002021646406698469]7.922832617635707):[&rate=0.002119994014182057]28.42 [...]
+tree STATE_300000 [&lnP=-22216.109058854076] = [&R] (9:[&rate=0.002263433936857291]147.2235951442027,(14:[&rate=0.0025466867681592084]109.20475797063983,((((((8:[&rate=0.0025733659091911826]25.78031952038859,10:[&rate=0.0025336338498303623]25.78031952038859):[&rate=0.002832273037858825]14.02842612313243,((3:[&rate=0.0019177259874135104]20.023853745511946,5:[&rate=0.002201033387703482]20.023853745511946):[&rate=0.0027096890277344956]16.558269053965798,((11:[&rate=0.0029171703465245623]7.3 [...]
+tree STATE_310000 [&lnP=-22209.089393725848] = [&R] (9:[&rate=0.004002824217107025]87.52200847868252,(33:[&rate=0.003302313541138282]69.93334141247175,(14:[&rate=0.004635812555775996]59.05637334927789,(((((8:[&rate=0.004052215778526941]16.251071497596524,10:[&rate=0.0039567147790034955]16.251071497596524):[&rate=0.003717986686056745]11.512314605586461,((3:[&rate=0.0037551285651999415]10.824553866260818,5:[&rate=0.003302313541138282]10.824553866260818):[&rate=0.0036081897140625976]13.1631 [...]
+tree STATE_320000 [&lnP=-22211.324577535113] = [&R] (9:[&rate=0.003760574596957784]121.0014200837589,(33:[&rate=0.0022940184568047343]102.81217582466505,(((((((22:[&rate=0.002237702612424592]25.427792931378946,((15:[&rate=0.001708502720872506]11.486148544502473,(19:[&rate=0.0032877228251959037]3.881475080530376,(20:[&rate=0.0032504518600143598]2.4675040528658236,21:[&rate=0.002911812532776156]2.4675040528658236):[&rate=0.0026316896916557483]1.4139710276645525):[&rate=0.002959651460529089 [...]
+tree STATE_330000 [&lnP=-22221.762504759776] = [&R] (9:[&rate=0.002854538138488399]120.07426079800865,((33:[&rate=0.0029958709614649583]83.60414538570045,14:[&rate=0.002793901190464077]83.60414538570045):[&rate=0.003061830308748822]11.025018932248472,(((((22:[&rate=0.002494033094645822]24.05502693592182,((23:[&rate=0.003247488688435253]6.8336989685714356,17:[&rate=0.003348871149559326]6.8336989685714356):[&rate=0.0028704546592179464]15.142902772764081,((15:[&rate=0.002524909070289808]11. [...]
+tree STATE_340000 [&lnP=-22215.96067422495] = [&R] (9:[&rate=0.0036294116095136704]111.12981392001731,((33:[&rate=0.003091971771953279]68.81072985375968,14:[&rate=0.003056312277157044]68.81072985375968):[&rate=0.0035100809466775632]5.565697172987598,((((((23:[&rate=0.0035321538485815376]8.67675048175943,17:[&rate=0.0030196004820502603]8.67675048175943):[&rate=0.002941322995736869]12.515288192727155,(22:[&rate=0.0027716949698488955]18.035417185763595,((15:[&rate=0.003555005695629599]7.480 [...]
+tree STATE_350000 [&lnP=-22211.65733718958] = [&R] (9:[&rate=0.0029604239866502986]128.44940508548015,((14:[&rate=0.0030539959688720966]75.57301122365124,((((((23:[&rate=0.0031611199038086785]8.346785830980634,17:[&rate=0.003012875712813023]8.346785830980634):[&rate=0.0024625950869271757]12.468806803407839,(((15:[&rate=0.0031283370003504676]9.263992824285875,((19:[&rate=0.003512780827178205]2.7729812729136696,20:[&rate=0.003097501015445489]2.7729812729136696):[&rate=0.0031444536354718637 [...]
+tree STATE_360000 [&lnP=-22226.68157790626] = [&R] (9:[&rate=0.0024290758142819385]163.7157287536476,((14:[&rate=0.0024290758142819385]106.92147334407417,((((((23:[&rate=0.0017631930138312982]18.60239668191336,17:[&rate=0.0018899558149027239]18.60239668191336):[&rate=0.0020324542221829595]9.978938524618602,(22:[&rate=0.0023378372491542248]26.71201438512346,((15:[&rate=0.0024290758142819385]13.702586993431085,(21:[&rate=0.0019597255576636217]3.7663116008001665,(20:[&rate=0.002482574146701 [...]
+tree STATE_370000 [&lnP=-22206.47355370548] = [&R] (9:[&rate=0.0024454910627839544]127.99687053914681,((33:[&rate=0.0028429786333350705]78.4071305384393,((((16:[&rate=0.0027653165340474455]33.89838946475689,((((11:[&rate=0.00323790417700433]5.985626306746782,12:[&rate=0.0026145221463916694]5.985626306746782):[&rate=0.0028429786333350705]18.966804194515323,(7:[&rate=0.0033616585753011817]21.000379515339823,13:[&rate=0.0024454910627839544]21.000379515339823):[&rate=0.0035534547911435675]3. [...]
+tree STATE_380000 [&lnP=-22233.565617586475] = [&R] (9:[&rate=0.0014595811128089928]218.1894258284716,(((((((((23:[&rate=0.002028119623738656]11.93970752006327,17:[&rate=0.002642081540782292]11.93970752006327):[&rate=0.0024178163091714325]17.48930536614777,(22:[&rate=0.0016490925322420742]28.310521558851434,((15:[&rate=0.001729637285276902]11.423777462531957,(21:[&rate=0.0023730231894004954]3.230540821325229,(20:[&rate=0.0027001200374208545]1.6865815830824786,19:[&rate=0.0031660779291459 [...]
+tree STATE_390000 [&lnP=-22221.333116639584] = [&R] (9:[&rate=0.0025020078126158996]140.23732570406148,((14:[&rate=0.0027282775360986857]82.32041484528355,((((((8:[&rate=0.0027435320053300683]22.757662722307803,10:[&rate=0.00271353035046402]22.757662722307803):[&rate=0.00271353035046402]14.463708566013178,((3:[&rate=0.002478815810881562]12.74900401769898,5:[&rate=0.0023289239583834543]12.74900401769898):[&rate=0.0023428437771676955]23.160548132774878,((11:[&rate=0.0024904257112851004]6.6 [...]
+tree STATE_400000 [&lnP=-22216.745781299924] = [&R] (9:[&rate=0.002152163126230145]163.92535157961316,((14:[&rate=0.0021623669257549365]114.04493656771771,((((((18:[&rate=0.0023139326917238218]31.18126989667569,(26:[&rate=0.002379346832252043]18.873347433936047,25:[&rate=0.00257746468064091]18.873347433936047):[&rate=0.0022902675342128757]12.307922462739644):[&rate=0.0022565396784111538]7.254316101671915,(2:[&rate=0.0021828165761785963]33.62087901617594,((1:[&rate=0.0027768198335460484]7 [...]
+tree STATE_410000 [&lnP=-22219.538578929325] = [&R] (9:[&rate=0.002632500764524288]142.11127901871154,(33:[&rate=0.002260541540699198]106.91273199931467,(((((((3:[&rate=0.0025875011543849175]17.178574958696736,5:[&rate=0.001562411487333851]17.178574958696736):[&rate=0.002903361398094471]17.504709257169793,((11:[&rate=0.0020541510539929466]9.427010877486593,12:[&rate=0.0017793792524054392]9.427010877486593):[&rate=0.0021896680465119775]21.490372112996518,(7:[&rate=0.002423607058076795]26. [...]
+tree STATE_420000 [&lnP=-22218.320583949655] = [&R] (9:[&rate=0.002139631338973317]153.73700202782862,(14:[&rate=0.0024084254834461864]108.3802564099649,((((((8:[&rate=0.002912329601773311]24.55312041856651,10:[&rate=0.002620503655414562]24.55312041856651):[&rate=0.0029354667065767306]15.401658438790317,((3:[&rate=0.0024363309921108206]16.182896012699253,5:[&rate=0.002169443898522559]16.182896012699253):[&rate=0.002987151057841835]19.46294509617268,((11:[&rate=0.0024767390954147663]7.954 [...]
+tree STATE_430000 [&lnP=-22209.190190744877] = [&R] (9:[&rate=0.004221705828425102]89.27071975978346,((14:[&rate=0.003669291855473712]66.44767985059387,((((((18:[&rate=0.0032922463383381704]22.76672471482093,(26:[&rate=0.004668259788004618]12.107086714259646,25:[&rate=0.0034416721939272456]12.107086714259646):[&rate=0.0034188285792356938]10.659638000561285):[&rate=0.0032324786727469567]3.152213420766259,(2:[&rate=0.00348575031691197]20.44712605561893,((1:[&rate=0.004027641169039265]6.605 [...]
+tree STATE_440000 [&lnP=-22209.51221207074] = [&R] ((33:[&rate=0.002248002757444377]108.93513214842855,(14:[&rate=0.0024109505686581655]91.65248438034473,((((((18:[&rate=0.002310509550335723]27.89405188226948,(26:[&rate=0.0029783296651726045]14.543617302348752,25:[&rate=0.0030311024013039167]14.543617302348752):[&rate=0.0026982392442508165]13.350434579920728):[&rate=0.002510211711710358]5.086283764319866,(2:[&rate=0.0028031594896634022]29.69453521437394,((1:[&rate=0.0030311024013039167]7 [...]
+tree STATE_450000 [&lnP=-22217.77482781918] = [&R] ((33:[&rate=0.002129821967857161]122.03465061221355,(((((((18:[&rate=0.002303992911422396]32.56772693692885,(26:[&rate=0.0032817434424233603]15.382696411556598,25:[&rate=0.0031678384442020324]15.382696411556598):[&rate=0.002266768082822498]17.18503052537225):[&rate=0.002303992911422396]2.5298540065983133,(2:[&rate=0.0024661593934270382]27.692365744719506,((1:[&rate=0.0028043641495891737]9.080961631990368,6:[&rate=0.0017928369689776521]9. [...]
+tree STATE_460000 [&lnP=-22217.394610564297] = [&R] ((33:[&rate=0.002207885193856712]103.51990125340897,(14:[&rate=0.0030710033493150757]88.2140964938815,((((((18:[&rate=0.0023057386794222094]29.304087486258677,(26:[&rate=0.002952797819941438]17.08645498682275,25:[&rate=0.002290618454413313]17.08645498682275):[&rate=0.0023057386794222094]12.217632499435926):[&rate=0.0028437301888043992]3.8835608598588927,(2:[&rate=0.002610997182875965]28.9777512256239,((1:[&rate=0.0023057386794222094]8.1 [...]
+tree STATE_470000 [&lnP=-22218.701833701147] = [&R] ((33:[&rate=0.003636445999416438]65.9787134297128,((((((((8:[&rate=0.005862631028468843]11.756283791024762,10:[&rate=0.005090814995748244]11.756283791024762):[&rate=0.00598507149107092]9.628476367095525,(3:[&rate=0.004428621985672304]8.692533702537343,5:[&rate=0.0037002130895215516]8.692533702537343):[&rate=0.0040537299087411716]12.692226455582944):[&rate=0.003979920957801902]1.1184715474620752,((11:[&rate=0.004219323479321451]4.9784916 [...]
+tree STATE_480000 [&lnP=-22228.613557032284] = [&R] ((14:[&rate=0.002937956366790199]87.70983847444471,(33:[&rate=0.003129247284339269]83.60429198716761,((((((22:[&rate=0.0029985374681705057]19.43676084096768,(24:[&rate=0.0029493586091976194]12.867570261899555,((21:[&rate=0.002842271179639841]3.4205317394984216,(19:[&rate=0.0031365362825174835]2.4736591295868458,20:[&rate=0.003300360138993347]2.4736591295868458):[&rate=0.0033896275792688908]0.9468726099115758):[&rate=0.002970336947714183 [...]
+tree STATE_490000 [&lnP=-22215.470844317122] = [&R] ((33:[&rate=0.0018258791340983148]145.06334258052752,(14:[&rate=0.002032657724639782]114.70684597681624,(((((((11:[&rate=0.0018717022816151642]10.207135877220749,12:[&rate=0.0013237062376289092]10.207135877220749):[&rate=0.0024470687848107443]23.935674018328108,(7:[&rate=0.002187242949975405]29.69543273667076,13:[&rate=0.0018870779238144033]29.69543273667076):[&rate=0.002490006511410725]4.447377158878098):[&rate=0.002105341031048956]7.6 [...]
+tree STATE_500000 [&lnP=-22210.24034973055] = [&R] (9:[&rate=0.002754557943438197]114.84802320174346,(33:[&rate=0.00231221030973594]97.28197695156351,(14:[&rate=0.0029590967487755034]80.68480716558706,((((((23:[&rate=0.0017513363268880653]13.954183443121462,17:[&rate=0.002643312621282586]13.954183443121462):[&rate=0.0034790009427244444]7.413237027819701,(22:[&rate=0.003110219338864968]21.226674928197404,(24:[&rate=0.002754557943438197]12.665037799678611,(15:[&rate=0.0028212724154772113]8 [...]
+tree STATE_510000 [&lnP=-22230.589736936934] = [&R] (9:[&rate=0.0025939437634930765]147.08146940172418,(33:[&rate=0.002132051366453193]122.63255185923016,(14:[&rate=0.002519014466281653]109.2069771441825,(((((16:[&rate=0.002376881359655736]36.993835201605904,(8:[&rate=0.002488144874160917]23.464477623400423,10:[&rate=0.0025428282224169026]23.464477623400423):[&rate=0.0027357622278144124]13.529357578205481):[&rate=0.002526867283369716]0.8811513232245574,(((11:[&rate=0.0024958058907759333] [...]
+tree STATE_520000 [&lnP=-22225.191408776067] = [&R] (9:[&rate=0.0023843689258892524]158.18559011170404,(33:[&rate=0.002210075712366078]96.21129919182599,(14:[&rate=0.002918299403632904]84.50713834043655,(((((((18:[&rate=0.002261482100689157]31.030760798580367,(26:[&rate=0.002797757506185816]17.93840335809698,25:[&rate=0.002556926941652114]17.93840335809698):[&rate=0.002626982702116148]13.092357440483386):[&rate=0.00228761154319119]3.99247868899549,(2:[&rate=0.0021194511625145175]28.64382 [...]
+tree STATE_530000 [&lnP=-22218.035929522262] = [&R] (9:[&rate=0.0024548506012240607]161.34619422132184,(14:[&rate=0.0026369256644486875]104.97660619235506,(33:[&rate=0.0027228624471665602]92.72376577580735,((((((18:[&rate=0.0027531090453034863]29.050851197023658,(26:[&rate=0.002784572602416502]17.628008649412077,25:[&rate=0.002693563546898371]17.628008649412077):[&rate=0.002784572602416502]11.42284254761158):[&rate=0.0023424583728493065]7.1380023173021065,(2:[&rate=0.002784572602416502]2 [...]
+tree STATE_540000 [&lnP=-22223.726505165312] = [&R] (9:[&rate=0.0029427794259200067]138.81758204668924,(14:[&rate=0.002629678003674605]115.06404770173984,(33:[&rate=0.0027314434035892437]96.48346880747096,((((((18:[&rate=0.0029259885764263112]26.37176449120773,(26:[&rate=0.0031730444119045325]14.639918043620295,25:[&rate=0.0033683253861293487]14.639918043620295):[&rate=0.002715020910199745]11.731846447587436):[&rate=0.0031290177113168565]1.9841772841870338,(2:[&rate=0.0029597865111327835 [...]
+tree STATE_550000 [&lnP=-22221.454108893755] = [&R] (9:[&rate=0.0023277840518576894]151.2756964607012,(14:[&rate=0.0024324470665739654]102.74905812419455,(33:[&rate=0.002898519596032391]94.88159418396043,(((((8:[&rate=0.002704718531542261]23.34533684716554,10:[&rate=0.0024324470665739654]23.34533684716554):[&rate=0.002813376228512513]17.758414549592423,((3:[&rate=0.0022687572591706916]17.13438143519176,5:[&rate=0.0025417182591544623]17.13438143519176):[&rate=0.0023448446394579896]20.1144 [...]
+tree STATE_560000 [&lnP=-22219.383068006493] = [&R] (9:[&rate=0.002736979250782542]135.19094532445695,(33:[&rate=0.00282201793493959]102.5712073141336,(((((((18:[&rate=0.0030797439471539436]26.045526401045382,(26:[&rate=0.00282201793493959]15.192335421179065,25:[&rate=0.003037749143773631]15.192335421179065):[&rate=0.003057678878510871]10.853190979866318):[&rate=0.0028522294198971942]2.3973058247030465,(2:[&rate=0.002718407882058005]25.30267616201858,((1:[&rate=0.0027930475336175498]8.55 [...]
+tree STATE_570000 [&lnP=-22218.320341795337] = [&R] (9:[&rate=0.0029033090554138026]137.56386827576466,((((((((18:[&rate=0.002767000432830332]31.531096336108693,(26:[&rate=0.002952764421028371]16.3024262843136,25:[&rate=0.0029271668115650714]16.3024262843136):[&rate=0.00197074772350987]15.228670051795092):[&rate=0.0029033090554138026]2.9471137821749913,(2:[&rate=0.0029033090554138026]29.484060062748608,((1:[&rate=0.0026733688713048926]8.26257330745526,6:[&rate=0.00304458983974814]8.26257 [...]
+tree STATE_580000 [&lnP=-22216.74663004975] = [&R] (9:[&rate=0.0018200436953847896]161.07671684593183,((((((((18:[&rate=0.002207345267477675]29.87415895379925,(26:[&rate=0.00267430079467112]16.41978385526941,25:[&rate=0.002516490702108112]16.41978385526941):[&rate=0.002207345267477675]13.454375098529837):[&rate=0.002431468313656889]5.7860665015408905,(2:[&rate=0.0021977299522712547]30.433390784674994,((1:[&rate=0.002235358918810543]11.249230977941698,6:[&rate=0.002187935987663026]11.2492 [...]
+tree STATE_590000 [&lnP=-22213.176229331384] = [&R] (9:[&rate=0.0029970419750816146]120.74728943126784,(33:[&rate=0.0027509752156548066]94.11440708486444,(14:[&rate=0.0036418447293577625]74.64238936150652,((((((18:[&rate=0.0031168627551324]20.957224434704422,(26:[&rate=0.0031038214298184335]14.163553631916914,25:[&rate=0.003156092520029876]14.163553631916914):[&rate=0.0030244777424743904]6.793670802787508):[&rate=0.003077669599783657]4.774385646077853,(2:[&rate=0.003156092520029876]23.65 [...]
+tree STATE_600000 [&lnP=-22212.71518859828] = [&R] (9:[&rate=0.003395795731309661]98.68585331146588,(33:[&rate=0.003309774704822011]79.89545779428904,((((((((18:[&rate=0.0032924140341587613]21.13989423103386,(26:[&rate=0.004676034320726312]10.835659798120561,25:[&rate=0.0032031085756303117]10.835659798120561):[&rate=0.0035009005875396323]10.304234432913299):[&rate=0.0032394754611218437]2.727911438771372,(2:[&rate=0.003146047345515649]20.021105304764593,((1:[&rate=0.003447764836678317]5.0 [...]
+tree STATE_610000 [&lnP=-22204.859710368535] = [&R] (9:[&rate=0.0029741620193530615]123.84868453093033,((14:[&rate=0.003538569140002049]74.73113419510767,(((((8:[&rate=0.00305638739799555]23.899428406736682,10:[&rate=0.002548983851380211]23.899428406736682):[&rate=0.004109767491585379]9.994844861647891,(((3:[&rate=0.003538569140002049]9.968661416846826,5:[&rate=0.0034964013988389732]9.968661416846826):[&rate=0.0029542272954186264]20.388762093770907,((11:[&rate=0.0032423469016603624]6.578 [...]
+tree STATE_620000 [&lnP=-22212.10734174061] = [&R] ((9:[&rate=0.0034396522439967578]123.39500267767939,33:[&rate=0.0021349541362306428]123.39500267767939):[&rate=0.0036342951804247785]7.444378484891118,(((((((18:[&rate=0.0026038840268816205]28.112015793528066,(26:[&rate=0.002387670232406431]19.879021005257396,25:[&rate=0.0021092725277070965]19.879021005257396):[&rate=0.0029805896316964916]8.23299478827067):[&rate=0.0027913520884437298]8.53716351996804,(2:[&rate=0.0021857235650409286]28.9 [...]
+tree STATE_630000 [&lnP=-22213.758589233712] = [&R] (((14:[&rate=0.0034722466007164735]68.78021440850611,((((16:[&rate=0.003432893232069501]28.180243554550028,((8:[&rate=0.0036017356621674632]13.670416638824667,10:[&rate=0.00277458267660656]13.670416638824667):[&rate=0.0034722466007164735]13.241661001771439,((3:[&rate=0.002807020612761308]13.549086669567007,5:[&rate=0.0025119267741224296]13.549086669567007):[&rate=0.0033211585962317804]12.383239874058027,((11:[&rate=0.002939195030077637] [...]
+tree STATE_640000 [&lnP=-22205.88300242595] = [&R] (((14:[&rate=0.0037817761877430955]68.52834403113286,(((16:[&rate=0.0030074842731559756]34.22717281320857,((((22:[&rate=0.0037817761877430955]17.540412387711882,((15:[&rate=0.003049058351214427]9.36683207878859,((21:[&rate=0.0028360738591072616]2.8085544030809926,20:[&rate=0.00393080187219056]2.8085544030809926):[&rate=0.0030691495050515416]1.2948529047100665,19:[&rate=0.002736143847195708]4.103407307791059):[&rate=0.0035481699086062175] [...]
+tree STATE_650000 [&lnP=-22220.627154459267] = [&R] (((14:[&rate=0.0030578018715425427]89.23519597779531,((((16:[&rate=0.002650296853715757]39.91464314531966,((((11:[&rate=0.0035971202781590874]7.0929189607886345,12:[&rate=0.00183503331069774]7.0929189607886345):[&rate=0.002325836725594552]19.93465012723325,(7:[&rate=0.004141465303112889]19.124031488984343,13:[&rate=0.002325836725594552]19.124031488984343):[&rate=0.0018888979443784932]7.903537599037541):[&rate=0.0019769103759698457]9.996 [...]
+tree STATE_660000 [&lnP=-22215.57174149218] = [&R] (((14:[&rate=0.0028242605837789704]95.13734954414011,(((((((22:[&rate=0.0023657284056039125]24.665042009046182,((15:[&rate=0.0022595041909874193]12.203079768500928,(21:[&rate=0.0029903356338127772]5.291996275834368,(19:[&rate=0.0027634480282693586]3.8806614709151432,20:[&rate=0.002165194906398648]3.8806614709151432):[&rate=0.002165194906398648]1.4113348049192247):[&rate=0.003420000969120403]6.91108349266656):[&rate=0.002783737025244729]3 [...]
+tree STATE_670000 [&lnP=-22207.34754357909] = [&R] (((14:[&rate=0.0026609428989976497]89.62912180254973,33:[&rate=0.002764045985365311]89.62912180254973):[&rate=0.003304939456538356]9.471559555400162,((((((((15:[&rate=0.003372034731223461]8.458465680061298,((21:[&rate=0.0029253689663159754]2.8728400444414,20:[&rate=0.0024674142703128114]2.8728400444414):[&rate=0.0029996867274546205]0.7450751728873759,19:[&rate=0.002902785670611566]3.617915217328776):[&rate=0.0022459465036363006]4.8405504 [...]
+tree STATE_680000 [&lnP=-22212.620333136612] = [&R] (((14:[&rate=0.0029010396609041016]84.04239012735914,((((((8:[&rate=0.002747973225817531]19.516652859750042,10:[&rate=0.003113562626177873]19.516652859750042):[&rate=0.002202076983709005]19.77461257444431,(((11:[&rate=0.0023516592023705954]5.760577674900372,12:[&rate=0.0029010396609041016]5.760577674900372):[&rate=0.0022528729171085705]21.818085021618792,(7:[&rate=0.0028739508254217176]24.615570650353334,13:[&rate=0.0020881162772584797] [...]
+tree STATE_690000 [&lnP=-22221.616171241432] = [&R] (((14:[&rate=0.003380469503433678]79.17125415017209,((((((7:[&rate=0.003303902962005053]25.562948744947317,((11:[&rate=0.0025859222245349566]8.311544402176892,12:[&rate=0.0027800720869697922]8.311544402176892):[&rate=0.004095284434107435]13.236219471429965,13:[&rate=0.0028366133004653358]21.547763873606858):[&rate=0.0030930810791312815]4.015184871340459):[&rate=0.003151622826327202]5.526920466780986,((3:[&rate=0.0030001500180900573]11.5 [...]
+tree STATE_700000 [&lnP=-22213.74347106024] = [&R] ((33:[&rate=0.0025492699195959483]103.66292400860252,((((((((3:[&rate=0.0028141149616851126]12.910217168003795,5:[&rate=0.0028860080394079975]12.910217168003795):[&rate=0.0026070462281723903]16.538749026045934,((11:[&rate=0.0027394283221916474]5.9299998722029015,12:[&rate=0.003038827656342562]5.9299998722029015):[&rate=0.003067750804930418]18.976957030966354,(13:[&rate=0.002777426115542824]18.966946654919013,7:[&rate=0.002910099631346267 [...]
+tree STATE_710000 [&lnP=-22221.697303268378] = [&R] ((33:[&rate=0.0025117462294804117]108.53338084360358,((((((((3:[&rate=0.00311784270055826]14.130037927112472,5:[&rate=0.002637973024564146]14.130037927112472):[&rate=0.0028028948149832794]16.110551507911328,((7:[&rate=0.003726010495012994]18.615987319807598,13:[&rate=0.002727514877688708]18.615987319807598):[&rate=0.0034345866491057447]2.3540733271952305,(11:[&rate=0.002937917174885969]6.726779418843845,12:[&rate=0.0028983371776634846]6 [...]
+tree STATE_720000 [&lnP=-22211.99700800224] = [&R] ((33:[&rate=0.002262329088162653]96.1855724128769,(14:[&rate=0.0033535636494014024]76.49626943861377,(((((((3:[&rate=0.00282682472862826]13.714088426828694,5:[&rate=0.0024800633115829992]13.714088426828694):[&rate=0.002200858799048685]18.85346517963832,(8:[&rate=0.0035643192624027877]21.484862316403056,10:[&rate=0.0025428675925053316]21.484862316403056):[&rate=0.0024139738616057362]11.08269129006396):[&rate=0.0020363201103479754]2.733695 [...]
+tree STATE_730000 [&lnP=-22222.08243462933] = [&R] ((33:[&rate=0.0020746798171896036]123.7259428799053,((((((((((15:[&rate=0.002260612628444283]11.144500363924273,(19:[&rate=0.0022281216870054152]4.51531704329673,(20:[&rate=0.0025507849776424535]3.5468799953468313,21:[&rate=0.002377728851673691]3.5468799953468313):[&rate=0.0024029445714301044]0.9684370479498989):[&rate=0.0021469580687644815]6.629183320627543):[&rate=0.0020746798171896036]4.13117296053283,24:[&rate=0.002209405509975118]15 [...]
+tree STATE_740000 [&lnP=-22221.61367975397] = [&R] ((((((((((11:[&rate=0.003015785658816797]5.531278502758377,12:[&rate=0.002436571852843089]5.531278502758377):[&rate=0.002420912890686278]19.106151655767,13:[&rate=0.002544332038105954]24.637430158525376):[&rate=0.0031244888187104422]2.3035969394410323,7:[&rate=0.002005623376232835]26.94102709796641):[&rate=0.002436571852843089]9.798031702656775,((8:[&rate=0.0027054835786301666]21.184150662960306,10:[&rate=0.0022869145083313494]21.1841506 [...]
+tree STATE_750000 [&lnP=-22232.41263353742] = [&R] ((14:[&rate=0.002239580488247039]114.94773476311332,(33:[&rate=0.0024831112737602897]96.96673192685398,((((16:[&rate=0.0024694551585828202]40.49372726450659,(((((11:[&rate=0.0025264482839825277]7.750973222098498,12:[&rate=0.002360955808190004]7.750973222098498):[&rate=0.002416195446329413]17.714018337094725,13:[&rate=0.002252580356562498]25.464991559193223):[&rate=0.002416195446329413]3.490411741221127,7:[&rate=0.0021649760762333654]28.9 [...]
+tree STATE_760000 [&lnP=-22207.751841009096] = [&R] (9:[&rate=0.0022715489891943852]146.355775799299,((((((16:[&rate=0.0022478858712799456]35.47783767977414,((8:[&rate=0.0027577549935545956]19.78958589952805,10:[&rate=0.0032413924616281303]19.78958589952805):[&rate=0.003392702187685754]14.70606716633742,(((11:[&rate=0.0030360253078199166]5.706846873959149,12:[&rate=0.0024184526432131192]5.706846873959149):[&rate=0.0025684930630641986]21.312296905957467,(13:[&rate=0.0028847756400422276]22 [...]
+tree STATE_770000 [&lnP=-22202.70035911605] = [&R] (9:[&rate=0.004355951486661834]94.55803910405484,((33:[&rate=0.0033903984763156685]65.86596831070749,((((((18:[&rate=0.0033138050654715695]20.759834949382828,(26:[&rate=0.0036679180997144]11.622541190884762,25:[&rate=0.003836131647332792]11.622541190884762):[&rate=0.0026269695730202543]9.137293758498066):[&rate=0.004278356425317927]4.193309908991324,(2:[&rate=0.003604523977256956]20.47125405625631,((1:[&rate=0.004355951486661834]5.694468 [...]
+tree STATE_780000 [&lnP=-22213.095139652953] = [&R] (9:[&rate=0.0026126742504430658]145.71026883377237,(((((((8:[&rate=0.0031849753503976623]20.547765946941144,10:[&rate=0.0034989569690465103]20.547765946941144):[&rate=0.0034989569690465103]13.499694739722624,(((3:[&rate=0.003608505275559757]10.632249113982779,5:[&rate=0.003145917687388397]10.632249113982779):[&rate=0.0029932226412274793]18.86171190186954,((11:[&rate=0.0027816709871654275]7.140219303689029,12:[&rate=0.002631717024823741] [...]
+tree STATE_790000 [&lnP=-22214.616834128883] = [&R] (9:[&rate=0.003503942454510313]98.07820200061711,(33:[&rate=0.003421310823781727]74.58623898822964,(((((16:[&rate=0.003579406406395234]28.2134424555036,((8:[&rate=0.003475027250592252]16.336281150729263,10:[&rate=0.0035655991541304463]16.336281150729263):[&rate=0.0034692417769785692]10.552377605018421,((3:[&rate=0.003475027250592252]9.305132119765847,5:[&rate=0.0036183335966377852]9.305132119765847):[&rate=0.003261242827493226]15.451862 [...]
+tree STATE_800000 [&lnP=-22227.225113156783] = [&R] (9:[&rate=0.0018431781600041423]171.73385114161596,(((((((8:[&rate=0.0017195324068327033]30.349298727125724,10:[&rate=0.0021791493002408266]30.349298727125724):[&rate=0.0025265212947577728]15.237203653793923,(((3:[&rate=0.002365652052877306]18.031642281783704,5:[&rate=0.002108063079055209]18.031642281783704):[&rate=0.0021791493002408266]21.55714069430882,(((11:[&rate=0.0018264004342330128]7.599524872519377,12:[&rate=0.002166988914834636 [...]
+tree STATE_810000 [&lnP=-22220.902350913842] = [&R] (9:[&rate=0.002678733757263769]132.0864466140215,((((((16:[&rate=0.0023813420531844896]36.07084979910707,(((3:[&rate=0.002582432900480012]16.136103429326873,5:[&rate=0.002625561268027385]16.136103429326873):[&rate=0.0030735944777247996]13.467390517775879,((7:[&rate=0.0036832690360372963]18.76439264500809,13:[&rate=0.002625561268027385]18.76439264500809):[&rate=0.0032094920090648417]5.513594146328124,(11:[&rate=0.003509902461498102]6.704 [...]
+tree STATE_820000 [&lnP=-22228.054911464125] = [&R] (9:[&rate=0.0026580127451561204]125.5434632152601,((14:[&rate=0.0029932908466173674]87.67486366796736,(((((3:[&rate=0.002857427071076188]13.04529959096499,5:[&rate=0.0020809943769700147]13.04529959096499):[&rate=0.0021589556897728254]20.3005192941154,((7:[&rate=0.003764027290298212]20.949976234860067,13:[&rate=0.002461801386169315]20.949976234860067):[&rate=0.0021222894846786803]7.4360193133688455,(11:[&rate=0.0031273982483385507]6.4408 [...]
+tree STATE_830000 [&lnP=-22220.743115714682] = [&R] (33:[&rate=0.002030371370008717]115.55385549889407,((((((((18:[&rate=0.0029512625140845662]21.39522547238377,(26:[&rate=0.002927115208780239]15.233942344192462,25:[&rate=0.002879871744320461]15.233942344192462):[&rate=0.0036670388656973437]6.161283128191309):[&rate=0.0025639209467691578]8.07595679891298,(2:[&rate=0.002277269472881394]24.8872492342028,((1:[&rate=0.00278842874184144]8.928520511798887,6:[&rate=0.001875220668931838]8.928520 [...]
+tree STATE_840000 [&lnP=-22208.088416598926] = [&R] (9:[&rate=0.0021238552927772777]151.86091604249026,(((((16:[&rate=0.002998772183134493]34.193555737488126,((((18:[&rate=0.003153327569763512]23.591942276644897,(26:[&rate=0.0036935041964547177]14.52037396137304,25:[&rate=0.0031764055550910124]14.52037396137304):[&rate=0.003793292313502143]9.071568315271858):[&rate=0.003085897303702732]6.061894835152124,(2:[&rate=0.002734943238098869]26.22805372247372,((1:[&rate=0.004388493919754034]5.28 [...]
+tree STATE_850000 [&lnP=-22224.760644081252] = [&R] (9:[&rate=0.0018444028871751582]161.61877691683176,(14:[&rate=0.002217872275561991]114.70722703847778,(33:[&rate=0.002418033125429675]105.4666613135244,((((((((7:[&rate=0.0029996697003255328]23.34190137602063,13:[&rate=0.001978842821912068]23.34190137602063):[&rate=0.0020236676335484032]6.76770310668606,(11:[&rate=0.002418033125429675]9.221771321238233,12:[&rate=0.0017726812436614304]9.221771321238233):[&rate=0.0020236676335484032]20.88 [...]
+tree STATE_860000 [&lnP=-22215.988880914218] = [&R] (9:[&rate=0.0024424158011962546]151.44023425350147,((((((((18:[&rate=0.0026711210755763735]27.10107138931484,(26:[&rate=0.0029897376818069973]14.837748180705564,25:[&rate=0.0025071777184304954]14.837748180705564):[&rate=0.0023571467917514804]12.263323208609275):[&rate=0.0025978740472720795]8.376002919922662,(2:[&rate=0.0023571467917514804]32.34446096912763,((1:[&rate=0.002535941217210215]8.704740235425504,6:[&rate=0.0026090615553026435] [...]
+tree STATE_870000 [&lnP=-22217.44628197513] = [&R] (9:[&rate=0.002238400256359545]141.24678448128148,((33:[&rate=0.0027132946893986755]102.58003319495029,((((((18:[&rate=0.002596269964329745]28.88283962671715,(26:[&rate=0.0025456447914350418]16.43264630603878,25:[&rate=0.0026964308802966248]16.43264630603878):[&rate=0.0024003894949058534]12.45019332067837):[&rate=0.002730277031027132]4.410035894363048,(2:[&rate=0.002493565193149406]28.65867481535326,((1:[&rate=0.002238400256359545]7.3958 [...]
+tree STATE_880000 [&lnP=-22207.473657109484] = [&R] ((33:[&rate=0.0023978577929397163]96.86098960274124,(14:[&rate=0.004025576463921129]81.5191045983517,(((((8:[&rate=0.0041739556094286916]17.014788987257806,10:[&rate=0.0031664755856925728]17.014788987257806):[&rate=0.0029418964314561914]12.396219110119286,((7:[&rate=0.002795365350783594]24.389630932372167,(13:[&rate=0.002682943886088315]20.379482406586753,(11:[&rate=0.0029649247772336627]5.354521261322418,12:[&rate=0.0030550997273313557 [...]
+tree STATE_890000 [&lnP=-22233.46317281377] = [&R] (((((((((18:[&rate=0.0020268117064871556]35.85768982267346,(26:[&rate=0.0019546415066656318]24.181483853730125,25:[&rate=0.0018998510883870452]24.181483853730125):[&rate=0.002319871014888455]11.676205968943332):[&rate=0.002141763217290859]7.990819085774255,(2:[&rate=0.002160343466691744]34.15864860715553,((1:[&rate=0.002566714445140676]11.46833024982155,6:[&rate=0.001238904550056337]11.46833024982155):[&rate=0.0019407709768146607]17.5426 [...]
+tree STATE_900000 [&lnP=-22222.28844685189] = [&R] (((((((((((11:[&rate=0.0020273953889211826]8.171764473418076,12:[&rate=0.0022826011437489585]8.171764473418076):[&rate=0.002242619959058393]22.532207256230205,(13:[&rate=0.0021914867788294505]26.49718910070653,7:[&rate=0.00231982845237225]26.49718910070653):[&rate=0.0023478679468029273]4.206782628941749):[&rate=0.002179534671042643]5.266835152142512,(3:[&rate=0.002133263263380093]17.006388363963897,5:[&rate=0.002333407817722309]17.006388 [...]
+tree STATE_910000 [&lnP=-22207.789614454116] = [&R] (((((((((8:[&rate=0.0028569495750607587]18.98547318056926,10:[&rate=0.0033351480624429263]18.98547318056926):[&rate=0.00292215720616887]14.604553493055118,((3:[&rate=0.0025729458506816238]13.977809121822103,5:[&rate=0.0026405936707315646]13.977809121822103):[&rate=0.0027900061623968406]16.40736229552075,((7:[&rate=0.003071472371297849]22.914470038002904,13:[&rate=0.0022532033304038905]22.914470038002904):[&rate=0.002700336428632682]3.28 [...]
+tree STATE_920000 [&lnP=-22207.376551855035] = [&R] ((((((((((18:[&rate=0.002506603383677741]26.387254222743476,(26:[&rate=0.0032559533099406762]14.594476241865422,25:[&rate=0.002674825433461395]14.594476241865422):[&rate=0.0029789094806785487]11.792777980878054):[&rate=0.0020693300324190226]3.016212691164199,(2:[&rate=0.002915161943210021]22.291845635131672,((1:[&rate=0.0034326945421540786]6.041238598890651,6:[&rate=0.0029789094806785487]6.041238598890651):[&rate=0.002915161943210021]12 [...]
+tree STATE_930000 [&lnP=-22232.459434013905] = [&R] ((33:[&rate=0.0022487915015251157]104.20102579714644,(((((((8:[&rate=0.002563115841493142]22.37174398037147,10:[&rate=0.0024022884774494904]22.37174398037147):[&rate=0.002771598170520151]13.096959468769569,((3:[&rate=0.002646537252679117]14.121601183401255,5:[&rate=0.002494328006607744]14.121601183401255):[&rate=0.0025772080778119306]18.307549763410222,(7:[&rate=0.0023629000934216942]28.595919643034538,(13:[&rate=0.0024511353075187623]2 [...]
+tree STATE_940000 [&lnP=-22199.118603646566] = [&R] ((33:[&rate=0.0038448130223417663]75.79505697725418,(14:[&rate=0.003774896799134837]61.54930884179394,((((((18:[&rate=0.0033006037322471198]20.055237571757598,(26:[&rate=0.003510715767734296]13.553103180523788,25:[&rate=0.0033781714560199797]13.553103180523788):[&rate=0.004041620278976685]6.50213439123381):[&rate=0.004171032080401646]2.6837200991159165,(2:[&rate=0.0034950639270316056]19.260364089332004,((1:[&rate=0.0037100231136276236]5 [...]
+tree STATE_950000 [&lnP=-22217.292583525636] = [&R] ((((((16:[&rate=0.0024037228416024787]38.282738123068945,((((18:[&rate=0.0032554586547892425]20.859464516503287,(26:[&rate=0.003203665015500676]13.511636198677811,25:[&rate=0.003071964924860852]13.511636198677811):[&rate=0.002709366910291251]7.347828317825476):[&rate=0.0029429464891630418]6.339023652768972,(2:[&rate=0.002640028161762305]22.492137158369534,((1:[&rate=0.003313554935623533]8.162649969475577,6:[&rate=0.0023415057079184797]8 [...]
+tree STATE_960000 [&lnP=-22210.664677483765] = [&R] (((((((16:[&rate=0.0024830452152141075]40.007240740040004,((8:[&rate=0.0029120080463328015]21.69189353376558,10:[&rate=0.002823478763497979]21.69189353376558):[&rate=0.0025272244436209805]17.463368455303844,((7:[&rate=0.0028441265590319225]30.13930832562415,(13:[&rate=0.0019751546575531116]26.33168826380275,(11:[&rate=0.002888241984807421]8.382072723280809,12:[&rate=0.0020943393057872425]8.382072723280809):[&rate=0.0024977809331922034]1 [...]
+tree STATE_970000 [&lnP=-22215.775808218874] = [&R] (((14:[&rate=0.002620694368465951]88.23517062242456,((((16:[&rate=0.0024833268512927126]41.15423165653851,((8:[&rate=0.0025656849849018133]24.86904161810134,10:[&rate=0.0023965043063847273]24.86904161810134):[&rate=0.0029675737011079706]12.03334596730043,(((11:[&rate=0.00294305646487686]6.523160984006346,12:[&rate=0.002737463394035477]6.523160984006346):[&rate=0.0025656849849018133]17.784041629498912,(13:[&rate=0.002455186705652428]18.3 [...]
+tree STATE_980000 [&lnP=-22221.095620855474] = [&R] (((((((((((11:[&rate=0.0021906310985246777]7.926158307842666,12:[&rate=0.0017920031432307763]7.926158307842666):[&rate=0.002280650026360413]19.275864789538765,(13:[&rate=0.0019324657161414194]23.26098347573468,7:[&rate=0.002825135262715988]23.26098347573468):[&rate=0.00265485621617028]3.941039621646752):[&rate=0.002400926569675407]6.859812026224336,(3:[&rate=0.002280650026360413]15.162576248602882,5:[&rate=0.0025425326698304595]15.16257 [...]
+tree STATE_990000 [&lnP=-22212.085404868572] = [&R] ((33:[&rate=0.002543023899878182]91.72792779904103,(14:[&rate=0.003127150278728214]74.38171790434467,((((((8:[&rate=0.003458155251605374]24.04674658209703,10:[&rate=0.002641810002686132]24.04674658209703):[&rate=0.004177456708278905]8.410587689146752,(((11:[&rate=0.0024860804252994258]5.696351457243267,12:[&rate=0.0029854796333423444]5.696351457243267):[&rate=0.0030650261155023245]19.897944363081702,(13:[&rate=0.002420583983099965]21.56 [...]
+tree STATE_1000000 [&lnP=-22224.951181548564] = [&R] ((33:[&rate=0.001901444609344081]123.45634373697749,(((((((8:[&rate=0.0025570700334853836]25.07536688113865,10:[&rate=0.002735306082625947]25.07536688113865):[&rate=0.0025570700334853836]16.95148425185708,((7:[&rate=0.0025570700334853836]27.985920548783916,(13:[&rate=0.0026161605553414588]22.25349091552463,(11:[&rate=0.0021662069775439164]7.218545167730472,12:[&rate=0.002735306082625947]7.218545167730472):[&rate=0.0023250012224586337]1 [...]
+tree STATE_1010000 [&lnP=-22219.038717382897] = [&R] ((33:[&rate=0.0025063946406304566]107.51394672544033,(((((16:[&rate=0.002053961809392279]37.47182460183289,((8:[&rate=0.002747560340742578]26.195150418181026,10:[&rate=0.0023539388009019887]26.195150418181026):[&rate=0.0027055870721555404]9.839222016393865,((7:[&rate=0.002388160848571485]26.25880466897152,(13:[&rate=0.002113845784143693]24.397231410879527,(11:[&rate=0.0029300153360054394]4.811467664242415,12:[&rate=0.002950718611848009 [...]
+tree STATE_1020000 [&lnP=-22217.670200176042] = [&R] ((33:[&rate=0.0018613148599485943]111.2545081142623,(((((((18:[&rate=0.0021698396156976983]30.137270326485027,(26:[&rate=0.002271518591740879]19.08819895202065,25:[&rate=0.0023157022782201004]19.08819895202065):[&rate=0.0024731865774529115]11.049071374464376):[&rate=0.00181049302780006]6.994885108025333,(2:[&rate=0.002227224176457965]32.153733280157276,((1:[&rate=0.0022494556986007866]8.895224225486118,6:[&rate=0.0024872756252693265]8. [...]
+tree STATE_1030000 [&lnP=-22215.00132059364] = [&R] ((((((((((3:[&rate=0.0031494620626276466]13.36194750852715,5:[&rate=0.0022472958055692474]13.36194750852715):[&rate=0.002555860090284893]19.05346695441888,((7:[&rate=0.0034908741664094007]23.187282809942293,13:[&rate=0.0022472958055692474]23.187282809942293):[&rate=0.0023480761502916014]5.0722853473713485,(11:[&rate=0.0022742654104518116]6.3742105219842005,12:[&rate=0.002671090164279309]6.3742105219842005):[&rate=0.002370916897682825]21 [...]
+tree STATE_1040000 [&lnP=-22209.50394984119] = [&R] ((((((((((3:[&rate=0.0027151285927224525]16.822527350847935,5:[&rate=0.0023470211025588593]16.822527350847935):[&rate=0.0032830853712288556]15.855067362521488,((7:[&rate=0.002612966391479191]24.76619675688793,13:[&rate=0.002121686844867685]24.76619675688793):[&rate=0.0032499051037243164]2.043596582641829,(11:[&rate=0.0033585506337684423]6.676210330610536,12:[&rate=0.0032190235766425715]6.676210330610536):[&rate=0.00279956524172235]20.13 [...]
+tree STATE_1050000 [&lnP=-22235.6377919209] = [&R] (((33:[&rate=0.00246496784520375]93.79846687668962,(((16:[&rate=0.002355387379892502]44.988423038028444,((((18:[&rate=0.002453607222032684]31.502642701306073,(26:[&rate=0.002592444415440836]18.85587948383212,25:[&rate=0.0024029811781379052]18.85587948383212):[&rate=0.002187593393706106]12.646763217473953):[&rate=0.0025204373696504543]4.53430387779887,(2:[&rate=0.0024900787035592583]29.839298111765526,((1:[&rate=0.002497043180611244]8.005 [...]
+tree STATE_1060000 [&lnP=-22215.71627210204] = [&R] ((14:[&rate=0.0024758846216749284]101.34892380833871,(33:[&rate=0.0029871244459043723]94.10512229465037,((((16:[&rate=0.0022008353002113622]39.600392001975315,((8:[&rate=0.002712075197669664]23.579173391577115,10:[&rate=0.002305404024315504]23.579173391577115):[&rate=0.002754505361053672]13.968899983667818,(((7:[&rate=0.0026095634952380737]24.021849160529,13:[&rate=0.00254904117490015]24.021849160529):[&rate=0.0028344227257036357]2.8371 [...]
+tree STATE_1070000 [&lnP=-22207.551119331307] = [&R] ((33:[&rate=0.0024204905531972714]96.66731141446724,(14:[&rate=0.002598776184467774]83.02934833843688,((((16:[&rate=0.002499041257844095]36.394036345040064,(((3:[&rate=0.003309973296307814]11.081878518324908,5:[&rate=0.003122035184845244]11.081878518324908):[&rate=0.002649476812429155]18.05423758738859,(((11:[&rate=0.003034027301699556]6.233808972906679,12:[&rate=0.002868965617187473]6.233808972906679):[&rate=0.0035201937985466017]12.8 [...]
+tree STATE_1080000 [&lnP=-22221.262158359346] = [&R] (((((((((18:[&rate=0.002606103642748578]27.04865937651578,(26:[&rate=0.0032761573794555738]16.523259940206298,25:[&rate=0.003012995902766647]16.523259940206298):[&rate=0.002549493474875957]10.525399436309481):[&rate=0.0027270477027983454]5.996967474377975,(2:[&rate=0.002403036044611692]29.280582785903132,((1:[&rate=0.0029500430239666367]8.77712770581755,6:[&rate=0.002568172444202189]8.77712770581755):[&rate=0.0027487018934941325]17.902 [...]
+tree STATE_1090000 [&lnP=-22209.631437864224] = [&R] (((((((((18:[&rate=0.0029237524584788373]25.320424005970967,(26:[&rate=0.0031256369302010245]13.799909771117452,25:[&rate=0.0026757998777629166]13.799909771117452):[&rate=0.00262400359356259]11.520514234853515):[&rate=0.0025237205451744712]6.202263585203024,(2:[&rate=0.0023901066447589384]27.87564245940789,((1:[&rate=0.003209872235014728]7.166812020857647,6:[&rate=0.002880877669725518]7.166812020857647):[&rate=0.0026630554425682664]14. [...]
+tree STATE_1100000 [&lnP=-22235.12792572717] = [&R] ((((((((8:[&rate=0.00228717300923616]27.309413640022346,10:[&rate=0.0017682450809791074]27.309413640022346):[&rate=0.0021889217804985156]19.911987543814117,((3:[&rate=0.0022195983317735927]17.436923549568647,5:[&rate=0.0016181722607893987]17.436923549568647):[&rate=0.0017329510486323763]25.855349180983204,((7:[&rate=0.0023253285512107712]27.16139478939586,13:[&rate=0.0017682450809791074]27.16139478939586):[&rate=0.0023058000217393904]8. [...]
+tree STATE_1110000 [&lnP=-22207.051370219837] = [&R] ((14:[&rate=0.002258689973594426]114.41209262810445,(((((((18:[&rate=0.002542195901797187]30.788513219628516,(26:[&rate=0.0028853755132555716]15.562276281408915,25:[&rate=0.0026821740091985975]15.562276281408915):[&rate=0.0022310416541915816]15.226236938219602):[&rate=0.0020296889957180063]4.57509132391424,(2:[&rate=0.0023337372503613963]30.470268238752354,((1:[&rate=0.0026211619302099623]8.763328939037404,6:[&rate=0.002201252553055185 [...]
+tree STATE_1120000 [&lnP=-22224.027437875717] = [&R] (9:[&rate=0.003016702383522308]110.8577709624467,(33:[&rate=0.0030220657655082883]96.73051464398274,(((((((18:[&rate=0.002889054550833374]26.167023453376196,(26:[&rate=0.0030700716008532853]16.33094333286604,25:[&rate=0.002994777714827334]16.33094333286604):[&rate=0.002852751912621235]9.836080120510157):[&rate=0.0030810457268418277]4.981468065318513,(2:[&rate=0.002889054550833374]26.598145475195064,((1:[&rate=0.003000349884093558]7.731 [...]
+tree STATE_1130000 [&lnP=-22219.641376044045] = [&R] (9:[&rate=0.0034497359162750586]108.40041105665486,(33:[&rate=0.003015094594457187]78.21917142682406,(14:[&rate=0.0033585294868050983]69.78612027726902,(((((18:[&rate=0.0027715825527768684]23.728588240994792,(26:[&rate=0.0031154888578654194]13.175840659145424,25:[&rate=0.003075156767102906]13.175840659145424):[&rate=0.0030352239835016465]10.552747581849369):[&rate=0.002824855765190783]5.479001601041709,(2:[&rate=0.003025186388134167]23 [...]
+tree STATE_1140000 [&lnP=-22222.6028354862] = [&R] (9:[&rate=0.002639151353993908]141.02078056657732,(14:[&rate=0.0020964956268462553]102.92774757166401,(33:[&rate=0.0027481080342862036]88.1187747785868,((((((8:[&rate=0.0027097197110326487]23.01082424877269,10:[&rate=0.0023222994847188604]23.01082424877269):[&rate=0.002285857743148391]16.429427909161166,((3:[&rate=0.0026854643751655386]13.341488790918545,5:[&rate=0.002761584881198008]13.341488790918545):[&rate=0.0026167385419764862]20.83 [...]
+tree STATE_1150000 [&lnP=-22224.459271368778] = [&R] ((14:[&rate=0.001747305913164421]109.97556254869104,(33:[&rate=0.002451737240039789]95.48498895362847,(((((((18:[&rate=0.0025876432681234364]27.68129967032824,(26:[&rate=0.002661296681181817]18.911692257492465,25:[&rate=0.0024957842884451094]18.911692257492465):[&rate=0.0029287211879862143]8.769607412835775):[&rate=0.001896118030274661]7.670799995262573,(2:[&rate=0.002408533475739957]27.852729842358645,((1:[&rate=0.0031967479662433703] [...]
+tree STATE_1160000 [&lnP=-22222.68274773967] = [&R] (9:[&rate=0.002722473724191048]125.87563496220285,(33:[&rate=0.002806805272221685]97.08286667589253,(14:[&rate=0.0031072878183110563]82.65058555356391,((((16:[&rate=0.002708007867424961]35.384813751027934,(((3:[&rate=0.003240894486510487]12.884623975913,5:[&rate=0.0027436884075385842]12.884623975913):[&rate=0.0026855297173587573]16.40415607638281,((13:[&rate=0.0029432237032080647]18.79462297469537,7:[&rate=0.002626289561138295]18.794622 [...]
+tree STATE_1170000 [&lnP=-22214.706865270546] = [&R] (9:[&rate=0.0030631655562914323]123.48228001651965,(33:[&rate=0.0020931608906947833]107.38059931008674,(((((16:[&rate=0.002345796948163479]40.373490717578534,((3:[&rate=0.0019455185805259056]17.927912510604084,5:[&rate=0.0020640861636738667]17.927912510604084):[&rate=0.0027517981774007874]18.979339087963094,((13:[&rate=0.0018809121217570656]26.33677874006631,7:[&rate=0.002832866360660498]26.33677874006631):[&rate=0.001763697106416909]6 [...]
+tree STATE_1180000 [&lnP=-22218.146543606417] = [&R] (9:[&rate=0.002103267185398358]164.4445901031777,(33:[&rate=0.001600717428329877]127.00249638497097,(((((((18:[&rate=0.0023229106235153883]33.07254027561174,(26:[&rate=0.001960807603626218]19.896110515174314,25:[&rate=0.0018156659784358121]19.896110515174314):[&rate=0.0019944562141787406]13.17642976043743):[&rate=0.002171516971520284]6.3383645832942435,(2:[&rate=0.0019944562141787406]30.575621544367365,((1:[&rate=0.002220960641356102]1 [...]
+tree STATE_1190000 [&lnP=-22219.822673891813] = [&R] (9:[&rate=0.0023773993588878887]138.54444060638207,(33:[&rate=0.0019572126503885046]105.57660606361523,((((((((18:[&rate=0.002756679137913557]25.07851720630596,(26:[&rate=0.00331445864208674]12.447495109515291,25:[&rate=0.0036458302341954595]12.447495109515291):[&rate=0.0025315061353361206]12.63102209679067):[&rate=0.00331445864208674]5.5175449771121485,(2:[&rate=0.0029083360376038567]27.45444692853708,((1:[&rate=0.0032049536336685525] [...]
+tree STATE_1200000 [&lnP=-22222.432556940275] = [&R] (9:[&rate=0.0019371096509093685]197.92032367452106,((33:[&rate=0.0023478150345815603]107.54624908644017,(((((((8:[&rate=0.0024162402074246496]24.828931108264985,10:[&rate=0.0023051874677637615]24.828931108264985):[&rate=0.002436582360995773]17.00894567914872,(3:[&rate=0.0019729307163247754]18.196677858362683,5:[&rate=0.002127604529671533]18.196677858362683):[&rate=0.002047713152897067]23.641198929051022):[&rate=0.0019371096509093685]2. [...]
+tree STATE_1210000 [&lnP=-22213.671793003457] = [&R] (9:[&rate=0.0027379651081162106]147.11166733139856,((33:[&rate=0.003251899815295765]88.45095032946452,(((((((3:[&rate=0.003851900664833477]11.857399209402605,5:[&rate=0.0026344411799542446]11.857399209402605):[&rate=0.0026178618537528037]17.905014134552403,((13:[&rate=0.0025362064003259643]19.415591143766232,7:[&rate=0.0034657593298756282]19.415591143766232):[&rate=0.0028755155342505816]6.353763183430299,(11:[&rate=0.002685153863706902 [...]
+tree STATE_1220000 [&lnP=-22218.960196713535] = [&R] (((((((((((((19:[&rate=0.0025381847825420344]2.108080517103887,20:[&rate=0.003611126138319179]2.108080517103887):[&rate=0.00296969010039156]2.4690836247066903,21:[&rate=0.002381786485214266]4.577164141810577):[&rate=0.0026772611122089307]4.629380922106774,15:[&rate=0.0026772611122089307]9.206545063917352):[&rate=0.00314538231788997]2.785972751658809,24:[&rate=0.0030615761946172094]11.99251781557616):[&rate=0.0030369125910540353]10.0282 [...]
+tree STATE_1230000 [&lnP=-22214.581892348106] = [&R] (((33:[&rate=0.002337178312794517]108.53207587611287,((((((22:[&rate=0.0022901218686661095]28.484261100455402,((((21:[&rate=0.0021561746540594783]3.300523126992434,20:[&rate=0.0018896928373765927]3.300523126992434):[&rate=0.0019695439560506478]1.317992204204392,19:[&rate=0.0022059262959332124]4.618515331196826):[&rate=0.002236484927547205]6.7871870639811105,15:[&rate=0.0022160212546727368]11.405702395177936):[&rate=0.002042766117721969 [...]
+tree STATE_1240000 [&lnP=-22223.626606888545] = [&R] (((14:[&rate=0.0020361374410669112]120.05036731702926,((((((23:[&rate=0.001734947664260463]14.391147949125841,17:[&rate=0.0019198706879089923]14.391147949125841):[&rate=0.002407138894208072]13.258788005065153,(22:[&rate=0.0020791223845424106]27.09696306040332,((((21:[&rate=0.001758676635533266]2.7926297640221693,20:[&rate=0.0022050333693764868]2.7926297640221693):[&rate=0.002183071918011883]0.20292443960573747,19:[&rate=0.0021238936769 [...]
+tree STATE_1250000 [&lnP=-22212.98947900918] = [&R] (((((((((22:[&rate=0.0032861035740857068]19.073667362711088,((15:[&rate=0.002770282686216776]7.515524164371948,(21:[&rate=0.00311265155668396]3.4444663892582996,(19:[&rate=0.0028155040801495926]1.4143087676974095,20:[&rate=0.0035008602934636134]1.4143087676974095):[&rate=0.003146753696608289]2.03015762156089):[&rate=0.0023044756694776394]4.0710577751136485):[&rate=0.003335397430300444]5.847124089693343,24:[&rate=0.002974243876243369]13. [...]
+tree STATE_1260000 [&lnP=-22227.056769044408] = [&R] (((33:[&rate=0.0027358274253450497]91.54936084138502,14:[&rate=0.002782963703590252]91.54936084138502):[&rate=0.0028095503825520804]5.488163532311802,((((((24:[&rate=0.0027623815708090397]13.31429762601947,(((21:[&rate=0.002750199902452722]3.3865498956647504,20:[&rate=0.0026792787997311097]3.3865498956647504):[&rate=0.0027415017124095362]0.5174425866062582,19:[&rate=0.0026925279809254313]3.9039924822710086):[&rate=0.0027046618777889575 [...]
+tree STATE_1270000 [&lnP=-22210.088603618584] = [&R] (((14:[&rate=0.003315774103169511]77.72300291007676,((((((22:[&rate=0.0027081171904970013]24.662307525824914,(24:[&rate=0.00229525753805451]16.43501670573635,(((21:[&rate=0.002633911216032921]3.3293293703943414,20:[&rate=0.0022080735124957373]3.3293293703943414):[&rate=0.003063465372981722]0.3505062828122072,19:[&rate=0.0018073591561707093]3.6798356532065486):[&rate=0.002391421472483477]9.850176543384936,15:[&rate=0.002128181953161761] [...]
+tree STATE_1280000 [&lnP=-22218.472430786882] = [&R] (((14:[&rate=0.0028398311439604626]92.83228049384894,((((16:[&rate=0.00260795068701308]39.5903396144824,((((15:[&rate=0.0026829182813066666]13.067557895303299,(24:[&rate=0.002981614541450676]11.744308909988986,((21:[&rate=0.0029046133513874473]3.3334417752111585,20:[&rate=0.0022587502820789374]3.3334417752111585):[&rate=0.002981614541450676]0.40003742881833126,19:[&rate=0.0031212519348813076]3.7334792040294897):[&rate=0.002239204985947 [...]
+tree STATE_1290000 [&lnP=-22213.926075349016] = [&R] ((9:[&rate=0.0029541815991642735]136.7467271099451,(((((((23:[&rate=0.0018245238614400638]14.379442749203339,17:[&rate=0.0019517124143922171]14.379442749203339):[&rate=0.0020626841894087473]15.146944275032627,((24:[&rate=0.002433755037943107]15.687387792731979,(15:[&rate=0.0024778741874496744]9.373868336525765,(19:[&rate=0.0018912522631647274]4.9942774711479,(20:[&rate=0.003020048968622363]2.6317024664245317,21:[&rate=0.002524917286830 [...]
+tree STATE_1300000 [&lnP=-22221.246350668822] = [&R] ((33:[&rate=0.0024677507398888103]107.26698494382512,((((((8:[&rate=0.003474523401837703]20.417382854116294,10:[&rate=0.0028507035236395237]20.417382854116294):[&rate=0.002822681006028718]16.147681966246925,(((13:[&rate=0.0024477189744913906]21.806764367815923,7:[&rate=0.0030241544627006216]21.806764367815923):[&rate=0.0031505288500267925]5.151798117009097,(11:[&rate=0.0027679229372363397]7.11335418617473,12:[&rate=0.002740839852624292 [...]
+tree STATE_1310000 [&lnP=-22214.36789545236] = [&R] ((33:[&rate=0.002572101560794949]88.22644482459253,(14:[&rate=0.0030712461791319885]73.36372465090327,(((((((24:[&rate=0.0029035648468784658]11.272270664092142,(((20:[&rate=0.002945906591250871]2.44361395026271,21:[&rate=0.002704936873605999]2.44361395026271):[&rate=0.0030712461791319885]1.2958935185687794,19:[&rate=0.0026296266823629872]3.7395074688314893):[&rate=0.003713747037949297]6.238769146343762,15:[&rate=0.003271271775884195]9.9 [...]
+tree STATE_1320000 [&lnP=-22216.561037810712] = [&R] (((14:[&rate=0.0021894910069029452]102.4958215758272,(((((((24:[&rate=0.002211296822580198]17.710949693516298,(((20:[&rate=0.00238807408126164]3.605108144842532,21:[&rate=0.0018677625895260374]3.605108144842532):[&rate=0.0022343568746432583]0.7554582588974776,19:[&rate=0.0022590275755134956]4.36056640374001):[&rate=0.0023001812900013915]9.4864957182633,15:[&rate=0.0021789713499959563]13.847062122003308):[&rate=0.002054517959417031]3.86 [...]
+tree STATE_1330000 [&lnP=-22209.84545558918] = [&R] (((14:[&rate=0.0034870678340844044]66.3148264627781,(((((((3:[&rate=0.003804167576163831]10.984248920051023,5:[&rate=0.0032175726750123907]10.984248920051023):[&rate=0.003466785712128978]16.005512199767995,((13:[&rate=0.003035990060634358]18.975912830075472,(11:[&rate=0.0038362903972160548]5.784781107695861,12:[&rate=0.0031984307292737754]5.784781107695861):[&rate=0.0036412121531868833]13.191131722379613):[&rate=0.003159643089467964]2.9 [...]
+tree STATE_1340000 [&lnP=-22212.815236765906] = [&R] (((((((((24:[&rate=0.0022615832573597283]16.344638517056026,(15:[&rate=0.0028517134687706938]12.22854867120325,((19:[&rate=0.002592246745134654]4.948736987164158,20:[&rate=0.0019696025078837073]4.948736987164158):[&rate=0.0022615832573597283]1.7626391660601302,21:[&rate=0.0016955402243701689]6.711376153224288):[&rate=0.003208363547693471]5.5171725179789615):[&rate=0.002450057332983472]4.116089845852777):[&rate=0.0022091640801305086]9.4 [...]
+tree STATE_1350000 [&lnP=-22210.7163854572] = [&R] (((((((16:[&rate=0.003351389859333611]30.37187791864371,(((3:[&rate=0.0036652185861990446]10.293258555711683,5:[&rate=0.003265456195384354]10.293258555711683):[&rate=0.00307882241475903]14.764211365352283,((13:[&rate=0.0032792330539063753]16.328745414843233,(11:[&rate=0.003952045653349526]4.786184793083152,12:[&rate=0.003132513112305946]4.786184793083152):[&rate=0.0034688003514373586]11.542560621760082):[&rate=0.0030511262715157807]6.117 [...]
+tree STATE_1360000 [&lnP=-22213.718890448745] = [&R] ((((((((((24:[&rate=0.003128905283813689]14.46510803125743,(15:[&rate=0.0029421595565251816]11.882551083036867,(19:[&rate=0.003144713411331306]4.4995212633906565,(21:[&rate=0.0033846226025162824]2.856249087386351,20:[&rate=0.0025713799950348466]2.856249087386351):[&rate=0.0031608933513427224]1.6432721760043054):[&rate=0.002814627627125256]7.383029819646211):[&rate=0.0026423799080548017]2.582556948220562):[&rate=0.0033341569455463066]7. [...]
+tree STATE_1370000 [&lnP=-22210.966558708507] = [&R] ((33:[&rate=0.0022200719293338682]111.35124388357323,(((((((23:[&rate=0.001963321583937342]13.88340773598166,17:[&rate=0.002241719639666918]13.88340773598166):[&rate=0.0024067061474182605]12.663062891739944,((24:[&rate=0.003353224679281429]15.310403317170822,(15:[&rate=0.002570964407507297]10.849035394826094,(21:[&rate=0.0029801740941898434]4.360367177653969,(20:[&rate=0.0029160196658857603]3.090040809863682,19:[&rate=0.002857466311508 [...]
+tree STATE_1380000 [&lnP=-22212.44789763194] = [&R] ((9:[&rate=0.003760689354920353]117.09383447279535,33:[&rate=0.0018109732046892896]117.09383447279535):[&rate=0.0018109732046892896]0.5824627851999225,(14:[&rate=0.0029030685553513486]90.97583953404282,(((((22:[&rate=0.0025519355738324668]22.605265449308007,((24:[&rate=0.0030270686277282773]16.67935457362274,(15:[&rate=0.0022498689294771913]12.93299066811399,(19:[&rate=0.002024229936917385]3.1948784520054505,(21:[&rate=0.003174498126265 [...]
+tree STATE_1390000 [&lnP=-22207.067500917165] = [&R] (((14:[&rate=0.004168908679039305]76.95775999304286,((((((23:[&rate=0.0027431723466189656]12.793851583199423,17:[&rate=0.00291955931329245]12.793851583199423):[&rate=0.0026539470971041713]11.378416032815089,((24:[&rate=0.0025488165074275424]14.003581532101935,(15:[&rate=0.002763995170913595]12.334248702632088,((21:[&rate=0.003721909360413335]3.200737764803349,20:[&rate=0.0035173999937591757]3.200737764803349):[&rate=0.00303183991813576 [...]
+tree STATE_1400000 [&lnP=-22214.638494003637] = [&R] (((((((16:[&rate=0.0024027206272236373]35.16413620607719,((8:[&rate=0.003445353010580956]21.028679820339796,10:[&rate=0.002934104302720685]21.028679820339796):[&rate=0.003361082500655224]13.967999813765175,((((11:[&rate=0.0030838563675911483]5.005055932485996,12:[&rate=0.003593694788449703]5.005055932485996):[&rate=0.0029920768379079012]17.225770195804753,13:[&rate=0.002482261504837261]22.230826128290747):[&rate=0.0032277758436706195]4 [...]
+tree STATE_1410000 [&lnP=-22201.818204933297] = [&R] (((((((((8:[&rate=0.004104279101535381]16.393787827101683,10:[&rate=0.003276623650148705]16.393787827101683):[&rate=0.003425386217285503]15.028391975419222,(((7:[&rate=0.003933162104357965]15.35565697285669,13:[&rate=0.0031131277845120207]15.35565697285669):[&rate=0.0025594311952388377]6.012047806937904,(11:[&rate=0.004494024254682457]4.225321671070383,12:[&rate=0.0031362801636495795]4.225321671070383):[&rate=0.00318264416829404]17.142 [...]
+tree STATE_1420000 [&lnP=-22224.81945863889] = [&R] (9:[&rate=0.0025967719631623445]126.40397652169078,(33:[&rate=0.00225610296376633]102.46925579528849,(((((((22:[&rate=0.002696399232749091]21.745275954410438,(((19:[&rate=0.0022224046089340827]3.975919821534841,(21:[&rate=0.002383211462520412]2.1012798984925833,20:[&rate=0.00248300878053394]2.1012798984925833):[&rate=0.0021640134132377893]1.8746399230422575):[&rate=0.0026101286684960985]8.1147868665076,15:[&rate=0.0018536456691721109]12 [...]
+tree STATE_1430000 [&lnP=-22212.704989579663] = [&R] (9:[&rate=0.0025510740587710405]141.99138820666886,((((((16:[&rate=0.0026826260102671634]35.6190118796021,((8:[&rate=0.002737305494063892]19.194144145639893,10:[&rate=0.0028861995047116773]19.194144145639893):[&rate=0.003768356802725197]13.373650835842863,(((7:[&rate=0.002400909877511586]21.739872389829443,13:[&rate=0.0020825658798581128]21.739872389829443):[&rate=0.002490338362211426]5.447403572293474,(11:[&rate=0.0029254362219894884] [...]
+tree STATE_1440000 [&lnP=-22226.971482629953] = [&R] (9:[&rate=0.0025432704123632315]148.44269968071544,((14:[&rate=0.0027821378346483613]85.2412673057709,((((((((11:[&rate=0.0026260040211991322]7.778069975365698,12:[&rate=0.0022900134393383617]7.778069975365698):[&rate=0.0024582952054154937]16.580649808445635,13:[&rate=0.0021933598919845693]24.358719783811335):[&rate=0.0022900134393383617]6.153579389133569,7:[&rate=0.0023268386407376334]30.512299172944903):[&rate=0.0027821378346483613]7 [...]
+tree STATE_1450000 [&lnP=-22224.732998390726] = [&R] (9:[&rate=0.0022866005957760563]162.7712502766177,(33:[&rate=0.002339664288196603]118.86343094941373,(14:[&rate=0.0025923411836459276]104.05233532982086,((((((8:[&rate=0.0024133782669345026]27.65953723470724,10:[&rate=0.002230314962832215]27.65953723470724):[&rate=0.0024245039643991844]13.771606861765061,(((7:[&rate=0.002115775638577745]27.528382868464558,13:[&rate=0.0020063817436762727]27.528382868464558):[&rate=0.002339664288196603]3 [...]
+tree STATE_1460000 [&lnP=-22206.903521059092] = [&R] (9:[&rate=0.004344445141859469]88.26566573999173,((14:[&rate=0.003967427059984181]62.4544069672158,(((((((((11:[&rate=0.0041808013905020695]4.651402592158112,12:[&rate=0.0033779324328622714]4.651402592158112):[&rate=0.005288415855560982]10.030102826685866,13:[&rate=0.003533521126535763]14.681505418843978):[&rate=0.0034010058557260754]3.247217001434864,7:[&rate=0.004450167921336432]17.92872242027884):[&rate=0.004113122215513828]3.040822 [...]
+tree STATE_1470000 [&lnP=-22226.149242217336] = [&R] (9:[&rate=0.0026890337813284017]144.4724043625765,(33:[&rate=0.0028116126002164926]107.9023841137247,(14:[&rate=0.0027266567094098512]97.17132415336104,((((16:[&rate=0.0022384980835585024]40.530688218907784,((((7:[&rate=0.002619149459984718]26.87465677841422,13:[&rate=0.0026697917176099597]26.87465677841422):[&rate=0.0028953045904406708]2.1547317497992324,(11:[&rate=0.003019915066481107]4.65775814742229,12:[&rate=0.002944335320575469]4 [...]
+tree STATE_1480000 [&lnP=-22216.256991910792] = [&R] (9:[&rate=0.0018920188088737417]176.4173041328154,((14:[&rate=0.0023570893091035437]100.67870114159348,33:[&rate=0.002693450628904741]100.67870114159348):[&rate=0.0023767708063952477]13.7684812725699,(((((((23:[&rate=0.0019252136669269824]15.247752371573005,17:[&rate=0.002196446807932422]15.247752371573005):[&rate=0.0023767708063952477]11.526844302450735,((15:[&rate=0.002131944184796468]14.696157021233313,(19:[&rate=0.00344173098009120 [...]
+tree STATE_1490000 [&lnP=-22215.810721081925] = [&R] (9:[&rate=0.003555696167356009]110.79437514535313,(33:[&rate=0.002366410516406826]89.47028845690674,((((((8:[&rate=0.0034404934352122306]20.59987282807,10:[&rate=0.0029687869370659285]20.59987282807):[&rate=0.002908505727576321]13.49413426909489,(16:[&rate=0.0028934239716513436]33.010318926288996,((3:[&rate=0.003199829393109433]10.720895440216383,5:[&rate=0.0034404934352122306]10.720895440216383):[&rate=0.003199829393109433]17.09473839 [...]
+tree STATE_1500000 [&lnP=-22218.432691949183] = [&R] (9:[&rate=0.002381707295318723]166.89564550006153,((14:[&rate=0.0025770779119511167]105.33138263219024,(((16:[&rate=0.002040108951945609]40.02071222007906,((((22:[&rate=0.002445596208844468]26.651268533356,(((15:[&rate=0.0027693120670091737]9.897778586015331,((19:[&rate=0.002706025682693658]4.396524902847496,21:[&rate=0.0027474716560738147]4.396524902847496):[&rate=0.002648264801034154]0.4150740228872305,20:[&rate=0.0020660233964442903 [...]
+tree STATE_1510000 [&lnP=-22209.348431582126] = [&R] ((33:[&rate=0.0024615784324679294]103.13123692585914,(((((16:[&rate=0.0024615784324679294]38.08926369985782,(((8:[&rate=0.0032364820270225263]20.72258307708947,10:[&rate=0.0027709072404240487]20.72258307708947):[&rate=0.002869456706629654]12.599929538749539,(3:[&rate=0.0029258148628790353]13.600665147204168,5:[&rate=0.002598774254819416]13.600665147204168):[&rate=0.002713781407898465]19.72184746863484):[&rate=0.003192970769260186]1.545 [...]
+tree STATE_1520000 [&lnP=-22222.742786383737] = [&R] ((33:[&rate=0.0026442468478500005]107.40945687829469,((((((((11:[&rate=0.0022514083645782754]8.575895140036172,12:[&rate=0.0027370479502898857]8.575895140036172):[&rate=0.0026597436695699585]17.131499197435225,(13:[&rate=0.002309971343176942]22.305071707396262,7:[&rate=0.0031088153958099512]22.305071707396262):[&rate=0.0027683374149691698]3.402322630075137):[&rate=0.002690627966129228]5.377660030825634,(3:[&rate=0.002581270937560241]16 [...]
+tree STATE_1530000 [&lnP=-22223.91543155582] = [&R] ((33:[&rate=0.002361632521180539]111.35586527182896,(((((16:[&rate=0.002472369714471557]44.422294221130215,((8:[&rate=0.002480788660091357]24.733053653333304,10:[&rate=0.0024162133320690332]24.733053653333304):[&rate=0.0023773131936052167]17.39479810061898,((((24:[&rate=0.002644324466725676]14.934382984464195,(((21:[&rate=0.002629091167281961]3.058113851039378,20:[&rate=0.0023294784382901844]3.058113851039378):[&rate=0.00221340894356394 [...]
+tree STATE_1540000 [&lnP=-22212.70681904742] = [&R] (((33:[&rate=0.003653161243813256]71.44216353750923,(((16:[&rate=0.0026819188871391292]33.42262279165505,((((7:[&rate=0.003121718291058446]25.442407997434433,(13:[&rate=0.0025012987360312645]21.392968047428297,(11:[&rate=0.0034090627451926854]5.9826648894576895,12:[&rate=0.002648975537790543]5.9826648894576895):[&rate=0.0027140720329186787]15.410303157970608):[&rate=0.002614922356887494]4.0494399500061355):[&rate=0.0026819188871391292]3 [...]
+tree STATE_1550000 [&lnP=-22210.223527455768] = [&R] ((14:[&rate=0.002728162191155748]87.62467130952508,((((((22:[&rate=0.003949010712623096]19.34869092278444,(((((20:[&rate=0.003176546373981989]1.390176864869071,21:[&rate=0.004012589091537619]1.390176864869071):[&rate=0.0030834343788330087]0.8602461505657304,19:[&rate=0.0035946939903576895]2.2504230154348015):[&rate=0.0043777881882297014]3.8387134586369784,15:[&rate=0.003671079620550441]6.08913647407178):[&rate=0.0036969670254949727]3.1 [...]
+tree STATE_1560000 [&lnP=-22222.043331092136] = [&R] ((33:[&rate=0.0026487103664537253]100.96181533793188,(((((((23:[&rate=0.002778496245600574]10.236625971780269,17:[&rate=0.0030369926486669063]10.236625971780269):[&rate=0.0027696598080513973]11.767053239229122,(((((20:[&rate=0.003195944429511395]3.054717986200166,21:[&rate=0.0026487103664537253]3.054717986200166):[&rate=0.0026331497246168682]1.2070293611260094,19:[&rate=0.002844690552937069]4.2617473473261756):[&rate=0.0030602743469444 [...]
+tree STATE_1570000 [&lnP=-22202.055094831154] = [&R] ((((((((23:[&rate=0.004033346241652671]6.431419942770713,17:[&rate=0.003599676100601145]6.431419942770713):[&rate=0.0029529801334280854]13.602700270279389,(((((20:[&rate=0.0035184223332757982]2.5504468943434944,21:[&rate=0.0035380352392714288]2.5504468943434944):[&rate=0.0031233727328445924]1.1321340873580055,19:[&rate=0.0034252420663494617]3.6825809817015):[&rate=0.0036213628361163267]3.209975372443241,15:[&rate=0.003336970618675101]6 [...]
+tree STATE_1580000 [&lnP=-22226.633588108667] = [&R] ((33:[&rate=0.001868184781885301]114.49665366952836,9:[&rate=0.0029825957687771507]114.49665366952836):[&rate=0.0022430907711856497]10.95476152169988,(14:[&rate=0.0020198249753790825]105.71606463022411,(((((22:[&rate=0.0020775780300171607]27.44351133178328,((23:[&rate=0.0025698343536243932]13.115896781658165,17:[&rate=0.0019151374582233808]13.115896781658165):[&rate=0.002317432421839148]12.190335748272467,((((20:[&rate=0.00346032759702 [...]
+tree STATE_1590000 [&lnP=-22229.387974684956] = [&R] ((33:[&rate=0.0019386470566763374]154.119004095964,(((((((8:[&rate=0.0018983506300703408]28.43758576296723,10:[&rate=0.001813597492260805]28.43758576296723):[&rate=0.0020185263402056545]22.357107295558258,((3:[&rate=0.0022616019147305307]18.587621349298033,5:[&rate=0.0016316679581024746]18.587621349298033):[&rate=0.0020870975670488523]29.560512127409197,((7:[&rate=0.002838103769747544]31.40517499470945,13:[&rate=0.0016059679119415236]3 [...]
+tree STATE_1600000 [&lnP=-22221.073900205243] = [&R] (((((((16:[&rate=0.0018410114133508475]47.5091655032005,((((11:[&rate=0.0022699075728428884]7.851784482853209,12:[&rate=0.0018294371135209125]7.851784482853209):[&rate=0.0020826210867164923]25.97396358296987,(13:[&rate=0.0019259882251877513]26.894518494280362,7:[&rate=0.0022108883729558726]26.894518494280362):[&rate=0.0017067169203245317]6.9312295715427155):[&rate=0.001754920234393212]6.924192873411805,(3:[&rate=0.002027313169802713]18 [...]
+tree STATE_1610000 [&lnP=-22231.17824664142] = [&R] ((((((((((24:[&rate=0.0017887428328081077]16.31619367458218,(((20:[&rate=0.0020402518159824664]3.4565924849388225,21:[&rate=0.001878257943388141]3.4565924849388225):[&rate=0.0021047339699314617]2.36846023308232,19:[&rate=0.0017887428328081077]5.825052718021142):[&rate=0.0016653510813416906]6.8368875747815725,15:[&rate=0.002218443517073199]12.661940292802715):[&rate=0.0017637627445029388]3.6542533817794656):[&rate=0.0019521324796731468]1 [...]
+tree STATE_1620000 [&lnP=-22200.644569796263] = [&R] ((9:[&rate=0.004605571899557503]118.18135635667596,((((16:[&rate=0.003544698837218353]32.386775652448804,((((22:[&rate=0.0028298219197360483]22.238457245210313,(24:[&rate=0.003408377319331631]14.383580713575947,(15:[&rate=0.0033417105935177995]9.0254738571183,((20:[&rate=0.004605571899557503]1.8989966945288876,21:[&rate=0.005762003256168058]1.8989966945288876):[&rate=0.001848189058540193]1.2973512888614969,19:[&rate=0.00209642667788291 [...]
+tree STATE_1630000 [&lnP=-22224.12668747969] = [&R] ((33:[&rate=0.0018109424813673666]146.76642308336113,(((((16:[&rate=0.0017735478751304229]45.51385451540636,(((3:[&rate=0.0024162502556796233]15.233184685514889,5:[&rate=0.0027518726506865205]15.233184685514889):[&rate=0.0020348826386016577]20.374374857017717,((7:[&rate=0.0034387479346856924]18.59805879356019,13:[&rate=0.0026441946463768413]18.59805879356019):[&rate=0.0024374780707373616]4.569846570145991,(11:[&rate=0.002669837057099209 [...]
+tree STATE_1640000 [&lnP=-22224.448931730123] = [&R] ((33:[&rate=0.0018827995357694118]137.04978082560737,(14:[&rate=0.002177107987007359]111.58572790229252,((((((((23:[&rate=0.002264922918111713]10.710402523539218,17:[&rate=0.0025035982928723957]10.710402523539218):[&rate=0.0019243154403140875]15.273158793684315,22:[&rate=0.00252707074851153]25.983561317223533):[&rate=0.0023966033047580554]1.4248259345424295,(24:[&rate=0.0026313471254386213]12.68441101969775,(15:[&rate=0.002037367343507 [...]
+tree STATE_1650000 [&lnP=-22213.729208888984] = [&R] ((33:[&rate=0.002579154681493089]101.39868147994015,(14:[&rate=0.0025318766938440844]91.70195725502872,(((((22:[&rate=0.0025076940370193173]21.531091465512493,((23:[&rate=0.00280666848607152]8.181628819046924,17:[&rate=0.0025318766938440844]8.181628819046924):[&rate=0.002457886879565753]12.340907814784934,((((21:[&rate=0.003355411894531369]2.39778964009253,20:[&rate=0.0017217079365956654]2.39778964009253):[&rate=0.0031583077402046723]1 [...]
+tree STATE_1660000 [&lnP=-22217.64491823031] = [&R] ((33:[&rate=0.002600536281498951]97.66588669199061,(((((16:[&rate=0.002074097444765522]39.759089811141095,((8:[&rate=0.0025159237575895404]21.530285997109317,10:[&rate=0.0030143249838650962]21.530285997109317):[&rate=0.00264467872179618]16.531787728560573,((3:[&rate=0.0025578312154819942]13.319269476013341,5:[&rate=0.0025020295990809353]13.319269476013341):[&rate=0.0023889760812060663]23.78471624551708,((7:[&rate=0.0024602681211028776]2 [...]
+tree STATE_1670000 [&lnP=-22219.111359742456] = [&R] (((((((((23:[&rate=0.002459638212943807]10.247020260400472,17:[&rate=0.0026883334287178297]10.247020260400472):[&rate=0.0023503508340299954]14.354834140756601,(22:[&rate=0.0027215276209222776]24.32465959968597,(24:[&rate=0.0029509475305829385]12.123241405275216,(15:[&rate=0.0023249502054456072]10.291434090162012,((20:[&rate=0.003542395781632783]2.2007368190692524,21:[&rate=0.002439512051646965]2.2007368190692524):[&rate=0.0021985603036 [...]
+tree STATE_1680000 [&lnP=-22213.855718740575] = [&R] ((((((((((24:[&rate=0.003147553316295737]17.058511461789376,(15:[&rate=0.0026619290959087956]13.432206983204594,(19:[&rate=0.0034855301638960813]3.329157585663938,(20:[&rate=0.0032646174706148176]2.7326265492820117,21:[&rate=0.0031741564236624307]2.7326265492820117):[&rate=0.002777167438240464]0.5965310363819265):[&rate=0.0027113662699038395]10.103049397540655):[&rate=0.0030750892404695002]3.6263044785847818):[&rate=0.00229504552933361 [...]
+tree STATE_1690000 [&lnP=-22211.755952727144] = [&R] ((33:[&rate=0.0024299052521560855]113.02952290325274,(14:[&rate=0.0025993282457344117]85.76064540266549,((((((15:[&rate=0.002413690491315919]11.154411653856133,(24:[&rate=0.002937477995631905]10.01491491422494,((21:[&rate=0.0033381922746722487]3.048270228684403,20:[&rate=0.00229872827304274]3.048270228684403):[&rate=0.0034348453142283852]0.42364153833088825,19:[&rate=0.0030232644558666998]3.4719117670152913):[&rate=0.002429905252156085 [...]
+tree STATE_1700000 [&lnP=-22200.354848928393] = [&R] (((((((((23:[&rate=0.003365249642814903]8.689822827105585,17:[&rate=0.003554003445164287]8.689822827105585):[&rate=0.0044482426523918955]7.4449578001786225,((24:[&rate=0.003680723368576502]11.463425923611451,(15:[&rate=0.0034012752335036396]6.862830842959434,((20:[&rate=0.0026060247129073575]2.588917724028089,21:[&rate=0.002453821316969177]2.588917724028089):[&rate=0.005061945384749165]0.9051286802093501,19:[&rate=0.002152566297037607] [...]
+tree STATE_1710000 [&lnP=-22218.00908822339] = [&R] (((((((((23:[&rate=0.00223561035008405]13.819595522355835,17:[&rate=0.0023243386592132174]13.819595522355835):[&rate=0.0023651301986674853]13.920514690531126,(22:[&rate=0.002319606903388302]25.224994295167356,(24:[&rate=0.002298284486531456]14.094228960081388,(15:[&rate=0.002353738195469663]9.029875938079307,((20:[&rate=0.0026256619081730243]2.7839907876019487,21:[&rate=0.0023688380728031106]2.7839907876019487):[&rate=0.0024675127487044 [...]
+tree STATE_1720000 [&lnP=-22220.999914905547] = [&R] (((((((16:[&rate=0.0024204854063074837]41.53538084934339,((8:[&rate=0.0025082417094569112]22.819692940117598,10:[&rate=0.0021677845038683653]22.819692940117598):[&rate=0.0027150748670406464]16.67284119147142,((3:[&rate=0.0024035316853673757]15.921517328532321,5:[&rate=0.002624922440464107]15.921517328532321):[&rate=0.0019864981064830964]20.37322805790826,(7:[&rate=0.002887761169193471]27.89309201862826,((11:[&rate=0.002924255853726518] [...]
+tree STATE_1730000 [&lnP=-22214.662399983175] = [&R] ((33:[&rate=0.002479110809568101]92.03361443747313,((((((((3:[&rate=0.00344280846018988]12.074969472513128,5:[&rate=0.0030387731977708317]12.074969472513128):[&rate=0.0029032481885051436]15.618568279968468,((11:[&rate=0.002813606327186488]6.593493751631915,12:[&rate=0.0026733563051393743]6.593493751631915):[&rate=0.0026733563051393743]17.180090743521323,(7:[&rate=0.0033052769162468694]18.92742738093144,13:[&rate=0.0025212817968618185]1 [...]
+tree STATE_1740000 [&lnP=-22213.395816307366] = [&R] ((33:[&rate=0.001986898247144993]143.13222489400684,(((((((23:[&rate=0.0024543092610580294]11.556926677165057,17:[&rate=0.0025951805820177932]11.556926677165057):[&rate=0.0018288385868608988]17.10590720212646,(22:[&rate=0.002101344055429096]25.841980634452806,(24:[&rate=0.0029161675976663434]14.590499294195936,(15:[&rate=0.002269565419820285]11.96149860203577,((19:[&rate=0.0026936856837765253]2.629886976231206,20:[&rate=0.0025951805820 [...]
+tree STATE_1750000 [&lnP=-22225.507089507908] = [&R] ((33:[&rate=0.00241674715184514]120.67945819037844,(((((((23:[&rate=0.002340280339258855]12.218287465823876,17:[&rate=0.0025624698971455505]12.218287465823876):[&rate=0.002349730193118853]14.991726874293706,(22:[&rate=0.0025121553512941906]25.40605643171192,(24:[&rate=0.00268000809931953]14.837969712142016,(15:[&rate=0.0023686490163914377]9.331135580296845,((19:[&rate=0.002457117239405688]3.416009628250596,20:[&rate=0.00250059082163465 [...]
+tree STATE_1760000 [&lnP=-22224.659311303767] = [&R] ((14:[&rate=0.0019802977657432754]123.6145273524971,(33:[&rate=0.0028495122093017628]110.04632294171805,((((((23:[&rate=0.0020807428213230374]12.54076551348276,17:[&rate=0.0024813381051727954]12.54076551348276):[&rate=0.0016926875807297024]16.0125887101901,(22:[&rate=0.0024442977817425797]25.072026233052373,(15:[&rate=0.0018535727767035883]14.967377176621312,(24:[&rate=0.0030256754175897555]12.871362155571589,((19:[&rate=0.003086544612 [...]
+tree STATE_1770000 [&lnP=-22224.044507310507] = [&R] ((14:[&rate=0.0019388952090558837]118.23972680927778,(33:[&rate=0.002257302824679745]111.88810357173075,(((((((23:[&rate=0.002347848634019328]12.525231180061601,17:[&rate=0.0019554962480894597]12.525231180061601):[&rate=0.0018024948494164122]13.732906449280057,22:[&rate=0.0024927353617169874]26.258137629341658):[&rate=0.0022954035219995765]0.33413024895299515,(24:[&rate=0.002185845907509524]14.159307697707087,(15:[&rate=0.0024558648812 [...]
+tree STATE_1780000 [&lnP=-22223.960448454556] = [&R] (((((((((3:[&rate=0.0034813764191206696]12.888116431225932,5:[&rate=0.002817804010000746]12.888116431225932):[&rate=0.003066869241916727]18.142791139746514,(((11:[&rate=0.0026834434597122845]7.270730331387094,12:[&rate=0.003110748783882294]7.270730331387094):[&rate=0.004320437900635444]8.971129025798698,13:[&rate=0.003389944395439594]16.241859357185792):[&rate=0.0023836898767731437]6.89977778494254,7:[&rate=0.0031964561216254813]23.141 [...]
+tree STATE_1790000 [&lnP=-22219.382613792615] = [&R] ((33:[&rate=0.002238007616213122]105.10513075853629,((((((((3:[&rate=0.0032383728854695733]12.516605729809275,5:[&rate=0.002238007616213122]12.516605729809275):[&rate=0.0027368221053146484]21.335761911769023,((11:[&rate=0.0029662421497426324]5.970033813701885,12:[&rate=0.0029483023634149764]5.970033813701885):[&rate=0.002391747046243677]23.232223706572135,(13:[&rate=0.0025342499473240206]24.840183507878972,7:[&rate=0.003006774657938844 [...]
+tree STATE_1800000 [&lnP=-22226.097397679434] = [&R] ((33:[&rate=0.002329144612483289]113.35742460450595,(14:[&rate=0.0022659775498179275]96.15122869447856,((((16:[&rate=0.00236635917126326]44.047539313111336,((((24:[&rate=0.002169526119731836]16.09139283262973,(15:[&rate=0.0021047339566300466]13.521839171621407,(21:[&rate=0.0026142696144687925]3.4278485672256753,(19:[&rate=0.0025082806839384704]2.9757415005179033,20:[&rate=0.002967184806043197]2.9757415005179033):[&rate=0.00219864280149 [...]
+tree STATE_1810000 [&lnP=-22238.4853966599] = [&R] ((14:[&rate=0.0020613908835537386]141.29647023096402,(33:[&rate=0.0021316989414330418]119.69742319948301,(((((((3:[&rate=0.0019740055836444544]20.092687464512704,5:[&rate=0.0024115790728141146]20.092687464512704):[&rate=0.0023245696961070217]22.851069262167787,((11:[&rate=0.0026009995147441237]8.708214582066406,12:[&rate=0.0021446734580269824]8.708214582066406):[&rate=0.002118433603624909]23.3315713772162,(13:[&rate=0.002398596334223775] [...]
+tree STATE_1820000 [&lnP=-22222.921474736726] = [&R] ((33:[&rate=0.0020155822101654984]130.3179123492375,(14:[&rate=0.002213527035557759]109.71991463903096,((((((22:[&rate=0.002248107234019071]25.999469934601,(24:[&rate=0.002625120846615394]13.423217197087936,(15:[&rate=0.002625120846615394]12.697127898744721,((21:[&rate=0.002546404977153568]2.0935623518435564,20:[&rate=0.002241180423747008]2.0935623518435564):[&rate=0.002248107234019071]0.9584575943945794,19:[&rate=0.0022906248583025044 [...]
+tree STATE_1830000 [&lnP=-22208.32766083455] = [&R] ((33:[&rate=0.002216125245530691]105.89197608132473,(((((((23:[&rate=0.0027798805913694146]8.874048345626527,17:[&rate=0.003125924409198669]8.874048345626527):[&rate=0.002428477330185349]14.546385576464317,((24:[&rate=0.0027367266654588626]13.789558504125463,(15:[&rate=0.0029238047912841223]10.832252372339125,((19:[&rate=0.0027512034131167125]1.6936092697119163,20:[&rate=0.003167778679257785]1.6936092697119163):[&rate=0.0027367266654588 [...]
+tree STATE_1840000 [&lnP=-22208.443217072687] = [&R] (9:[&rate=0.003987575653827928]94.87515065041502,((((((((22:[&rate=0.0032397558845172186]17.607971681359373,(24:[&rate=0.0025576910625368736]11.59096440647342,(15:[&rate=0.003045793586356185]9.373082321621713,((20:[&rate=0.0027568107996094092]2.1407196938138746,21:[&rate=0.0038598937727044313]2.1407196938138746):[&rate=0.002646459900055578]2.4156707465858442,19:[&rate=0.0029146070636173493]4.556390440399719):[&rate=0.002346840476333604 [...]
+tree STATE_1850000 [&lnP=-22217.172472295235] = [&R] (9:[&rate=0.0031040541399197986]136.4256164774751,((((((((23:[&rate=0.0024410128368257717]9.547508220386899,17:[&rate=0.002737078554198156]9.547508220386899):[&rate=0.0025050138867695956]14.710033218988624,(22:[&rate=0.0026040475942602694]23.231663905550516,((15:[&rate=0.002589136154158368]10.640559979837116,((21:[&rate=0.003048832505313707]1.7158854908418655,20:[&rate=0.0031693656441174473]1.7158854908418655):[&rate=0.0024853103716129 [...]
+tree STATE_1860000 [&lnP=-22221.07432381948] = [&R] (9:[&rate=0.0032536593088716274]103.20788801106004,(33:[&rate=0.003128170852247034]71.3676865548791,(14:[&rate=0.003362475035513959]61.138756707049836,(((((((15:[&rate=0.00514044100931068]7.080540981897644,(19:[&rate=0.0038207666490400177]2.8629762110941432,(20:[&rate=0.0036287087531501217]1.4285334787184352,21:[&rate=0.003510658571268447]1.4285334787184352):[&rate=0.0024972607256078445]1.434442732375708):[&rate=0.003362475035513959]4.2 [...]
+tree STATE_1870000 [&lnP=-22218.806578478412] = [&R] (9:[&rate=0.0023407759884927857]167.09606138298943,(33:[&rate=0.0019713023918273404]126.29054476102876,(14:[&rate=0.0022303690464190716]101.41858073624164,((((((((15:[&rate=0.001998449926560728]10.653621139804944,(19:[&rate=0.0025226735177172497]5.326854411252739,(20:[&rate=0.002408109870284016]2.6518897761303615,21:[&rate=0.001873415740872988]2.6518897761303615):[&rate=0.0017792891978914618]2.674964635122378):[&rate=0.0019021565573905 [...]
+tree STATE_1880000 [&lnP=-22202.482966599113] = [&R] (9:[&rate=0.0032291671304978645]118.62568167772079,((((((((((15:[&rate=0.0027697587033937983]8.946961928011829,((19:[&rate=0.002990113774202299]2.4009979215860513,20:[&rate=0.003581982331741354]2.4009979215860513):[&rate=0.0031540477058217015]0.5933267601941181,21:[&rate=0.0030116479972075003]2.9943246817801694):[&rate=0.0030758871669791434]5.95263724623166):[&rate=0.0032429614718405076]2.679470480260319,24:[&rate=0.003321183135818162] [...]
+tree STATE_1890000 [&lnP=-22221.64595344174] = [&R] (9:[&rate=0.0027185378124609935]145.1606411742961,((((((((22:[&rate=0.0024722357360829695]22.307214235938083,(24:[&rate=0.0027493483889236585]12.357048866442769,((19:[&rate=0.0027538478666554334]2.6785949456823195,(21:[&rate=0.002840981777411972]2.3494441897018077,20:[&rate=0.0028031063212528413]2.3494441897018077):[&rate=0.002797642188879093]0.3291507559805118):[&rate=0.0028146199807710364]7.717680533962258,15:[&rate=0.0027404586144713 [...]
+tree STATE_1900000 [&lnP=-22214.593162434274] = [&R] (9:[&rate=0.0023980911386413126]176.80624732617298,(((((((22:[&rate=0.002078066221055589]27.92650242507503,((23:[&rate=0.002195813565254559]14.266425059536544,17:[&rate=0.002272794231447115]14.266425059536544):[&rate=0.0024110489162562435]12.727611784789477,(24:[&rate=0.0025544890432687463]12.498907940214036,((19:[&rate=0.0023980911386413126]4.0423759663537195,(20:[&rate=0.0024787992357633397]3.5447448718326506,21:[&rate=0.002478799235 [...]
+tree STATE_1910000 [&lnP=-22218.78103124423] = [&R] (9:[&rate=0.0025622484908251627]175.26132966688658,(33:[&rate=0.002151115622356789]124.27617641548389,((((16:[&rate=0.002283688304180099]41.25963099968169,(((((7:[&rate=0.003050734513034499]22.77763700028637,13:[&rate=0.002027681580712797]22.77763700028637):[&rate=0.0021293323918246255]6.706952670744499,(11:[&rate=0.0025963118084334296]7.316546678886313,12:[&rate=0.0034827973027436875]7.316546678886313):[&rate=0.0029777144569312648]22.1 [...]
+tree STATE_1920000 [&lnP=-22214.742260282506] = [&R] (9:[&rate=0.0023848061741526304]137.33005843372592,((33:[&rate=0.0024643841585460595]90.51681863053463,((((((18:[&rate=0.002425826777329586]25.777986343873003,(26:[&rate=0.0031254394126629843]15.95213344415273,25:[&rate=0.0027049593364245296]15.95213344415273):[&rate=0.002501137017142231]9.825852899720273):[&rate=0.0025710506170018837]7.0998880666986395,(2:[&rate=0.002482952805208892]28.772394446399762,((1:[&rate=0.002316431900441663]8 [...]
+tree STATE_1930000 [&lnP=-22212.481926899087] = [&R] (9:[&rate=0.0024627593809459112]144.53179170710874,(33:[&rate=0.002231639342574701]106.7155924305064,(((((((18:[&rate=0.0024475245394236017]31.419482795708895,(26:[&rate=0.0027649788240191727]15.917296846299925,25:[&rate=0.00231128342856045]15.917296846299925):[&rate=0.002180003682921153]15.50218594940897):[&rate=0.0032455122490965926]5.4286206504519114,(2:[&rate=0.0026255358382991844]29.895473415336884,((1:[&rate=0.0024022220211587296 [...]
+tree STATE_1940000 [&lnP=-22224.960120448235] = [&R] (9:[&rate=0.0033902792689401544]122.25222615035717,(33:[&rate=0.0021940303924656394]107.08452273563063,((((((8:[&rate=0.0037534450370933447]20.57869745134983,10:[&rate=0.002556060149409624]20.57869745134983):[&rate=0.0018950321357930446]18.340742093726742,(16:[&rate=0.0025405264078359503]36.926451415124944,((3:[&rate=0.003242022384543044]13.623016750261794,5:[&rate=0.0028750838284128926]13.623016750261794):[&rate=0.002832211797941296]1 [...]
+tree STATE_1950000 [&lnP=-22214.625309055602] = [&R] (9:[&rate=0.0027796821172630646]139.19547357666133,((14:[&rate=0.0021888881101202852]90.79400459019378,33:[&rate=0.0028736325009085005]90.79400459019378):[&rate=0.0028979426559936445]12.615288152728567,((((((8:[&rate=0.0033177609804109437]21.436246979600227,10:[&rate=0.002999945488493543]21.436246979600227):[&rate=0.0030546817490396767]16.19124650854704,((3:[&rate=0.0029478787781037634]14.668538096277002,5:[&rate=0.002531617557692457]1 [...]
+tree STATE_1960000 [&lnP=-22213.770323367335] = [&R] (33:[&rate=0.0020228855081583868]111.11565608699051,(9:[&rate=0.003402502479153751]109.80611638350825,(14:[&rate=0.002569703704327502]87.61788377067931,(((((((3:[&rate=0.0034962253271598773]12.008832751266993,5:[&rate=0.0025526846695759645]12.008832751266993):[&rate=0.002467768970354012]21.362058802445283,(7:[&rate=0.0026924508099641737]26.25981398176652,(13:[&rate=0.0020927542409462344]23.321487289543636,(11:[&rate=0.00250180696789349 [...]
+tree STATE_1970000 [&lnP=-22212.861149855235] = [&R] ((33:[&rate=0.002106489142458103]119.02518186511844,(14:[&rate=0.0022891669920447515]100.65982454909695,((((((3:[&rate=0.002522052074476792]16.418453073205818,5:[&rate=0.0019988787229114993]16.418453073205818):[&rate=0.002767166409972531]20.069206328909843,(7:[&rate=0.002667910025197184]27.75712418344554,(13:[&rate=0.0019988787229114993]26.17738384156564,(11:[&rate=0.003170669208052051]7.00432734931828,12:[&rate=0.002429519764967712]7. [...]
+tree STATE_1980000 [&lnP=-22204.837265295526] = [&R] ((33:[&rate=0.002052361896188378]107.9563690332261,(((((((8:[&rate=0.003386516450341626]18.218471702111206,10:[&rate=0.003283822482563602]18.218471702111206):[&rate=0.0027408436458231005]11.931538184311492,((3:[&rate=0.0027953245206918825]14.796221689494416,5:[&rate=0.002115381074874266]14.796221689494416):[&rate=0.003251438775429186]13.857359262695425,((11:[&rate=0.0034607842547714706]6.43726217734803,12:[&rate=0.0026856939497433767]6 [...]
+tree STATE_1990000 [&lnP=-22213.798652324633] = [&R] ((33:[&rate=0.0024610640379451676]102.31216393643254,((((((((22:[&rate=0.0030797947921528293]20.894725032494005,(24:[&rate=0.0028281692615067322]12.466153072025708,(((20:[&rate=0.0029572877339658905]2.5547923274532107,19:[&rate=0.0032223518941766815]2.5547923274532107):[&rate=0.003123346122571371]1.3784599339249577,21:[&rate=0.002837340775196427]3.9332522613781684):[&rate=0.0024610640379451676]6.379271348339904,15:[&rate=0.003019251076 [...]
+tree STATE_2000000 [&lnP=-22222.73681969999] = [&R] (((33:[&rate=0.001836053271507376]117.62089348979383,(((((((3:[&rate=0.00243260320532603]16.53157625434024,5:[&rate=0.0023499315741151456]16.53157625434024):[&rate=0.0021013727987575425]20.33361984821224,((11:[&rate=0.0021961483978154544]7.729749834542474,12:[&rate=0.0024778580448159278]7.729749834542474):[&rate=0.002221833263416007]24.31568187842101,(13:[&rate=0.0020058410498675016]26.968599022840113,7:[&rate=0.002544984358704703]26.96 [...]
+tree STATE_2010000 [&lnP=-22217.85364219446] = [&R] ((14:[&rate=0.002213463050299688]121.16262783472159,(((((16:[&rate=0.0022676726256435535]46.979645666144464,((8:[&rate=0.0027476785977178244]28.98360036872638,10:[&rate=0.002325225405048207]28.98360036872638):[&rate=0.002187111818678286]16.712503953575705,(((11:[&rate=0.0018704049184163972]10.379739617213746,12:[&rate=0.001956043320218662]10.379739617213746):[&rate=0.0028714030944137535]21.56495423256103,(13:[&rate=0.0015160574287566243 [...]
+tree STATE_2020000 [&lnP=-22227.792369807004] = [&R] ((33:[&rate=0.002494109488133638]114.75058026180078,((((((((11:[&rate=0.00260948551126243]8.259835278513204,12:[&rate=0.0025078875835460464]8.259835278513204):[&rate=0.0026029217814385166]19.497430155116955,(13:[&rate=0.0025894353297368855]21.726493476075735,7:[&rate=0.0025542858999873788]21.726493476075735):[&rate=0.0025309505373633827]6.030771957554425):[&rate=0.002574481990480557]3.3931139632680747,(3:[&rate=0.0025973822298467155]16 [...]
+tree STATE_2030000 [&lnP=-22221.178805904157] = [&R] ((33:[&rate=0.0020963641830061045]124.09443663595889,((((((((24:[&rate=0.002748219672327209]11.781866273139993,(((20:[&rate=0.002509311741004982]3.7383964134089513,21:[&rate=0.0027765860097399226]3.7383964134089513):[&rate=0.0031069718025255886]0.15455369121750673,19:[&rate=0.0024090433734401547]3.892950104626458):[&rate=0.003080759386258831]4.801127539177514,15:[&rate=0.002991028831296767]8.694077643803972):[&rate=0.002820522420537780 [...]
+tree STATE_2040000 [&lnP=-22217.11790247683] = [&R] ((14:[&rate=0.002797975154969472]91.92528377352122,(33:[&rate=0.002438095037133218]81.05469840555838,(((((((((15:[&rate=0.002438095037133218]9.641420966517462,((20:[&rate=0.0029054815113880974]1.5257442706727593,21:[&rate=0.002797975154969472]1.5257442706727593):[&rate=0.00303201285429041]1.6993829169961638,19:[&rate=0.002797975154969472]3.225127187668923):[&rate=0.0028570066160978516]6.416293778848539):[&rate=0.0025921222499702325]3.26 [...]
+tree STATE_2050000 [&lnP=-22228.42914282005] = [&R] (((14:[&rate=0.0024832479024947717]102.99479899205451,((((((((15:[&rate=0.002190721089327244]12.693490784239001,((19:[&rate=0.002258116299107589]4.152115190577586,20:[&rate=0.002070627994223179]4.152115190577586):[&rate=0.0021666096402527643]3.047518685092765,21:[&rate=0.002258116299107589]7.199633875670351):[&rate=0.0021537724948734152]5.493856908568651):[&rate=0.0023610752760062186]4.7231497342472135,24:[&rate=0.002184845628143199]17. [...]
+tree STATE_2060000 [&lnP=-22215.109067752637] = [&R] ((14:[&rate=0.0028371646724099616]88.18889230161994,(((((((22:[&rate=0.002707654018740516]22.075379763872157,((((21:[&rate=0.0029820952112443286]2.8181074782172564,20:[&rate=0.0029539850197298143]2.8181074782172564):[&rate=0.0031498288254377263]0.2744089261350249,19:[&rate=0.0026726595727172434]3.0925164043522813):[&rate=0.002750882469174493]6.6964068628159,15:[&rate=0.0027574722813669093]9.788923267168181):[&rate=0.002750882469174493] [...]
+tree STATE_2070000 [&lnP=-22227.886479901408] = [&R] ((14:[&rate=0.003228297874174566]92.29246826181193,(33:[&rate=0.0032244333040020437]77.01392244812693,(((((8:[&rate=0.003180295300546525]19.729892282053868,10:[&rate=0.003237010726030996]19.729892282053868):[&rate=0.0032609200846397107]12.38512744090167,(((7:[&rate=0.0032725636017054555]18.911817823746805,13:[&rate=0.0031780675376825145]18.911817823746805):[&rate=0.003142261679376408]2.9083662410750257,(11:[&rate=0.0031924603752913473] [...]
+tree STATE_2080000 [&lnP=-22220.143460477466] = [&R] ((33:[&rate=0.0022373078091273016]100.80277000753973,(((((((8:[&rate=0.002694345542799342]24.640503596566507,10:[&rate=0.002453026728109804]24.640503596566507):[&rate=0.002497082319379826]13.249220574484962,16:[&rate=0.002552277921101778]37.88972417105147):[&rate=0.0024333462624876526]0.5176557471177858,(((7:[&rate=0.0029570984721469747]24.82778127261508,13:[&rate=0.0023594996365084516]24.82778127261508):[&rate=0.0025825200074161107]2. [...]
+tree STATE_2090000 [&lnP=-22218.07851969766] = [&R] (((14:[&rate=0.002625364773004045]99.30336471564375,((((16:[&rate=0.002468774811737053]42.91089123289912,((((11:[&rate=0.0021265069357812743]11.502504415241358,12:[&rate=0.001903859512321794]11.502504415241358):[&rate=0.0033163418496352584]18.851363903087062,(13:[&rate=0.0020556075996165045]23.97631053163576,7:[&rate=0.0030998428490757917]23.97631053163576):[&rate=0.0021599439885653035]6.377557786692659):[&rate=0.0020918771384339277]4.9 [...]
+tree STATE_2100000 [&lnP=-22215.981227864904] = [&R] (((14:[&rate=0.0028192791665001726]88.34299582113817,((((16:[&rate=0.002687694817621412]34.75482355099568,((8:[&rate=0.002687694817621412]22.202244817082,10:[&rate=0.0022634564697129453]22.202244817082):[&rate=0.003036430610461578]12.081427631267747,(((11:[&rate=0.0029032769831348407]7.355166839133428,12:[&rate=0.002499044783903048]7.355166839133428):[&rate=0.0028751922214221813]17.409122560963453,(13:[&rate=0.00237458804193544]22.1195 [...]
+tree STATE_2110000 [&lnP=-22216.556862186455] = [&R] (((33:[&rate=0.0038684855700233402]73.64796050702202,((((16:[&rate=0.0033973400169675684]31.64527022856763,((8:[&rate=0.003454959108169135]17.880851364035962,10:[&rate=0.003214341054707576]17.880851364035962):[&rate=0.0037439380380131625]12.628787305026052,(((11:[&rate=0.0033433913892778997]5.561900484140051,12:[&rate=0.003227274715188696]5.561900484140051):[&rate=0.002847402738361005]16.60414861981807,(13:[&rate=0.0034255646063515396] [...]
+tree STATE_2120000 [&lnP=-22210.24882078634] = [&R] (((14:[&rate=0.002770617858120312]76.94554774896461,((((((23:[&rate=0.0021327684975128427]10.250756416739979,17:[&rate=0.0028293965691328594]10.250756416739979):[&rate=0.0028487334833687404]12.281235346041289,(22:[&rate=0.003466998253379714]20.777267991578736,(24:[&rate=0.0031478188294207323]13.67133855810135,(15:[&rate=0.003003487970934197]8.4823305078719,(19:[&rate=0.002551265038380442]5.264125285204812,(21:[&rate=0.002525215315676377 [...]
+tree STATE_2130000 [&lnP=-22215.996018232752] = [&R] (((14:[&rate=0.0020046789519330387]103.8853291410683,((((((23:[&rate=0.0018823308615132663]11.918692977309124,17:[&rate=0.002215465488140206]11.918692977309124):[&rate=0.0018471285698198436]16.573419528381798,(22:[&rate=0.002465194193415431]26.95493630213227,(24:[&rate=0.002465194193415431]16.619014624603626,(15:[&rate=0.0016659861008180305]12.69830818533833,(19:[&rate=0.0019679132165449343]4.8067842891601344,(21:[&rate=0.0021927821467 [...]
+tree STATE_2140000 [&lnP=-22222.48764387818] = [&R] ((((((((22:[&rate=0.0027257468152877187]23.439199946667824,((23:[&rate=0.0026056139294889603]8.99473565299326,17:[&rate=0.0024620787341294295]8.99473565299326):[&rate=0.0022079320294857065]13.175375829560064,(24:[&rate=0.0023868193484450967]13.039100118007632,(15:[&rate=0.0029176264241169707]11.49494849339276,(19:[&rate=0.0025636115952107174]4.9611295193473675,(21:[&rate=0.0023633599407514028]3.5725432846563745,20:[&rate=0.0025949285020 [...]
+tree STATE_2150000 [&lnP=-22217.882369022533] = [&R] (((14:[&rate=0.0030828497582804097]76.52419126104677,((((16:[&rate=0.0030367968740868576]31.830989640832517,((8:[&rate=0.003158264693403213]21.392738378820876,10:[&rate=0.0031523014891417346]21.392738378820876):[&rate=0.0031281377266352047]9.678984233000172,(((11:[&rate=0.0034052773920576606]5.4885387523141445,12:[&rate=0.0031761205123587736]5.4885387523141445):[&rate=0.0031761205123587736]17.956672322712013,(13:[&rate=0.00328126727899 [...]
+tree STATE_2160000 [&lnP=-22211.038173586596] = [&R] (((14:[&rate=0.0027431088872197002]78.00954006283821,(((((((24:[&rate=0.0024046227500687982]14.087827002201074,(15:[&rate=0.002446978153420011]9.976214726245766,(21:[&rate=0.0024261208549861664]2.7535916241501663,(19:[&rate=0.0018331955245090412]2.093470613140643,20:[&rate=0.003179329302857757]2.093470613140643):[&rate=0.0028935554285927393]0.6601210110095233):[&rate=0.0019706302708761255]7.2226231020956):[&rate=0.002446978153420011]4. [...]
+tree STATE_2170000 [&lnP=-22217.391177643007] = [&R] (((14:[&rate=0.0034234550588920416]74.9338595021896,((((((23:[&rate=0.0027101322734017497]12.743232542793354,17:[&rate=0.0023568779363479526]12.743232542793354):[&rate=0.0023812246042043536]12.184212308138257,((24:[&rate=0.0029515618883259025]17.649275358969398,(15:[&rate=0.0030040064883252613]11.340997643181403,((19:[&rate=0.0027497390833899635]3.302828772330956,20:[&rate=0.002919312149414244]3.302828772330956):[&rate=0.00287387210487 [...]
+tree STATE_2180000 [&lnP=-22216.950500410705] = [&R] (((14:[&rate=0.0025705081630883354]110.96564256874589,(((((((((19:[&rate=0.0027520958171995493]4.735427069920651,(20:[&rate=0.001826557122313325]3.2824925403490575,21:[&rate=0.0026706272266449503]3.2824925403490575):[&rate=0.0026706272266449503]1.4529345295715936):[&rate=0.002454043928065708]7.584556587739297,15:[&rate=0.0021625711402281315]12.319983657659948):[&rate=0.0020138434467512527]4.612017974888408,24:[&rate=0.00252303801135533 [...]
+tree STATE_2190000 [&lnP=-22227.316347617485] = [&R] ((((((((((23:[&rate=0.002165806694529433]11.491595612583119,17:[&rate=0.002506608153637148]11.491595612583119):[&rate=0.0021214628747370456]18.530036668336475,((((21:[&rate=0.0022091897250711453]3.5218050146630437,20:[&rate=0.002471477226870163]3.5218050146630437):[&rate=0.0023574074710219265]2.2515180750311394,19:[&rate=0.002023547850665061]5.773323089694183):[&rate=0.001963989840028999]7.782015776297215,15:[&rate=0.002864049322116668 [...]
+tree STATE_2200000 [&lnP=-22228.563825593712] = [&R] ((((((((((((19:[&rate=0.0021540669799935]5.4352540575821875,(20:[&rate=0.0021540669799935]3.951686221228455,21:[&rate=0.0021459406118951147]3.951686221228455):[&rate=0.002360531017812966]1.4835678363537323):[&rate=0.0021540669799935]4.54477036552012,15:[&rate=0.002367055109403893]9.980024423102307):[&rate=0.0024539658019657436]4.054544318687986,24:[&rate=0.002224344572218313]14.034568741790293):[&rate=0.0023542063478051802]8.9459641917 [...]
+tree STATE_2210000 [&lnP=-22217.253178581133] = [&R] (((14:[&rate=0.002624441772208722]94.58217311028994,((((16:[&rate=0.0025141794032655404]39.520869954748,((((7:[&rate=0.0026569220449746043]22.686023618301327,13:[&rate=0.002599340396793939]22.686023618301327):[&rate=0.002581999801258623]6.694593968218829,(11:[&rate=0.002721739190863563]7.141541594393427,12:[&rate=0.0026649606154988533]7.141541594393427):[&rate=0.002398497796606323]22.23907599212673):[&rate=0.002599340396793939]3.501600 [...]
+tree STATE_2220000 [&lnP=-22219.183114444353] = [&R] (((((((((23:[&rate=0.0029146646149430205]11.030843125607397,17:[&rate=0.0029146646149430205]11.030843125607397):[&rate=0.002788463328906718]12.515918857494981,(((((20:[&rate=0.002951696785408338]2.265552921142292,21:[&rate=0.0030909668720405575]2.265552921142292):[&rate=0.002951696785408338]2.1176324608954706,19:[&rate=0.0030282054699947163]4.383185382037762):[&rate=0.0028807289253522973]7.71073274713425,15:[&rate=0.0028073189073283774 [...]
+tree STATE_2230000 [&lnP=-22212.96429291782] = [&R] (((14:[&rate=0.0027287425101509034]80.78319098772462,(((16:[&rate=0.0025918214785970208]38.8979974861732,(((7:[&rate=0.0025918214785970208]26.759622407604965,(13:[&rate=0.0024333476760963406]20.76277773119268,(11:[&rate=0.0027287425101509034]6.466563626556661,12:[&rate=0.002312112673737231]6.466563626556661):[&rate=0.0030725792082371803]14.296214104636022):[&rate=0.002414057369011912]5.996844676412284):[&rate=0.002684061975683847]7.0007 [...]
+tree STATE_2240000 [&lnP=-22228.174360311226] = [&R] (((14:[&rate=0.0024171006147242144]119.93084276681243,(((((8:[&rate=0.002552613220200722]25.97466488915282,10:[&rate=0.002552613220200722]25.97466488915282):[&rate=0.0035214172971956828]11.089732660584765,(((11:[&rate=0.002660767822454501]7.946783245921259,12:[&rate=0.0020025614417980052]7.946783245921259):[&rate=0.003802458863478763]15.710054035193671,(13:[&rate=0.0022143593557499446]19.106091240685586,7:[&rate=0.0037206204725822043]1 [...]
+tree STATE_2250000 [&lnP=-22209.423860832685] = [&R] (((14:[&rate=0.002628314724706907]93.24785903083081,((((((8:[&rate=0.0029881130452030997]23.986155206463724,10:[&rate=0.0022186433734611272]23.986155206463724):[&rate=0.003856684064111099]10.033688328304994,16:[&rate=0.0029577988933321133]34.01984353476872):[&rate=0.0024507993004606685]1.8299387631876556,(((7:[&rate=0.0031230962947714283]22.435492521992042,13:[&rate=0.0022919933243397105]22.435492521992042):[&rate=0.0028727472816379]4. [...]
+tree STATE_2260000 [&lnP=-22205.86769752728] = [&R] ((33:[&rate=0.0027818375966127346]94.87501325689875,(14:[&rate=0.003227527497329849]75.24249063446032,((((((8:[&rate=0.0033188298663453077]17.712312235990808,10:[&rate=0.0032944934933374945]17.712312235990808):[&rate=0.0030090351590654285]12.991836365856571,(((11:[&rate=0.003708002031616541]4.420024715959323,12:[&rate=0.002674405123289011]4.420024715959323):[&rate=0.0028315752158690416]19.07404468324525,(13:[&rate=0.002863996153418041]2 [...]
+tree STATE_2270000 [&lnP=-22224.008426201242] = [&R] ((33:[&rate=0.002337004962832085]110.11387875134936,(14:[&rate=0.0028159558893325054]96.7535471539396,((((((8:[&rate=0.002680815002119424]23.54948831961617,10:[&rate=0.0024799511352706703]23.54948831961617):[&rate=0.002583946036814577]11.469467256235387,16:[&rate=0.0023207545412377655]35.01895557585156):[&rate=0.002459693266801426]2.9525842750056555,(((11:[&rate=0.0023520868414718633]6.9558505453737345,12:[&rate=0.002546814980988138]6. [...]
+tree STATE_2280000 [&lnP=-22221.51385938854] = [&R] ((33:[&rate=0.0022479315723025165]125.09749630337242,((((((((24:[&rate=0.002540410057674535]18.169220585554754,((19:[&rate=0.002407741910465339]5.228724547061802,(20:[&rate=0.002738459465013377]3.3847972306241045,21:[&rate=0.002883570984098664]3.3847972306241045):[&rate=0.002233610482016307]1.8439273164376973):[&rate=0.002721013326649205]6.125447357142178,15:[&rate=0.002344962469455915]11.35417190420398):[&rate=0.0027571432831784344]6.8 [...]
+tree STATE_2290000 [&lnP=-22205.18489495885] = [&R] ((((((((((24:[&rate=0.0031086736218077964]11.546718454501558,((21:[&rate=0.003810190657278328]3.756218984808186,(19:[&rate=0.0036828867168892474]2.8719873486201726,20:[&rate=0.002907036745285094]2.8719873486201726):[&rate=0.004091508835693187]0.8842316361880136):[&rate=0.005086877453136126]6.433742690140971,15:[&rate=0.0028596547697117237]10.189961674949156):[&rate=0.0031793246406248673]1.356756779552402):[&rate=0.005685054633605763]5.2 [...]
+tree STATE_2300000 [&lnP=-22211.99732445363] = [&R] ((33:[&rate=0.002529382843868242]103.37648175847191,9:[&rate=0.0037398554023861]103.37648175847191):[&rate=0.0031218069112679114]3.205508503976361,(((((16:[&rate=0.002326340704279767]38.67632917423048,((((((19:[&rate=0.0030145607141188876]4.02929464996425,(21:[&rate=0.002754559335898122]3.637224727967612,20:[&rate=0.002513270437232203]3.637224727967612):[&rate=0.0028519700776848274]0.3920699219966375):[&rate=0.0024424887152917]5.9625914 [...]
+tree STATE_2310000 [&lnP=-22235.017159715364] = [&R] (9:[&rate=0.002217278528802954]169.41233561751483,(33:[&rate=0.002055999510553375]123.29934110763038,((((((((23:[&rate=0.0020263463666137394]13.990249647342354,17:[&rate=0.0021747614790238994]13.990249647342354):[&rate=0.002046413398980888]13.646621882603553,22:[&rate=0.002288026853918396]27.636871529945907):[&rate=0.0020365604531277378]2.1253727780844898,((((19:[&rate=0.002074673780415085]3.6948019903093754,20:[&rate=0.002308818038811 [...]
+tree STATE_2320000 [&lnP=-22235.787677413995] = [&R] (9:[&rate=0.002719994949844018]138.80362406288808,((((((((22:[&rate=0.002564185982821366]25.321156613907654,(((19:[&rate=0.0025765423576220095]3.507530858088082,(21:[&rate=0.00267867424184567]3.054778348926113,20:[&rate=0.0026440706995642]3.054778348926113):[&rate=0.0026259011117300054]0.45275250916196885):[&rate=0.002618191720941524]3.7859920517513515,15:[&rate=0.002759750775603305]7.293522909839433):[&rate=0.002462546881965336]7.0111 [...]
+tree STATE_2330000 [&lnP=-22216.59144133235] = [&R] (((14:[&rate=0.002676303301857492]104.15253148000909,(((((22:[&rate=0.002479873872324151]25.78163221255525,((((19:[&rate=0.0030371303679634204]2.91031797293441,(20:[&rate=0.0029336125877986985]2.0718360807933824,21:[&rate=0.0027558644293304837]2.0718360807933824):[&rate=0.0023852564762386046]0.8384818921410275):[&rate=0.0023377759279278702]4.859159390550966,15:[&rate=0.002676303301857492]7.769477363485376):[&rate=0.0023615640925862902]5 [...]
+tree STATE_2340000 [&lnP=-22223.58117197035] = [&R] ((14:[&rate=0.0023779204939975195]105.05849633391645,(33:[&rate=0.0027446478241992104]97.26279255189363,((((((3:[&rate=0.0023891103957018487]13.64702815281056,5:[&rate=0.002508923197031685]13.64702815281056):[&rate=0.0022692779047234738]22.910314300596237,((11:[&rate=0.0023291075408458837]6.2326790788146536,12:[&rate=0.0024000036283400037]6.2326790788146536):[&rate=0.00256598638223564]21.46361139945246,(13:[&rate=0.002527850581767124]23 [...]
+tree STATE_2350000 [&lnP=-22219.62237005507] = [&R] (33:[&rate=0.0015246386257204492]129.27855727684127,((14:[&rate=0.0023719535733721098]102.76498162834177,(((((16:[&rate=0.002435466391895004]36.21508498893003,(((3:[&rate=0.0033169221413240774]12.601904651468628,5:[&rate=0.003011367945457813]12.601904651468628):[&rate=0.002479117775759211]17.978168000559243,(((11:[&rate=0.0025953062226698196]7.8823738237669465,12:[&rate=0.002102679543276896]7.8823738237669465):[&rate=0.00311175177415860 [...]
+tree STATE_2360000 [&lnP=-22212.889373688216] = [&R] (((14:[&rate=0.003111624845584019]95.23973796002862,((((((18:[&rate=0.0025609808031177407]30.046803263659946,(26:[&rate=0.002793242751947809]17.77880981126264,25:[&rate=0.002258250093793211]17.77880981126264):[&rate=0.002636851255579495]12.267993452397306):[&rate=0.002904116655735262]4.191357904243617,(2:[&rate=0.002793242751947809]29.595637133226504,((1:[&rate=0.0027320059666154873]7.714816297357327,6:[&rate=0.0022779354003281804]7.71 [...]
+tree STATE_2370000 [&lnP=-22208.72421355266] = [&R] (((14:[&rate=0.0031101953109362672]84.0318959695562,((((((18:[&rate=0.002970293419824423]25.392349022828967,(26:[&rate=0.0030033316617274785]14.255512935060425,25:[&rate=0.0034994586153000904]14.255512935060425):[&rate=0.0028596795313247684]11.136836087768542):[&rate=0.002554494854740641]4.031411360761368,(2:[&rate=0.002781885118679928]26.477729285577233,((1:[&rate=0.0031496759060119887]7.216254241329174,6:[&rate=0.0027341705447765288]7 [...]
+tree STATE_2380000 [&lnP=-22216.76008977948] = [&R] ((14:[&rate=0.0030303925360715815]92.86872274620197,(33:[&rate=0.002997695793368912]82.24603028551566,((((((18:[&rate=0.0033407908146065564]21.618290719039265,(26:[&rate=0.003746248661940211]11.062751509061043,25:[&rate=0.0032468893310236757]11.062751509061043):[&rate=0.0027809383724173376]10.555539209978223):[&rate=0.002464081220213763]5.837824995843896,(2:[&rate=0.0028958690285751845]24.321151743247338,((1:[&rate=0.0031947222141878616 [...]
+tree STATE_2390000 [&lnP=-22216.652421231844] = [&R] (((((((((18:[&rate=0.0029142652886681476]25.481647376392942,(26:[&rate=0.003507956304020104]11.480087498528544,25:[&rate=0.002727979048403302]11.480087498528544):[&rate=0.0023494192437575113]14.001559877864398):[&rate=0.0031899052469688905]4.217893668509724,(2:[&rate=0.0032287206971973945]24.898034180442966,((1:[&rate=0.003507956304020104]8.207951631917904,6:[&rate=0.0025685523612833886]8.207951631917904):[&rate=0.002665209901148119]12 [...]
+tree STATE_2400000 [&lnP=-22207.86201981865] = [&R] (((33:[&rate=0.002821693016290048]85.38082583331794,((((((18:[&rate=0.002722252945559226]23.454140534288552,(26:[&rate=0.0037570190637181635]12.50372338842527,25:[&rate=0.0030222802038659844]12.50372338842527):[&rate=0.0033170519926893362]10.950417145863282):[&rate=0.002940429687362585]4.313677374947321,(2:[&rate=0.0030350896119535876]23.114865889748028,((1:[&rate=0.0029689215284415876]6.925229110179353,6:[&rate=0.002877491493568857]6.9 [...]
+tree STATE_2410000 [&lnP=-22212.71069542648] = [&R] ((33:[&rate=0.002753939630868422]96.85073440629932,(((((((18:[&rate=0.002801150291128944]27.518449554740016,(26:[&rate=0.003382006721501266]17.895590072733846,25:[&rate=0.0024591464342561668]17.895590072733846):[&rate=0.003006920535861537]9.62285948200617):[&rate=0.0024591464342561668]5.8346632005581185,(2:[&rate=0.002954794678357793]30.302421232578745,((1:[&rate=0.0031920095743153964]8.437780563852023,6:[&rate=0.002571305738987409]8.43 [...]
+tree STATE_2420000 [&lnP=-22230.043920588767] = [&R] (((((((((18:[&rate=0.002176591616741499]32.575675413602816,(26:[&rate=0.002176591616741499]18.805454303193322,25:[&rate=0.0019727834957276066]18.805454303193322):[&rate=0.0021080358241506005]13.770221110409494):[&rate=0.0019974657736681824]10.079571281069796,(2:[&rate=0.001880619912245855]33.10588562910269,((1:[&rate=0.0019138244197624275]9.76920405062609,6:[&rate=0.001962352080348524]9.76920405062609):[&rate=0.0020211579841439005]19.6 [...]
+tree STATE_2430000 [&lnP=-22214.748853198267] = [&R] ((33:[&rate=0.0025251328362134833]94.69814225861207,(14:[&rate=0.0029239395221776366]73.15183270171835,((((((18:[&rate=0.0026554195447469533]22.065285741833936,(26:[&rate=0.0031405872689036106]12.599395255917132,25:[&rate=0.0029239395221776366]12.599395255917132):[&rate=0.003070082064540865]9.465890485916804):[&rate=0.002587383109339257]5.1659928168231986,(2:[&rate=0.0029641672529258393]24.64766512089211,((1:[&rate=0.002747182600565789 [...]
+tree STATE_2440000 [&lnP=-22208.72679455807] = [&R] ((33:[&rate=0.001998814384960522]105.14653820677628,(14:[&rate=0.0031161420302652423]87.70389819973937,((((((18:[&rate=0.0028961814083826413]29.65534137945688,(26:[&rate=0.003267445085602969]15.72090635415883,25:[&rate=0.0031161420302652423]15.72090635415883):[&rate=0.0023292291706568393]13.934435025298049):[&rate=0.002497645431855422]6.3671376297605065,(2:[&rate=0.002742637836783291]29.405616028060372,((1:[&rate=0.003041182986167325]8. [...]
+tree STATE_2450000 [&lnP=-22215.230155406836] = [&R] ((14:[&rate=0.0025238778867664345]95.20294935258805,(33:[&rate=0.003099909812963536]79.90831215611388,((((((18:[&rate=0.0025048488744359677]25.424464099490084,(26:[&rate=0.002626831376302768]15.654356100630885,25:[&rate=0.003144289243063165]15.654356100630885):[&rate=0.003220365796156741]9.7701079988592):[&rate=0.002419055650689425]4.301420602599659,(2:[&rate=0.0031934501028415573]23.381201319098068,((1:[&rate=0.0032805372162079066]7.2 [...]
+tree STATE_2460000 [&lnP=-22216.31042037198] = [&R] ((33:[&rate=0.002069466987908468]106.16910383682915,(((((((18:[&rate=0.0030008333789417257]23.742796778409943,(26:[&rate=0.002738560455686607]16.374459892809217,25:[&rate=0.002345752935637865]16.374459892809217):[&rate=0.0025497789427750732]7.368336885600726):[&rate=0.0026933863214515397]6.779708985124614,(2:[&rate=0.0029117194252753003]25.011479014085406,((1:[&rate=0.002769864443873943]8.685672206055692,6:[&rate=0.002430582173167919]8. [...]
+tree STATE_2470000 [&lnP=-22217.732340063976] = [&R] (((((16:[&rate=0.0027689216622850494]38.7451389404797,(((8:[&rate=0.003022185311039852]23.728086510248424,10:[&rate=0.002754972390866018]23.728086510248424):[&rate=0.002875243788330313]12.695451158861744,((3:[&rate=0.002796212742488452]13.80694422553841,5:[&rate=0.002980655363781873]13.80694422553841):[&rate=0.0034150469910354227]15.904107917155512,(((11:[&rate=0.003266639054885733]5.576106491731112,12:[&rate=0.0027407737875708884]5.57 [...]
+tree STATE_2480000 [&lnP=-22211.74007691098] = [&R] (((14:[&rate=0.0023963516818005567]100.30216386789549,((((((((7:[&rate=0.0023762183188189264]24.234767656012345,13:[&rate=0.0020547083856334468]24.234767656012345):[&rate=0.0027440793378494953]6.1158373026691315,(11:[&rate=0.00254983441617437]7.630741962554655,12:[&rate=0.002317524819782267]7.630741962554655):[&rate=0.0022419865860483093]22.71986299612682):[&rate=0.0028141857587352962]7.2788991313569475,(3:[&rate=0.00312479189085747]12. [...]
+tree STATE_2490000 [&lnP=-22214.650168407457] = [&R] ((33:[&rate=0.0022809700697860673]106.52274470768418,(14:[&rate=0.0028925567379400096]86.68734134370378,(((((((3:[&rate=0.002452441014708207]15.374253046955321,5:[&rate=0.0023736118659981736]15.374253046955321):[&rate=0.0027330818923341164]15.966805333951317,((11:[&rate=0.0027330818923341164]7.369187850437667,12:[&rate=0.0026372571237352404]7.369187850437667):[&rate=0.003269878182118092]18.190274008856747,(13:[&rate=0.00283182016680244 [...]
+tree STATE_2500000 [&lnP=-22207.87512468531] = [&R] (((33:[&rate=0.004014478086168371]67.63505887018505,((((((8:[&rate=0.0036362785683544995]18.115484041988164,10:[&rate=0.0030929220077581623]18.115484041988164):[&rate=0.0030929220077581623]13.577306758248987,((3:[&rate=0.003285842034003208]12.86403578953691,5:[&rate=0.002847925188456996]12.86403578953691):[&rate=0.002847925188456996]16.6851770286828,((11:[&rate=0.002435857347606754]7.982000083583131,12:[&rate=0.0023171508079380047]7.982 [...]
+tree STATE_2510000 [&lnP=-22230.26470576068] = [&R] (((14:[&rate=0.002364137973507571]104.60201552879917,((((((8:[&rate=0.002595395169634804]23.227555361220123,10:[&rate=0.002426213944028098]23.227555361220123):[&rate=0.002397969969908599]17.62227053431673,((3:[&rate=0.0023032135408204667]14.717116312037355,5:[&rate=0.002234069361321707]14.717116312037355):[&rate=0.002464501950649173]21.34869256350695,(7:[&rate=0.0019882163724948775]29.908661712386166,(13:[&rate=0.0023032135408204667]25. [...]
+tree STATE_2520000 [&lnP=-22212.399348841165] = [&R] (((((((16:[&rate=0.00242329702642115]36.70490425229953,(((18:[&rate=0.003072714359796553]24.17499326495068,(26:[&rate=0.002860475065978891]13.336965434486721,25:[&rate=0.0026279409834224694]13.336965434486721):[&rate=0.0024713611454987225]10.838027830463957):[&rate=0.003160673356522449]4.877619335236137,(2:[&rate=0.0026740533778035667]24.990494447160508,((1:[&rate=0.0027859127223714615]6.1952249780268875,6:[&rate=0.0026279409834224694] [...]
+tree STATE_2530000 [&lnP=-22221.981514930714] = [&R] (((((((16:[&rate=0.0026018014271464796]37.436010273177175,(((3:[&rate=0.00227200906939245]14.336141033239292,5:[&rate=0.002675038611561812]14.336141033239292):[&rate=0.0023417321045371073]19.383775297168334,((11:[&rate=0.002363675013021694]7.9893427103559045,12:[&rate=0.0023852853934746994]7.9893427103559045):[&rate=0.0026018014271464796]22.165523323264864,(13:[&rate=0.001955008142279626]27.085686525767024,7:[&rate=0.002296128642419813 [...]
+tree STATE_2540000 [&lnP=-22208.84656170913] = [&R] ((33:[&rate=0.0026587220301327315]94.50055939042475,(14:[&rate=0.003694465855746843]69.81911276138561,((((((18:[&rate=0.003173952144110054]24.34000598195339,(26:[&rate=0.0030273745587923533]16.312696143590095,25:[&rate=0.0023822368839065316]16.312696143590095):[&rate=0.002798929922357153]8.027309838363294):[&rate=0.003213750737302741]4.310091872145563,(2:[&rate=0.003009889751634407]23.834008653754417,((1:[&rate=0.0031936296303950655]6.2 [...]
+tree STATE_2550000 [&lnP=-22216.384021855647] = [&R] ((14:[&rate=0.002697337273977902]95.81411461365012,(33:[&rate=0.002722910069538818]78.96388832329457,((((((18:[&rate=0.0031300911900468123]22.569660978534476,(26:[&rate=0.002697337273977902]12.851138559407476,25:[&rate=0.0029773914906876]12.851138559407476):[&rate=0.00299365015713542]9.718522419127):[&rate=0.002894407133601176]4.994943503361341,(2:[&rate=0.0028870386477230776]23.302627371475864,((1:[&rate=0.0029616702077773457]7.345667 [...]
+tree STATE_2560000 [&lnP=-22208.147501128788] = [&R] (((((((((18:[&rate=0.0036532131931568037]24.699731955013945,(26:[&rate=0.0035886839927080304]16.72812324270155,25:[&rate=0.0025102771202322907]16.72812324270155):[&rate=0.0030038277145045386]7.9716087123123955):[&rate=0.003103727101663534]6.136433667448621,(2:[&rate=0.0028904893185021977]25.891191096029335,((1:[&rate=0.0030038277145045386]7.138749262078419,6:[&rate=0.002336922558482981]7.138749262078419):[&rate=0.00253253863607269]14.3 [...]
+tree STATE_2570000 [&lnP=-22225.193529953813] = [&R] (((14:[&rate=0.002563120643129948]85.00990212743035,33:[&rate=0.0024349286508579537]85.00990212743035):[&rate=0.0029895287302013074]15.186574893062598,((((((18:[&rate=0.0025103063998489967]30.557020992792573,(26:[&rate=0.0024735214020452593]18.48992715765909,25:[&rate=0.002764870593751856]18.48992715765909):[&rate=0.002697580465495346]12.067093835133484):[&rate=0.002664271573790288]4.959328915881507,(2:[&rate=0.002614138913718341]27.91 [...]
+tree STATE_2580000 [&lnP=-22215.75546549469] = [&R] (((((((((18:[&rate=0.0026020359987508406]29.18884363090457,(26:[&rate=0.002700983723771341]16.954007416528917,25:[&rate=0.0022997759757525946]16.954007416528917):[&rate=0.002624900529235096]12.234836214375655):[&rate=0.00197138336610854]8.544596674458113,(2:[&rate=0.002161304358071119]31.54385163984278,((1:[&rate=0.003246454557555482]9.279357237632237,6:[&rate=0.001523336325490733]9.279357237632237):[&rate=0.0029178690618450326]16.53998 [...]
+tree STATE_2590000 [&lnP=-22216.33563138668] = [&R] (((((((((18:[&rate=0.003031494620850181]25.684928981314947,(26:[&rate=0.002722554578533525]15.190243307299264,25:[&rate=0.002864630867341715]15.190243307299264):[&rate=0.0030581678695053203]10.494685674015683):[&rate=0.0024673315027732403]4.949081236161554,(2:[&rate=0.0029009974806693227]27.517625337977723,((1:[&rate=0.0024203702533181475]7.353983780217521,6:[&rate=0.002708100236543328]7.353983780217521):[&rate=0.0025822087725543034]13. [...]
+tree STATE_2600000 [&lnP=-22212.890856894643] = [&R] (((((((16:[&rate=0.0027115224803206986]36.017911555035376,(((3:[&rate=0.0028072582558030836]13.438534641883148,5:[&rate=0.0027687982936935394]13.438534641883148):[&rate=0.0027753593408016523]16.64297564629394,((11:[&rate=0.0029799350316986987]4.672473565546657,12:[&rate=0.0028773729934334375]4.672473565546657):[&rate=0.0030567905814996438]21.602934380300702,(13:[&rate=0.0026027059957577335]21.585065948514334,7:[&rate=0.0029122687668055 [...]
+tree STATE_2610000 [&lnP=-22203.278395295652] = [&R] (((((((16:[&rate=0.0026257086476395726]33.31060923296622,((8:[&rate=0.003476399994078349]17.338488873339127,10:[&rate=0.003873184922269043]17.338488873339127):[&rate=0.003476399994078349]12.857864576646154,((3:[&rate=0.003319538275160892]13.861970926519202,5:[&rate=0.002658796516626441]13.861970926519202):[&rate=0.003873184922269043]13.640770321064632,((7:[&rate=0.003824984379478894]18.359034512820987,13:[&rate=0.002925398447138482]18. [...]
+tree STATE_2620000 [&lnP=-22212.461619981186] = [&R] (((33:[&rate=0.0024527474167977685]103.85460512029023,((((16:[&rate=0.0027886268557149353]35.660203670123984,(((((11:[&rate=0.0032483614443337677]6.672316723589646,12:[&rate=0.002063689766620072]6.672316723589646):[&rate=0.003189557187485109]14.532135934980634,13:[&rate=0.002473590585519452]21.20445265857028):[&rate=0.0022325057077251464]4.431367510512246,7:[&rate=0.002771953872134593]25.635820169082525):[&rate=0.003089479651543962]5.4 [...]
+tree STATE_2630000 [&lnP=-22214.110323303747] = [&R] (((((((16:[&rate=0.0021705833022744397]40.743327350061044,((((7:[&rate=0.0034178681834622997]19.232101108281643,13:[&rate=0.0025460207816562367]19.232101108281643):[&rate=0.0021044762524911956]5.099069192270278,(11:[&rate=0.0030884632831090606]6.790163609697803,12:[&rate=0.002459137725540705]6.790163609697803):[&rate=0.0032478725608847065]17.54100669085412):[&rate=0.0029750945142621318]8.209433722603688,(3:[&rate=0.0032478725608847065] [...]
+tree STATE_2640000 [&lnP=-22216.46104909822] = [&R] ((((((((16:[&rate=0.003203145405457931]33.12074143598125,(((11:[&rate=0.003438295516658257]4.963229156772506,12:[&rate=0.003124791340517646]4.963229156772506):[&rate=0.0024751216120437384]16.770800227608067,(13:[&rate=0.002901418800689278]17.881001992667056,7:[&rate=0.003124791340517646]17.881001992667056):[&rate=0.0026281988402076014]3.8530273917135176):[&rate=0.003140036292564677]7.703004072812753,(3:[&rate=0.0032364422426596298]13.25 [...]
+tree STATE_2650000 [&lnP=-22215.243072327827] = [&R] (((14:[&rate=0.0035130564643772776]74.33782857862003,((((16:[&rate=0.002508874792104603]37.74887982211167,((((11:[&rate=0.0031166920738280963]6.694801082178679,12:[&rate=0.002834417517216712]6.694801082178679):[&rate=0.002677825525577029]20.262208873604727,(13:[&rate=0.0023653836413085523]23.08300987779625,7:[&rate=0.00308138478802958]23.08300987779625):[&rate=0.00320054951394621]3.874000077987155):[&rate=0.002661947205573301]5.9444965 [...]
+tree STATE_2660000 [&lnP=-22206.196381399684] = [&R] ((33:[&rate=0.0030757587971529843]91.3584505254407,((((((((23:[&rate=0.002965465074283707]8.713162707754948,17:[&rate=0.003173012766888082]8.713162707754948):[&rate=0.0029018072712170264]8.937874872160569,((15:[&rate=0.003100987434069931]8.116341121889562,(19:[&rate=0.002934587095141273]2.8847487776157874,(21:[&rate=0.0040581300027361196]1.8917924508622728,20:[&rate=0.0030757587971529843]1.8917924508622728):[&rate=0.0034581610788827154 [...]
+tree STATE_2670000 [&lnP=-22213.31055528685] = [&R] ((14:[&rate=0.0022438991715495993]114.70184243033265,(33:[&rate=0.0030344571047400434]97.81269576206994,(((((22:[&rate=0.0025273048527221413]24.50098610934207,((23:[&rate=0.002691432639514435]12.52671620191955,17:[&rate=0.002907772657974832]12.52671620191955):[&rate=0.0030153055511438313]11.65380508481728,(((19:[&rate=0.0027082538771582695]5.232541328665814,(21:[&rate=0.00292501382983183]3.6601879724032274,20:[&rate=0.003616691819915039 [...]
+tree STATE_2680000 [&lnP=-22204.9368718459] = [&R] ((33:[&rate=0.002749068461650363]87.43334983619876,(14:[&rate=0.002867766994138504]80.6544297652129,((((((3:[&rate=0.004718112516555371]9.206346397127076,5:[&rate=0.0037722585905126268]9.206346397127076):[&rate=0.002575317055955458]18.456251386922148,((11:[&rate=0.004115836440709943]5.3667687072063,12:[&rate=0.002717231753634817]5.3667687072063):[&rate=0.0034666429158713924]16.37194257675965,(13:[&rate=0.0029508571011529912]17.7088337083 [...]
+tree STATE_2690000 [&lnP=-22218.66656613367] = [&R] ((33:[&rate=0.0021288053830005264]120.74867573602099,(((((((23:[&rate=0.0018807693552411818]14.252164367652052,17:[&rate=0.0023988791034963764]14.252164367652052):[&rate=0.002081560889230186]15.934449628192786,(22:[&rate=0.002015485594548802]28.87931839939586,(((19:[&rate=0.0022365516329972755]4.371007430660336,(21:[&rate=0.0018807693552411818]3.7048596751412823,20:[&rate=0.001743587242436689]3.7048596751412823):[&rate=0.001980283736868 [...]
+tree STATE_2700000 [&lnP=-22227.096064742098] = [&R] (((((((((3:[&rate=0.0024417416894745377]17.473583183001235,5:[&rate=0.0024732806398471297]17.473583183001235):[&rate=0.0025133758370174688]17.168748450418928,((11:[&rate=0.002461661826743314]6.3392348534925915,12:[&rate=0.0025189275217052127]6.3392348534925915):[&rate=0.002466129488612533]20.69879005361836,(13:[&rate=0.0024640005828869384]22.448913181353696,7:[&rate=0.0025174694906170067]22.448913181353696):[&rate=0.002474836297686245] [...]
+tree STATE_2710000 [&lnP=-22222.54988143301] = [&R] ((33:[&rate=0.0020147720710867994]131.37632984669415,(14:[&rate=0.0024229616551426666]103.986240269978,((((((23:[&rate=0.0022327570386002004]13.215474242092268,17:[&rate=0.0022551562625836683]13.215474242092268):[&rate=0.0020986139486509734]13.820598300716116,(22:[&rate=0.0025104046591776902]23.56122531462361,(((19:[&rate=0.0019557929961755488]3.578795792221648,(21:[&rate=0.001980381802428329]2.8392917257427754,20:[&rate=0.0022111140681 [...]
+tree STATE_2720000 [&lnP=-22218.915298780263] = [&R] ((33:[&rate=0.002823750256373248]98.23963518456566,(((((16:[&rate=0.0030836311292806082]39.88783352160931,((8:[&rate=0.003006153342489476]23.30434851004026,10:[&rate=0.0027700954294891276]23.30434851004026):[&rate=0.002743671985636505]15.359171984874887,((3:[&rate=0.002579781544504951]17.748935533903804,5:[&rate=0.0024014136000694093]17.748935533903804):[&rate=0.0027833768165828584]19.689642413148363,(7:[&rate=0.002730489136126713]27.0 [...]
+tree STATE_2730000 [&lnP=-22220.748742019496] = [&R] ((14:[&rate=0.002501217056621645]119.71063599920622,(33:[&rate=0.002312269902924484]104.61593540381304,(((((((3:[&rate=0.002139424830367305]17.145520579245037,5:[&rate=0.002256774676068968]17.145520579245037):[&rate=0.0020820444512601615]22.18934279318413,((11:[&rate=0.0023031432404127944]7.80107984546331,12:[&rate=0.002584333706504783]7.80107984546331):[&rate=0.0022472362675152124]25.926044548967607,(13:[&rate=0.0020820444512601615]26 [...]
+tree STATE_2740000 [&lnP=-22221.742087081722] = [&R] ((33:[&rate=0.0021632632312377593]125.30645945268958,(((((16:[&rate=0.002012821775421427]49.49033194951689,((8:[&rate=0.0024083058466150852]28.47415136488829,10:[&rate=0.002061978742006315]28.47415136488829):[&rate=0.0018881177770367992]19.69720281796965,((3:[&rate=0.0022406228173254945]17.617309753115542,5:[&rate=0.0019038189907547736]17.617309753115542):[&rate=0.002023011655454783]24.61136381005906,((11:[&rate=0.002080720335857242]9. [...]
+tree STATE_2750000 [&lnP=-22205.400999269994] = [&R] (((14:[&rate=0.003297295226060144]65.99014697196193,(((((16:[&rate=0.0033621174719728475]28.005168886942847,(8:[&rate=0.003468860672904959]15.785576518743053,10:[&rate=0.0030586623092645907]15.785576518743053):[&rate=0.0033621174719728475]12.219592368199795):[&rate=0.0033406474726721557]0.6364274336796143,(((11:[&rate=0.0035119056265411853]5.32576126723101,12:[&rate=0.0036931291114204357]5.32576126723101):[&rate=0.0038782472539519233]1 [...]
+tree STATE_2760000 [&lnP=-22225.568033770647] = [&R] ((((((((((((11:[&rate=0.002570743405447325]6.82399317960951,12:[&rate=0.0024302644804681125]6.82399317960951):[&rate=0.0029119364068235553]15.483863927325999,13:[&rate=0.0025242855220993367]22.307857106935508):[&rate=0.00234692110544544]3.1003099249251633,7:[&rate=0.0027509829591092884]25.40816703186067):[&rate=0.002132288741108235]6.700435762305943,(3:[&rate=0.002570743405447325]12.048764977602533,5:[&rate=0.0027875500250101447]12.048 [...]
+tree STATE_2770000 [&lnP=-22201.764140492385] = [&R] (((33:[&rate=0.0026255387096432736]91.11871260780104,((((16:[&rate=0.0025118752971427082]34.99081947098,((((7:[&rate=0.003382474858852751]22.491396538732808,13:[&rate=0.00199804550326245]22.491396538732808):[&rate=0.003545658815671847]3.014257300808186,(11:[&rate=0.002896281009711077]5.648226152908784,12:[&rate=0.0031210144615522457]5.648226152908784):[&rate=0.0026255387096432736]19.85742768663221):[&rate=0.0037900056606085296]3.437949 [...]
+tree STATE_2780000 [&lnP=-22230.41672661192] = [&R] (((((((((((7:[&rate=0.0023820081453844893]21.042124277954425,13:[&rate=0.002972115540465189]21.042124277954425):[&rate=0.002728822098521892]5.156939555735306,(11:[&rate=0.0024771700665137323]8.17586249214261,12:[&rate=0.002133889904969796]8.17586249214261):[&rate=0.0026827363238366687]18.02320134154712):[&rate=0.002828928457383001]8.505641923055212,(3:[&rate=0.002221851407927922]13.763174380659244,5:[&rate=0.002652894056656979]13.763174 [...]
+tree STATE_2790000 [&lnP=-22210.46450793055] = [&R] ((14:[&rate=0.0023299073314275924]98.18164369078714,(33:[&rate=0.0027867035409825185]96.54556795144909,(((16:[&rate=0.0022244881284065945]38.084656906044074,((((8:[&rate=0.0029044173743018606]22.858015759751716,10:[&rate=0.00260591933558669]22.858015759751716):[&rate=0.0038154637780316494]10.638752302425019,(3:[&rate=0.0024377725364635437]17.192155550175972,5:[&rate=0.002074201406803739]17.192155550175972):[&rate=0.0027314413577302693]1 [...]
+tree STATE_2800000 [&lnP=-22218.859933045223] = [&R] ((33:[&rate=0.002245908015723882]112.86661738056402,(14:[&rate=0.0024753683123124627]100.1164937837575,(((((((3:[&rate=0.002441691276968035]15.73863713486941,5:[&rate=0.0024642568820689758]15.73863713486941):[&rate=0.0021471639946270015]21.275085606258603,((7:[&rate=0.0025517915187506296]28.418457264066692,13:[&rate=0.0023137535392250526]28.418457264066692):[&rate=0.0024642568820689758]3.5934557105778033,(11:[&rate=0.002298310732176698 [...]
+tree STATE_2810000 [&lnP=-22220.906279633105] = [&R] (((14:[&rate=0.0024271097465637403]91.46800459696193,((((16:[&rate=0.0024271097465637403]40.0196498963875,(((8:[&rate=0.0024842491935613297]21.58613364314203,10:[&rate=0.0023776558553833833]21.58613364314203):[&rate=0.002473083275645565]14.867362935752315,(3:[&rate=0.0030574258553206575]12.54068992781497,5:[&rate=0.0029228297599825283]12.54068992781497):[&rate=0.0022536775010734453]23.912806651079375):[&rate=0.0023218485469667484]1.413 [...]
+tree STATE_2820000 [&lnP=-22213.398170839075] = [&R] (((33:[&rate=0.002379556576947075]106.06607412817404,((((((8:[&rate=0.0025176563096757447]20.503241912393154,10:[&rate=0.0024037838069712824]20.503241912393154):[&rate=0.002280036343957687]21.50177268340018,(((11:[&rate=0.0025176563096757447]6.491951670378554,12:[&rate=0.0017986073406777543]6.491951670378554):[&rate=0.002055307734546384]25.447960921220755,(13:[&rate=0.0018472380197981685]28.489023089525276,7:[&rate=0.002391529515555361 [...]
+tree STATE_2830000 [&lnP=-22212.470396302007] = [&R] (((33:[&rate=0.0028394938008213964]85.44997698218904,((((((23:[&rate=0.002574267881692921]13.411573187023974,17:[&rate=0.0026515235778146362]13.411573187023974):[&rate=0.0030841678701912616]8.193159854188773,((24:[&rate=0.0027897886126980274]11.605224180053753,((19:[&rate=0.002822954038942277]3.0418508812157583,(20:[&rate=0.0029742232120199786]2.230153952351536,21:[&rate=0.0030841678701912616]2.230153952351536):[&rate=0.003600040655187 [...]
+tree STATE_2840000 [&lnP=-22218.23923370469] = [&R] ((33:[&rate=0.001917904474652876]109.67573097940812,(14:[&rate=0.003237381675759086]96.72825527790971,((((((22:[&rate=0.002888258803750854]21.893781710962944,((15:[&rate=0.002793366737494569]11.23636618924778,(21:[&rate=0.003157240546225938]4.338123078112482,(20:[&rate=0.0022216479003506285]2.4282687168664094,19:[&rate=0.0022216479003506285]2.4282687168664094):[&rate=0.0034512147498592556]1.9098543612460728):[&rate=0.0030549513455678935 [...]
+tree STATE_2850000 [&lnP=-22222.194816670846] = [&R] (((33:[&rate=0.002861359355450761]86.07724413281757,((((((23:[&rate=0.0026948912467841587]10.781094154906492,17:[&rate=0.002479132943888348]10.781094154906492):[&rate=0.0027534782915272157]12.205422430505477,(((15:[&rate=0.00284783205191875]10.115775403149089,((21:[&rate=0.002678529525093381]3.1608485117299536,20:[&rate=0.0027920320315332646]3.1608485117299536):[&rate=0.002835380907526719]1.0420001933677692,19:[&rate=0.0027102521078885 [...]
+tree STATE_2860000 [&lnP=-22216.59820664511] = [&R] (((14:[&rate=0.0030185805229394095]79.48355693371924,((((16:[&rate=0.002904080505123716]34.912917498951046,(((7:[&rate=0.0028518544972115072]25.222309702055988,(13:[&rate=0.002719157371417216]21.7130215784111,(11:[&rate=0.0036583429602644628]5.013159901887509,12:[&rate=0.002955373677970042]5.013159901887509):[&rate=0.002872996491617367]16.699861676523593):[&rate=0.0028410968978820994]3.509288123644886):[&rate=0.002671991642756303]6.1920 [...]
+tree STATE_2870000 [&lnP=-22218.958906131615] = [&R] (((33:[&rate=0.0019628483129667256]126.76062442403801,((((16:[&rate=0.0024469413193774357]46.798953323093826,((((11:[&rate=0.0024131257344943155]9.962018401102497,12:[&rate=0.001783472501297847]9.962018401102497):[&rate=0.0021962411569221045]26.30766241014789,(13:[&rate=0.001646201656183405]32.70161997403644,7:[&rate=0.002027632928623471]32.70161997403644):[&rate=0.0021282512216069063]3.568060837213949):[&rate=0.0021827107683840157]6.0 [...]
+tree STATE_2880000 [&lnP=-22213.841089405494] = [&R] (((33:[&rate=0.003022714464672476]83.16142814540123,(((((((7:[&rate=0.002928116177998788]22.57296765755797,(13:[&rate=0.002824284299735443]16.68554481549016,(11:[&rate=0.0031080040394601653]6.312837319533433,12:[&rate=0.002946269260334698]6.312837319533433):[&rate=0.003416112700662471]10.372707495956728):[&rate=0.0035907008923306573]5.88742284206781):[&rate=0.003181076449218578]3.3342529792181494,(3:[&rate=0.0028926692143341776]12.0504 [...]
+tree STATE_2890000 [&lnP=-22204.414289961504] = [&R] (((((((((((11:[&rate=0.0031367737907422246]5.336573929704137,12:[&rate=0.003414652234217353]5.336573929704137):[&rate=0.003981814904966507]14.08972004864124,(13:[&rate=0.0025559754457183826]16.802051783105746,7:[&rate=0.003352163006189443]16.802051783105746):[&rate=0.00385231993372098]2.6242421952396313):[&rate=0.0034350103181162864]4.077823690763729,(3:[&rate=0.004320165544264433]8.975861759217652,5:[&rate=0.0034752659762349444]8.9758 [...]
+tree STATE_2900000 [&lnP=-22207.735344477984] = [&R] ((33:[&rate=0.0024487407620406914]109.97110500315014,(14:[&rate=0.0022895048137159925]99.69831254940975,((((((((11:[&rate=0.0020051427638170852]8.584112982237032,12:[&rate=0.002089472679839398]8.584112982237032):[&rate=0.00315465668563747]18.46722630998582,(13:[&rate=0.002109047523410628]22.02413797280654,7:[&rate=0.0034775567063001387]22.02413797280654):[&rate=0.0023592761066492476]5.02720131941631):[&rate=0.002430496592355385]6.41180 [...]
+tree STATE_2910000 [&lnP=-22211.153500816334] = [&R] (((((((((22:[&rate=0.0031370419534905193]19.989575295191976,(23:[&rate=0.00276453361853127]10.785605583791972,17:[&rate=0.0024402196989961223]10.785605583791972):[&rate=0.0030806548989312597]9.203969711400005):[&rate=0.002809938020714331]0.08164744248805889,(24:[&rate=0.003197176613191258]13.132826853762921,(15:[&rate=0.002809938020714331]11.738666598956502,((21:[&rate=0.0035547165101940457]1.752359275416737,20:[&rate=0.003262084399955 [...]
+tree STATE_2920000 [&lnP=-22225.716020967564] = [&R] (((14:[&rate=0.0027395947889757868]85.2755145198573,(((((22:[&rate=0.002603606201474611]24.942185038559487,((23:[&rate=0.0026630803856810526]10.059103159678221,17:[&rate=0.0023907113329460763]10.059103159678221):[&rate=0.0028528721143495648]12.968890227542213,(24:[&rate=0.0028890215581910154]13.460851044674458,(15:[&rate=0.0024650776664649683]9.215028672788517,((20:[&rate=0.0027199197093437577]2.4906892254396746,19:[&rate=0.00246507766 [...]
+tree STATE_2930000 [&lnP=-22215.006068952618] = [&R] ((33:[&rate=0.002586080996900775]103.07772719763199,((((((22:[&rate=0.0025707416470076277]23.87747946134675,((23:[&rate=0.0024131766299062777]12.541775378842683,17:[&rate=0.0024131766299062777]12.541775378842683):[&rate=0.002770350980872215]10.639492642501601,(24:[&rate=0.0026153293055372496]14.123329079895713,(15:[&rate=0.002979702973750006]11.825896814489145,((19:[&rate=0.0026697355394368553]2.2112753205796283,20:[&rate=0.00264308680 [...]
+tree STATE_2940000 [&lnP=-22217.038006728504] = [&R] (((14:[&rate=0.0021300136186739754]93.66904765147288,((((((8:[&rate=0.0022122545542172314]26.213939152204897,10:[&rate=0.0022274699417108453]26.213939152204897):[&rate=0.0021300136186739754]14.303580936742133,((((11:[&rate=0.0031387201474373004]6.277252024204196,12:[&rate=0.0027024449462841977]6.277252024204196):[&rate=0.002468225560557251]18.81713868133857,13:[&rate=0.0020725991166861556]25.094390705542764):[&rate=0.002147517459998963 [...]
+tree STATE_2950000 [&lnP=-22216.25328230731] = [&R] ((14:[&rate=0.0024807207046880046]109.3935837575948,(((((((8:[&rate=0.0025600712283733043]24.596295969815838,10:[&rate=0.0028008888633966047]24.596295969815838):[&rate=0.0030688192334787146]15.153318480572157,((3:[&rate=0.002961254812415752]16.499762158663778,5:[&rate=0.0017002189446654137]16.499762158663778):[&rate=0.002822349682014112]19.92881067607692,((7:[&rate=0.0030985763736377787]22.557939020324106,13:[&rate=0.00225950562752109]2 [...]
+tree STATE_2960000 [&lnP=-22204.323094795916] = [&R] ((33:[&rate=0.003323125298990019]73.97687783983737,(((((((23:[&rate=0.003339097979594041]8.253336521165416,17:[&rate=0.0034458341489998494]8.253336521165416):[&rate=0.0029520332392305683]11.54251029300336,((24:[&rate=0.0034008625135010686]9.096803691354806,(15:[&rate=0.0040748051549625176]6.17953673791645,((21:[&rate=0.003273275235496478]2.091910584086355,20:[&rate=0.0037315511525574693]2.091910584086355):[&rate=0.0029932177391293158]1 [...]
+tree STATE_2970000 [&lnP=-22211.847659134673] = [&R] ((33:[&rate=0.0021987133536732974]93.48180171874357,(((((((22:[&rate=0.002398348654421385]25.84829838305516,(24:[&rate=0.003284349444826839]14.440837734877025,(15:[&rate=0.0027920628630505396]11.785875331511244,(21:[&rate=0.0021109627972482167]5.5487919217510075,(20:[&rate=0.003241235692259856]3.55220911934856,19:[&rate=0.002378746820159483]3.55220911934856):[&rate=0.003473126105350415]1.9965828024024477):[&rate=0.00310897778694232]6.2 [...]
+tree STATE_2980000 [&lnP=-22218.79580794304] = [&R] ((14:[&rate=0.0023310427753642105]106.51019873848229,(((((((22:[&rate=0.0031459694265666216]22.117736892260766,(24:[&rate=0.0024601382366105633]10.138418042199332,(15:[&rate=0.0027312627244269606]9.347164675851735,((20:[&rate=0.002875248752068815]1.9957967056927837,21:[&rate=0.0032856177038926423]1.9957967056927837):[&rate=0.0032546629670086305]0.31492649041493204,19:[&rate=0.0026954401939737957]2.3107231961077157):[&rate=0.002990563903 [...]
+tree STATE_2990000 [&lnP=-22230.225890667225] = [&R] ((33:[&rate=0.0021005395845233735]115.4281723702241,(14:[&rate=0.002155212652006385]107.0731405744553,(((((((23:[&rate=0.001866038867218425]15.430234834631959,17:[&rate=0.0022592495599736526]15.430234834631959):[&rate=0.00230652331260848]13.319818913459384,(24:[&rate=0.002041579258930057]18.936437725084094,(15:[&rate=0.002403213246176178]13.331404980605877,((20:[&rate=0.002072068997886932]3.43814288156887,21:[&rate=0.002286413217780124 [...]
+tree STATE_3000000 [&lnP=-22229.504914970017] = [&R] ((33:[&rate=0.0023940949893285156]104.03013416893735,(14:[&rate=0.002304904750654169]93.22329392891582,((((((22:[&rate=0.0022535061881630665]26.48978353723367,(((19:[&rate=0.0024289061010812933]2.6252133521597827,20:[&rate=0.002461034851193743]2.6252133521597827):[&rate=0.0022952714414588987]1.1546377603624638,21:[&rate=0.002236877035630583]3.7798511125222465):[&rate=0.0023900633240036113]12.056044516001753,(24:[&rate=0.002410797662794 [...]
+tree STATE_3010000 [&lnP=-22219.250045503675] = [&R] (((14:[&rate=0.0026865041842570147]80.51234149076149,((((16:[&rate=0.002901909762148775]37.42704553519559,((8:[&rate=0.0026199072607611595]20.8179398627689,10:[&rate=0.0028624342618214556]20.8179398627689):[&rate=0.0028881911875412265]15.632558874790586,((3:[&rate=0.002569569258102785]15.101098603788838,5:[&rate=0.0023899011813037297]15.101098603788838):[&rate=0.0026770760357193273]16.30959339989341,((7:[&rate=0.0027839269880157403]19. [...]
+tree STATE_3020000 [&lnP=-22227.559370755516] = [&R] (((33:[&rate=0.001943836682429629]115.612178859287,((((16:[&rate=0.001865101899883374]44.76008935267781,(((3:[&rate=0.0024180235923166927]16.644757235524683,5:[&rate=0.00209707546218696]16.644757235524683):[&rate=0.0017756017832052846]20.695580787896365,((7:[&rate=0.002133091999040645]24.329756030093694,13:[&rate=0.002074625955052692]24.329756030093694):[&rate=0.00209707546218696]7.6690304823467415,(11:[&rate=0.0019728332998940664]8.68 [...]
+tree STATE_3030000 [&lnP=-22220.0195876856] = [&R] ((14:[&rate=0.002020132055688552]113.56647369587391,(33:[&rate=0.002385283153708466]102.88897986936901,((((((8:[&rate=0.002530799481885664]28.090352557762234,10:[&rate=0.00267715150188322]28.090352557762234):[&rate=0.003528413544206895]9.752830763582356,((3:[&rate=0.0030361842853052117]12.501714010177913,5:[&rate=0.0027370019134052483]12.501714010177913):[&rate=0.0025897071271790586]22.16150999019061,((7:[&rate=0.0031545609347019407]24.3 [...]
+tree STATE_3040000 [&lnP=-22208.575849458575] = [&R] ((33:[&rate=0.0024360019683955067]104.72851519565948,(14:[&rate=0.002691235897094774]86.3133521111971,((((((8:[&rate=0.003303230519237211]19.638174663928883,10:[&rate=0.0027959124733383566]19.638174663928883):[&rate=0.0030797143071372005]13.115618776124986,((7:[&rate=0.003540016130051065]24.852666753474544,(13:[&rate=0.0032661512252638815]20.42387320346495,(11:[&rate=0.0031981288205010132]7.073559759266692,12:[&rate=0.00238905418360434 [...]
+tree STATE_3050000 [&lnP=-22220.609320940217] = [&R] ((33:[&rate=0.0029487632180686665]97.71366858925073,((((((((23:[&rate=0.0020698186072990953]13.042440422553154,17:[&rate=0.0025699678625090904]13.042440422553154):[&rate=0.0022802478907675634]11.16921215256774,((((20:[&rate=0.002874115031244821]2.931495747843275,21:[&rate=0.002312668756279732]2.931495747843275):[&rate=0.002775969629445663]1.4313610123342522,19:[&rate=0.0027587731417923088]4.362856760177527):[&rate=0.002978119707227222] [...]
+tree STATE_3060000 [&lnP=-22218.195925226966] = [&R] ((33:[&rate=0.002624981323855955]94.9603299285283,(((((((((((21:[&rate=0.003568291726292644]3.010077663140437,20:[&rate=0.003823503083257254]3.010077663140437):[&rate=0.0034399467442688568]0.17528767956149283,19:[&rate=0.002780725586974055]3.18536534270193):[&rate=0.003346191187563358]6.835896170817302,15:[&rate=0.0028143772137954443]10.021261513519232):[&rate=0.002624981323855955]1.1987208657980162,24:[&rate=0.0031130466668955166]11.2 [...]
+tree STATE_3070000 [&lnP=-22219.8685260703] = [&R] ((33:[&rate=0.002490659424546961]102.96142398523854,(((((((8:[&rate=0.0027077570521022282]25.995196243282447,10:[&rate=0.0024522805933453445]25.995196243282447):[&rate=0.002952480600994952]10.934857671470478,((3:[&rate=0.002307413694804138]15.198131582362587,5:[&rate=0.002252669484795328]15.198131582362587):[&rate=0.002911149059728767]17.81919627501933,((7:[&rate=0.0027655641779661295]23.656214629380038,13:[&rate=0.002967063243190053]23. [...]
+tree STATE_3080000 [&lnP=-22211.18795543984] = [&R] (((33:[&rate=0.0035812965393088587]79.54077890554106,(((((((22:[&rate=0.0033082988191082906]19.494475020459433,(24:[&rate=0.003323248229479546]10.42740152003554,((19:[&rate=0.003928768467088559]3.3519363570799774,(21:[&rate=0.0030659017191213707]2.3048114577300796,20:[&rate=0.0035274420885153153]2.3048114577300796):[&rate=0.0034772088775107454]1.0471248993498978):[&rate=0.003493627529613961]4.545274192283972,15:[&rate=0.0035628565400102 [...]
+tree STATE_3090000 [&lnP=-22224.048127550515] = [&R] ((33:[&rate=0.0019740242803822487]104.08597801890208,((((((8:[&rate=0.0025154702879799772]23.884576053941277,10:[&rate=0.0029121975141992006]23.884576053941277):[&rate=0.002997085719494379]12.232521997445861,(16:[&rate=0.0025154702879799772]34.656608473842475,((3:[&rate=0.003371229925101664]14.243537011132652,5:[&rate=0.0024968902507965566]14.243537011132652):[&rate=0.003019964911639146]14.68368641708814,((7:[&rate=0.003094119633141494 [...]
+tree STATE_3100000 [&lnP=-22209.165511334835] = [&R] ((33:[&rate=0.0026645354243556955]108.18549462688918,(14:[&rate=0.0033050587622624455]79.94262480291194,((((16:[&rate=0.0028890639252862507]33.8863632047369,(((3:[&rate=0.0031959205840335764]12.945482373202953,5:[&rate=0.0028187645760979927]12.945482373202953):[&rate=0.0028716825712005204]18.623579398664102,(8:[&rate=0.002705886550811751]19.548342120072046,10:[&rate=0.002620125915887068]19.548342120072046):[&rate=0.0029236253510654744] [...]
+tree STATE_3110000 [&lnP=-22213.417882876292] = [&R] ((33:[&rate=0.002797056850511351]101.80588364416653,(14:[&rate=0.002930752487493713]87.35131139019728,(((((((3:[&rate=0.0029907477395612996]13.461793152937211,5:[&rate=0.002736223811132913]13.461793152937211):[&rate=0.003001023326398829]18.467899294864942,(((11:[&rate=0.003203653829766404]5.549630582007304,12:[&rate=0.0032216599454903995]5.549630582007304):[&rate=0.0032216599454903995]15.837138370737083,13:[&rate=0.002595456653519593]2 [...]
+tree STATE_3120000 [&lnP=-22209.6628309426] = [&R] ((33:[&rate=0.002137528048858522]101.29809807462252,(14:[&rate=0.003001878950636311]81.22413078053364,((((((((24:[&rate=0.002737424924641064]12.9232378463922,(15:[&rate=0.0021149276764287762]11.506321956923196,(19:[&rate=0.002293164358571984]5.4222431853827295,(20:[&rate=0.0017755044404759311]4.5154101214307145,21:[&rate=0.0026765131243369745]4.5154101214307145):[&rate=0.0030385322027125026]0.906833063952015):[&rate=0.0028042277332520254 [...]
+tree STATE_3130000 [&lnP=-22215.675375070114] = [&R] ((33:[&rate=0.0023533993074951716]106.58311338808738,(14:[&rate=0.0025376180167908592]93.22008676166735,((((16:[&rate=0.002290198503639436]40.61951089561571,(((3:[&rate=0.002632472508553214]14.691597407342863,5:[&rate=0.0023018949597141455]14.691597407342863):[&rate=0.002447670171003085]21.75735070748373,(((11:[&rate=0.002512479597155251]8.242286684554456,12:[&rate=0.0024961057443138625]8.242286684554456):[&rate=0.002782387425169042]15 [...]
+tree STATE_3140000 [&lnP=-22209.3921614373] = [&R] (((9:[&rate=0.005183456332354666]66.30677443730353,33:[&rate=0.003529571809738315]66.30677443730353):[&rate=0.0039109921297891224]8.373134824862689,((((16:[&rate=0.0034867101338340437]24.845523311282285,(((3:[&rate=0.004339698565814479]9.05162456622128,5:[&rate=0.0034653182800335096]9.05162456622128):[&rate=0.0039109921297891224]13.870662570932312,((7:[&rate=0.0036839701731661225]16.960296287255368,13:[&rate=0.0032674032548518374]16.9602 [...]
+tree STATE_3150000 [&lnP=-22216.519152127727] = [&R] ((14:[&rate=0.0032755545398870144]81.01304365848453,(((((((23:[&rate=0.00328989644698126]8.301553498697336,17:[&rate=0.00329992943421599]8.301553498697336):[&rate=0.0032597150887507275]10.898052475984494,(22:[&rate=0.0032799238107104308]18.251520769926834,((((21:[&rate=0.00328101071625549]2.821141081056865,20:[&rate=0.0032637921164611946]2.821141081056865):[&rate=0.003285387265086927]2.588316383477718,19:[&rate=0.003329718622531088]5.4 [...]
+tree STATE_3160000 [&lnP=-22221.051428429408] = [&R] (((14:[&rate=0.0024649881642744625]89.68270665267018,((((((((((21:[&rate=0.002087262937693701]2.1405709142278253,20:[&rate=0.003136075197147481]2.1405709142278253):[&rate=0.0033527988661017022]2.24500415455426,19:[&rate=0.0025144460620194604]4.385575068782085):[&rate=0.003745755401807416]8.097457451379489,15:[&rate=0.0018995017956801928]12.483032520161574):[&rate=0.0021736436138158895]2.5349042656849132,24:[&rate=0.002936911700527678]1 [...]
+tree STATE_3170000 [&lnP=-22223.928833719914] = [&R] (((14:[&rate=0.0027871945993925896]76.5734863404308,33:[&rate=0.0028267786279561886]76.5734863404308):[&rate=0.0029372096018431782]6.9570880946721445,((((((23:[&rate=0.0031476247326173287]9.533513157508748,17:[&rate=0.002908511339321498]9.533513157508748):[&rate=0.0024455209925541787]12.21351590130706,((24:[&rate=0.003066711808992235]12.357787706908326,(15:[&rate=0.0026236889736598996]8.906796004339565,(19:[&rate=0.002922737614415951]5 [...]
+tree STATE_3180000 [&lnP=-22219.46047372005] = [&R] (((14:[&rate=0.0024041288079134967]96.68341729765892,(((((((3:[&rate=0.0023234981599371237]15.391953975018792,5:[&rate=0.0021848225863992468]15.391953975018792):[&rate=0.002039530560839975]25.14886774172451,((11:[&rate=0.002039530560839975]8.715342508032355,12:[&rate=0.0016884649631646134]8.715342508032355):[&rate=0.002039530560839975]25.467624206789537,(13:[&rate=0.0021318259060419805]30.544227278706742,7:[&rate=0.0021848225863992468]3 [...]
+tree STATE_3190000 [&lnP=-22207.150114795113] = [&R] (((14:[&rate=0.004133534960267653]57.886494568916575,33:[&rate=0.0039509810346504025]57.886494568916575):[&rate=0.003875543771026197]4.894940565420335,((((((23:[&rate=0.0039937817497952294]7.586792646403218,17:[&rate=0.0038594917648497092]7.586792646403218):[&rate=0.003552730902944113]7.717091774029848,((24:[&rate=0.004147836343624633]8.873985739906425,(15:[&rate=0.0046435898725207874]4.822556642229268,((21:[&rate=0.004287020407733401] [...]
+tree STATE_3200000 [&lnP=-22241.74386686494] = [&R] ((33:[&rate=0.0017424854529337686]156.12912851740023,(14:[&rate=0.0017622855606260728]129.15688387053953,((((16:[&rate=0.0013890420377397014]59.380032050603546,(((((11:[&rate=0.0018032889526837614]11.591567003687498,12:[&rate=0.0016304565244873181]11.591567003687498):[&rate=0.0018485663406673605]25.629122910849762,13:[&rate=0.0014794209018404575]37.22068991453726):[&rate=0.0016221791857384288]4.460162937031974,7:[&rate=0.001638457173956 [...]
+tree STATE_3210000 [&lnP=-22219.313469762557] = [&R] ((14:[&rate=0.001999267139010275]120.51876676292059,(((((16:[&rate=0.0020305770283602018]47.15986331230295,(((((11:[&rate=0.0027814414651896008]6.814788634545791,12:[&rate=0.0024480747124286085]6.814788634545791):[&rate=0.002730587321492569]18.752410294476654,13:[&rate=0.0018557721205891156]25.567198929022446):[&rate=0.0021200540294618628]5.724852732471589,7:[&rate=0.0023510762907807408]31.292051661494035):[&rate=0.0026044327794034817] [...]
+tree STATE_3220000 [&lnP=-22206.69085204883] = [&R] (((14:[&rate=0.003206467494973671]67.54639009894272,((((16:[&rate=0.0026436555616610406]34.707085336536046,(((7:[&rate=0.002620507306148991]25.5178356062885,((11:[&rate=0.0029621017556255526]7.730078909697055,12:[&rate=0.002184659298986466]7.730078909697055):[&rate=0.0028582032064682138]14.972058872988637,13:[&rate=0.002382179703662384]22.702137782685693):[&rate=0.002909145982263805]2.8156978236028074):[&rate=0.0025275568418773606]5.159 [...]
+tree STATE_3230000 [&lnP=-22213.829933985973] = [&R] ((33:[&rate=0.003238025766203249]75.5413012272467,(14:[&rate=0.003732832207722966]72.3453314279537,(((((((23:[&rate=0.0031619695152891036]7.293505905276626,17:[&rate=0.0037941364213527726]7.293505905276626):[&rate=0.0025594222533867777]10.184936808037293,(24:[&rate=0.0036189740184090434]9.234388772770654,(15:[&rate=0.002803057829818212]8.487642975518945,(19:[&rate=0.0035918991878145113]4.882028445438004,(21:[&rate=0.0033373840895744938 [...]
+tree STATE_3240000 [&lnP=-22214.199102433602] = [&R] ((33:[&rate=0.0021641370247930895]98.0139722601835,(14:[&rate=0.0032315714499850583]84.65953547125238,(((((((23:[&rate=0.002416663883239611]9.301313363553325,17:[&rate=0.0025417582594110906]9.301313363553325):[&rate=0.0022322326217101745]15.326939146931421,((24:[&rate=0.0027497781130302393]14.751927076157028,(15:[&rate=0.002610083225538292]11.206028903996195,(19:[&rate=0.0025752119039200464]4.9077169387130155,(21:[&rate=0.0030862773976 [...]
+tree STATE_3250000 [&lnP=-22222.14813397427] = [&R] ((33:[&rate=0.0018705173219384446]136.5278668887091,(14:[&rate=0.002069711011049048]126.84683413751323,(((((((3:[&rate=0.0024817155094167434]14.649776163689587,5:[&rate=0.0017733188987758782]14.649776163689587):[&rate=0.002101568272289771]23.919917022675,((11:[&rate=0.00241458290986266]6.8038297745093,12:[&rate=0.002249534634967283]6.8038297745093):[&rate=0.002220620269314895]24.06734320552309,(7:[&rate=0.002322463694867118]28.516038844 [...]
+tree STATE_3260000 [&lnP=-22219.305618693917] = [&R] ((33:[&rate=0.0021967381807147537]121.65961550001681,(14:[&rate=0.002667169432877349]97.10969088027394,((((((3:[&rate=0.0023571466216850065]16.453116451404778,5:[&rate=0.002456725157613787]16.453116451404778):[&rate=0.002037421595092202]19.86868943481377,((11:[&rate=0.0024236202480075585]6.923469920254374,12:[&rate=0.0025407819645107002]6.923469920254374):[&rate=0.0019404528824619096]26.200123768248485,(7:[&rate=0.0028167722469871845]2 [...]
+tree STATE_3270000 [&lnP=-22216.432457194947] = [&R] ((14:[&rate=0.001857529277718954]101.27215240864209,(33:[&rate=0.0032938288349971994]75.97775062307213,((((((3:[&rate=0.0024681477769122515]16.2031154102557,5:[&rate=0.0020395661056022006]16.2031154102557):[&rate=0.0024060141931406074]15.794208630145725,((11:[&rate=0.0023897263615262967]7.394066376454647,12:[&rate=0.0026144471216975615]7.394066376454647):[&rate=0.002483134593116667]19.301560394109924,(7:[&rate=0.002856842059035317]22.2 [...]
+tree STATE_3280000 [&lnP=-22222.635794860373] = [&R] ((14:[&rate=0.002783743034226462]98.34213030395564,(33:[&rate=0.0031078848346913048]87.48728225281741,((((((23:[&rate=0.0026011123347410453]10.06201601542542,17:[&rate=0.0028276002753179633]10.06201601542542):[&rate=0.002881748679733045]11.730322188627962,((24:[&rate=0.00246491489931474]12.245885843657872,((19:[&rate=0.0022783109352228076]2.6619410768026945,(21:[&rate=0.0029962029439858664]1.9623338265863808,20:[&rate=0.002816734083250 [...]
+tree STATE_3290000 [&lnP=-22214.078901799152] = [&R] (((14:[&rate=0.0026806203446577753]87.08062521497013,(((16:[&rate=0.002850275679772025]32.74728687387322,(((8:[&rate=0.0033481839675563937]18.826245015922638,10:[&rate=0.0027330878129418457]18.826245015922638):[&rate=0.003228130245427762]10.904755269068247,((3:[&rate=0.0033909341749716864]12.21920044480662,5:[&rate=0.003031148247456231]12.21920044480662):[&rate=0.0031633890078495955]14.42741124039031,((11:[&rate=0.0033481839675563937]6 [...]
+tree STATE_3300000 [&lnP=-22215.249306806305] = [&R] (((14:[&rate=0.0022860618617049546]99.79050425810735,((((((((11:[&rate=0.0027188090086385404]6.354762960866544,12:[&rate=0.002699340123344793]6.354762960866544):[&rate=0.002504822232820313]21.7714726993175,(7:[&rate=0.0027723151758378413]24.32029190254658,13:[&rate=0.002432896343091034]24.32029190254658):[&rate=0.002455080461438464]3.805943757637465):[&rate=0.0026360415070013367]7.1797588839461355,(3:[&rate=0.002408661241159663]18.0512 [...]
+tree STATE_3310000 [&lnP=-22219.25555634369] = [&R] (((((((((23:[&rate=0.002684636925391143]12.444765817728921,17:[&rate=0.002194167802235819]12.444765817728921):[&rate=0.001981936309236425]12.40640044050695,((24:[&rate=0.002943774581694044]16.422291675976826,(((21:[&rate=0.0024528969811412524]2.119060374724645,20:[&rate=0.0026431844393563263]2.119060374724645):[&rate=0.002210149062923252]2.5457295483154856,19:[&rate=0.0019263447139282045]4.66478992304013):[&rate=0.0024685785654842826]6. [...]
+tree STATE_3320000 [&lnP=-22214.74727538757] = [&R] (((((((((8:[&rate=0.0025983332856866154]24.89069614090237,10:[&rate=0.0025522072711097257]24.89069614090237):[&rate=0.0025700952485936063]13.810375229569544,(((11:[&rate=0.0021987476793951053]8.975446707700664,12:[&rate=0.002266424633864683]8.975446707700664):[&rate=0.0023155248124181052]20.91410042195512,(7:[&rate=0.0026511727792200386]23.90057799531229,13:[&rate=0.002146066927298319]23.90057799531229):[&rate=0.002702455096664446]5.988 [...]
+tree STATE_3330000 [&lnP=-22220.874853213616] = [&R] ((14:[&rate=0.002867392294349715]98.194745827247,(33:[&rate=0.0028142454827744345]85.2132869293257,((((((((11:[&rate=0.002760143993791802]6.750834159653285,12:[&rate=0.0029026954587772365]6.750834159653285):[&rate=0.0025078622839247583]17.792276191007037,(7:[&rate=0.0037753579588542282]19.221849390999356,13:[&rate=0.002778338944410404]19.221849390999356):[&rate=0.0028850378955006493]5.321260959660965):[&rate=0.0033097533119221955]2.447 [...]
+tree STATE_3340000 [&lnP=-22214.70822013913] = [&R] ((33:[&rate=0.0027882028115083822]102.3253648366607,(14:[&rate=0.002850812248215131]87.3486354152482,((((((((11:[&rate=0.0027440687886718306]6.764769791815096,12:[&rate=0.00275538920385495]6.764769791815096):[&rate=0.0025487222978960843]19.611845848461854,(7:[&rate=0.003076797577078253]21.643246870103034,13:[&rate=0.002315780786458385]21.643246870103034):[&rate=0.0027325048916383085]4.733368770173918):[&rate=0.002570541142354105]4.85686 [...]
+tree STATE_3350000 [&lnP=-22222.615693595464] = [&R] (((14:[&rate=0.002705038357006529]103.53227760075984,((((16:[&rate=0.0023555919021217312]39.51260095000169,((((11:[&rate=0.002250014727766102]8.949729519685166,12:[&rate=0.0019285448465224111]8.949729519685166):[&rate=0.0030290431933236127]18.78692839218406,(7:[&rate=0.0024817759826411623]24.80513576967268,13:[&rate=0.0021819343705468046]24.80513576967268):[&rate=0.002437852915833959]2.931522142196542):[&rate=0.0020751855275548355]6.33 [...]
+tree STATE_3360000 [&lnP=-22223.167108239402] = [&R] (((33:[&rate=0.0026259790821153727]85.5138093943781,14:[&rate=0.0025509397480321504]85.5138093943781):[&rate=0.0024417324684414144]6.465946901442905,((((((8:[&rate=0.0029418793084049102]22.612458684461636,10:[&rate=0.0029147582202943383]22.612458684461636):[&rate=0.0024040679592476476]13.41070469650937,((7:[&rate=0.0025878796867993582]27.08073955389571,((11:[&rate=0.0028899771235566312]6.6173584241478185,12:[&rate=0.002454030424128157] [...]
+tree STATE_3370000 [&lnP=-22217.222034952116] = [&R] (((((((16:[&rate=0.00296106895265898]30.363555702040415,((((11:[&rate=0.003268926550191102]5.848229017312815,12:[&rate=0.003288027916419913]5.848229017312815):[&rate=0.004087112929203616]13.31052697627143,(7:[&rate=0.003288027916419913]17.522573597314658,13:[&rate=0.0028421647535907097]17.522573597314658):[&rate=0.003389140308245904]1.6361823962695858):[&rate=0.0023765227631358837]7.173880376580996,(3:[&rate=0.0031410653823011883]13.34 [...]
+tree STATE_3380000 [&lnP=-22220.54961054671] = [&R] ((((((((((11:[&rate=0.0031595027712932734]7.853714417300635,12:[&rate=0.0025226795871978306]7.853714417300635):[&rate=0.0024557213689136844]19.891435721847877,(7:[&rate=0.0028810588238647626]24.111047939798038,13:[&rate=0.0016796782338119]24.111047939798038):[&rate=0.0032470893837442264]3.634102199350476):[&rate=0.0018420130788897762]5.4675297560212,(3:[&rate=0.002648221960192672]16.17608862393276,5:[&rate=0.002255749157797337]16.176088 [...]
+tree STATE_3390000 [&lnP=-22223.139857176822] = [&R] (((((((((22:[&rate=0.0021669506279467345]21.99808156614131,((((20:[&rate=0.0025381488485967723]2.8742275769267533,21:[&rate=0.0026579778042315674]2.8742275769267533):[&rate=0.002151413849900681]2.633713475661771,19:[&rate=0.002094252508478543]5.507941052588524):[&rate=0.0026415986068935296]4.286827743898176,15:[&rate=0.00232254293627681]9.7947687964867):[&rate=0.0022843583621078123]5.610376326320045,24:[&rate=0.0023945428043701473]15.4 [...]
+tree STATE_3400000 [&lnP=-22217.05190710673] = [&R] (((((((((23:[&rate=0.0025215762030811662]10.45962302103143,17:[&rate=0.0021500115021747955]10.45962302103143):[&rate=0.0023136892265450963]13.324006230234275,(22:[&rate=0.0027596277761517828]22.878168211926024,((((19:[&rate=0.0023697321212077325]3.1667754737774367,20:[&rate=0.0023428722264554613]3.1667754737774367):[&rate=0.0022814538274015675]0.63436581216357,21:[&rate=0.002678029599015826]3.8011412859410068):[&rate=0.00246205428961149 [...]
+tree STATE_3410000 [&lnP=-22216.53840930539] = [&R] (((((((((23:[&rate=0.002838860615248787]11.485008744400428,17:[&rate=0.0030233717603514233]11.485008744400428):[&rate=0.0029529641559190617]10.532859677455217,((((21:[&rate=0.0026011393873813966]1.9498868907888656,20:[&rate=0.002838860615248787]1.9498868907888656):[&rate=0.0023224351721121716]1.8409168525366328,19:[&rate=0.0031107501390712526]3.7908037433254984):[&rate=0.0028056002157603433]7.376507913183063,15:[&rate=0.0030233717603514 [...]
+tree STATE_3420000 [&lnP=-22228.517465781428] = [&R] (((((((((23:[&rate=0.0019231282288069648]16.619470965378486,17:[&rate=0.001974544421930773]16.619470965378486):[&rate=0.002249121783786316]12.21546267528279,(22:[&rate=0.0021896873572227844]25.218335177571756,((((20:[&rate=0.002152192545990062]3.879861684775863,21:[&rate=0.0017770564970457574]3.879861684775863):[&rate=0.0021896873572227844]0.4028980450555024,19:[&rate=0.0024016414694103443]4.2827597298313655):[&rate=0.00213370090289928 [...]
+tree STATE_3430000 [&lnP=-22208.548843518452] = [&R] (((33:[&rate=0.003240772072671676]96.13523561856499,((((((((15:[&rate=0.002561740469655188]10.400716376057028,((20:[&rate=0.002852591542740454]2.9907516933017733,21:[&rate=0.002909956789946675]2.9907516933017733):[&rate=0.002474296597572209]0.5300878751488085,19:[&rate=0.0027520390032358425]3.5208395684505818):[&rate=0.0023431220942304177]6.879876807606447):[&rate=0.002489224505402948]4.18806950832289,24:[&rate=0.0027520390032358425]14 [...]
+tree STATE_3440000 [&lnP=-22234.10664589566] = [&R] (((14:[&rate=0.002469983793547029]100.06701463913834,((((16:[&rate=0.0024675478679208706]42.03055433838125,((((11:[&rate=0.002440143662583642]7.041365452364864,12:[&rate=0.0024493739880800795]7.041365452364864):[&rate=0.00242917738733234]21.495878655129133,(7:[&rate=0.0024693527173248603]25.284571245435142,13:[&rate=0.002474267175331711]25.284571245435142):[&rate=0.002450906421338786]3.252672862058855):[&rate=0.0024836265797568103]8.072 [...]
+tree STATE_3450000 [&lnP=-22210.741998733978] = [&R] ((33:[&rate=0.0031074620891331194]86.54358716948725,(14:[&rate=0.0030026233916062048]78.51688626996409,((((((((11:[&rate=0.0037218197028002324]6.5105276762828055,12:[&rate=0.002809432290407553]6.5105276762828055):[&rate=0.0032741842136931603]16.725434879965064,(7:[&rate=0.002987083900858227]20.230851247857057,13:[&rate=0.0030331324086619086]20.230851247857057):[&rate=0.002922118869174742]3.0051113083908128):[&rate=0.003580332865682827] [...]
+tree STATE_3460000 [&lnP=-22222.28796195672] = [&R] ((14:[&rate=0.002269175376757528]104.59867024465939,(33:[&rate=0.002852528143327912]90.58348355659317,((((((23:[&rate=0.0028963054176039094]9.133702851836004,17:[&rate=0.0024304923379224862]9.133702851836004):[&rate=0.0025660039545159557]12.938158448016418,(((((20:[&rate=0.002703930018112196]2.09374078634788,19:[&rate=0.0032323214000037686]2.09374078634788):[&rate=0.002163752880938638]3.310270262891444,21:[&rate=0.0026773563738122356]5. [...]
+tree STATE_3470000 [&lnP=-22210.95688359466] = [&R] (((33:[&rate=0.0030978781944497606]77.87615879099603,((((((((((20:[&rate=0.0029758140267121787]4.247261756940726,21:[&rate=0.00389516472327192]4.247261756940726):[&rate=0.003390427551584947]0.5954733427342669,19:[&rate=0.003625954151955809]4.842735099674993):[&rate=0.0030978781944497606]4.504884454554215,15:[&rate=0.0031855532068417107]9.347619554229208):[&rate=0.0029606654086744314]0.9502749866443345,24:[&rate=0.0032233318864536868]10. [...]
+tree STATE_3480000 [&lnP=-22210.368001664778] = [&R] (((14:[&rate=0.0021727149941776717]107.91546130783664,(((((((((19:[&rate=0.0018465015832767426]5.944124688139269,(20:[&rate=0.0027063206988528553]2.8779365513824473,21:[&rate=0.002130070079289685]2.8779365513824473):[&rate=0.0020402097113459486]3.0661881367568213):[&rate=0.0023907066758355367]6.273927050445726,15:[&rate=0.002457374634179047]12.218051738584995):[&rate=0.0023000586756647314]4.238032410363244,24:[&rate=0.00214438244622186 [...]
+tree STATE_3490000 [&lnP=-22223.77094282249] = [&R] (((33:[&rate=0.002772217012752513]96.89201931928778,((((((23:[&rate=0.0021841882541601787]9.215589061652954,17:[&rate=0.0028543138191547846]9.215589061652954):[&rate=0.0026622882749790626]12.387485822352842,(22:[&rate=0.0028860803537211375]20.962707518684095,(((19:[&rate=0.002200699219977445]4.4433180741789124,(20:[&rate=0.0029209382916840425]1.9986373187356392,21:[&rate=0.002797775249442405]1.9986373187356392):[&rate=0.0020526459602246 [...]
+tree STATE_3500000 [&lnP=-22212.17702140683] = [&R] ((33:[&rate=0.0030803952388237005]94.3150503530322,(14:[&rate=0.003184820573906232]84.01660683990909,(((16:[&rate=0.00236852169273952]40.0647375034625,((((23:[&rate=0.002759722274634047]10.378943393632504,17:[&rate=0.002839720907493168]10.378943393632504):[&rate=0.002206999571300663]11.399098398475822,(((((21:[&rate=0.002759722274634047]1.5789389305195414,20:[&rate=0.0035667133798956818]1.5789389305195414):[&rate=0.0026602587444883507]1 [...]
+tree STATE_3510000 [&lnP=-22215.616212202203] = [&R] (((14:[&rate=0.0030121527339910205]92.22653510732404,(((((((11:[&rate=0.002589646282306412]7.035763479535547,12:[&rate=0.002152286210942562]7.035763479535547):[&rate=0.0019947228558986276]23.9175269641474,(7:[&rate=0.0024589911169996762]25.33279445854979,13:[&rate=0.0017767331910848586]25.33279445854979):[&rate=0.002092248281180005]5.620495985133154):[&rate=0.003111682135714247]2.462281630854882,(3:[&rate=0.0022401266246399217]17.62339 [...]
+tree STATE_3520000 [&lnP=-22215.710251323362] = [&R] (((14:[&rate=0.0031629070644739864]75.06714772943718,((((16:[&rate=0.0028048912299437345]34.06941465721586,((8:[&rate=0.002639302234561514]22.19419648901226,10:[&rate=0.002750931686385762]22.19419648901226):[&rate=0.002907995473051728]10.292514223851281,(((11:[&rate=0.002860564235354394]6.149763160130218,12:[&rate=0.002662729121002478]6.149763160130218):[&rate=0.002883847527134669]17.71732327567605,(7:[&rate=0.0030719686259153443]18.94 [...]
+tree STATE_3530000 [&lnP=-22208.747619814167] = [&R] (((14:[&rate=0.00335636400790598]84.19804269255913,((((16:[&rate=0.0025855909718384513]35.14291870695947,((((11:[&rate=0.0031820206718582594]6.007189426756364,12:[&rate=0.002767038914246413]6.007189426756364):[&rate=0.0028805744973475125]20.3614425887533,(7:[&rate=0.0028805744973475125]20.368865570704717,13:[&rate=0.002767038914246413]20.368865570704717):[&rate=0.0025643460256254986]5.999766444804944):[&rate=0.0028161033826960506]5.070 [...]
+tree STATE_3540000 [&lnP=-22217.618114261877] = [&R] (9:[&rate=0.002539064843421591]151.58786731657693,((14:[&rate=0.0026074092639992268]90.58283380210207,((((16:[&rate=0.0034317106610781073]31.3435219604474,((8:[&rate=0.002520548489668706]21.15043953257314,10:[&rate=0.00311850093924412]21.15043953257314):[&rate=0.0032138870464947563]9.66295735955672,((7:[&rate=0.0032719451550406713]23.929125601305163,((11:[&rate=0.002921620226253678]5.149341213299207,12:[&rate=0.002654626567246895]5.149 [...]
+tree STATE_3550000 [&lnP=-22213.004612694516] = [&R] (9:[&rate=0.003496803906542857]104.65080720459737,(14:[&rate=0.0029581553074753478]87.51580508632824,(33:[&rate=0.0030518655352590844]81.32721921506997,(((16:[&rate=0.0026647609181962044]36.92136068042717,((((7:[&rate=0.0033093197722582745]26.27595456191144,((11:[&rate=0.002760342693910684]7.410214131643278,12:[&rate=0.002640219188921116]7.410214131643278):[&rate=0.003374341143637682]13.00721172333083,13:[&rate=0.0027259787047274834]20 [...]
+tree STATE_3560000 [&lnP=-22218.580799805262] = [&R] (9:[&rate=0.003531134245576768]106.23656704116601,((14:[&rate=0.003413965689098265]71.24351100566498,((((16:[&rate=0.0034272642232065394]29.45522776492804,((7:[&rate=0.0032939043973508326]23.234047501060054,((11:[&rate=0.003477129190321884]5.508795564516311,12:[&rate=0.0031600235208541585]5.508795564516311):[&rate=0.00329887407648756]13.255426343181675,13:[&rate=0.0030874079555422234]18.764221907697987):[&rate=0.0033380136709061587]4.4 [...]
+tree STATE_3570000 [&lnP=-22228.86679427258] = [&R] (9:[&rate=0.0026702098623780962]141.5227664582692,(((((16:[&rate=0.002594461767542534]36.86965559450915,((7:[&rate=0.0027701713300526017]25.074668381489037,((11:[&rate=0.003438248393832959]4.268152160324909,12:[&rate=0.0023495160925568227]4.268152160324909):[&rate=0.00295801585376995]17.62351933821173,13:[&rate=0.002372550235875827]21.89167149853664):[&rate=0.002878789089310717]3.1829968829523985):[&rate=0.002707385930271506]9.920084163 [...]
+tree STATE_3580000 [&lnP=-22230.314125447356] = [&R] (9:[&rate=0.0028259198177740426]142.56005845527662,((14:[&rate=0.0028293365769992127]96.80325891895801,((((((((11:[&rate=0.0028602754519784766]5.954100620325903,12:[&rate=0.0028564132950441527]5.954100620325903):[&rate=0.002835300375986288]23.40269481068353,(7:[&rate=0.0028602754519784766]24.28583424663058,13:[&rate=0.002851811285052158]24.28583424663058):[&rate=0.0028250510597283693]5.070961184378856):[&rate=0.0028654058310524765]4.11 [...]
+tree STATE_3590000 [&lnP=-22214.254903512454] = [&R] ((33:[&rate=0.002480716391154798]101.46973218094983,(((((16:[&rate=0.002519298599296971]38.37620897907877,((8:[&rate=0.002757046955726786]21.379717369330354,10:[&rate=0.0026188368645189163]21.379717369330354):[&rate=0.0029623235370659674]14.52975736815483,(((11:[&rate=0.0027058984525195145]9.120451133366117,12:[&rate=0.002426557210475466]9.120451133366117):[&rate=0.002677467923101715]16.67480434435071,(7:[&rate=0.0031793023464908327]20 [...]
+tree STATE_3600000 [&lnP=-22208.91281953815] = [&R] ((33:[&rate=0.0026119355919151234]98.55396931193377,(14:[&rate=0.0032207002435501886]70.03528511360715,((((16:[&rate=0.0032433490579497565]32.49974317510156,((((11:[&rate=0.0021295132004099447]9.122615612700102,12:[&rate=0.0022993165186755283]9.122615612700102):[&rate=0.003590269476611955]14.897330187019865,(7:[&rate=0.0031078191852349484]19.716549342939217,13:[&rate=0.0030621484232167834]19.716549342939217):[&rate=0.0036493327854973914 [...]
+tree STATE_3610000 [&lnP=-22216.787718549975] = [&R] (((((((((8:[&rate=0.0025055002724004077]23.216899576386393,10:[&rate=0.002291436856179404]23.216899576386393):[&rate=0.002082457640424567]16.890321518179984,(((11:[&rate=0.0029397729813877764]7.073213667973221,12:[&rate=0.0021846314735693745]7.073213667973221):[&rate=0.0023313143630484643]21.11388392700743,(7:[&rate=0.003111163881851602]22.258892544748388,13:[&rate=0.002473597556180577]22.258892544748388):[&rate=0.0020496563996094254]5 [...]
+tree STATE_3620000 [&lnP=-22212.978998016362] = [&R] (((14:[&rate=0.0034562116327942136]67.62786007263283,((((((8:[&rate=0.0029221488123565243]20.522730520848576,10:[&rate=0.003132119737471099]20.522730520848576):[&rate=0.0034399751489422007]12.354502185494574,((7:[&rate=0.0029033107431789516]24.055542346031835,(13:[&rate=0.0028598807172114087]20.90409533951139,(11:[&rate=0.0030520094145904293]6.074409586583168,12:[&rate=0.0032504552061580344]6.074409586583168):[&rate=0.00297147803042960 [...]
+tree STATE_3630000 [&lnP=-22225.33296512536] = [&R] ((((((16:[&rate=0.0021333178223941597]51.43230941542064,((8:[&rate=0.0020606700181068642]28.306623155990334,10:[&rate=0.0020436713324329084]28.306623155990334):[&rate=0.0016478374341315823]20.484244447662693,(((11:[&rate=0.001993562057528487]7.514515241300137,12:[&rate=0.002236685709171956]7.514515241300137):[&rate=0.0020018971641938004]24.355098108806423,(13:[&rate=0.0018443581271481043]25.432826267123147,7:[&rate=0.002322084477473239] [...]
+tree STATE_3640000 [&lnP=-22211.147600939206] = [&R] ((33:[&rate=0.0027806153879016415]101.15959551012601,(((((16:[&rate=0.002567647532443628]36.20800169067145,((8:[&rate=0.002865376671361652]21.571639848712756,10:[&rate=0.0026954056332536653]21.571639848712756):[&rate=0.003425319704118761]11.841926493799349,(((7:[&rate=0.0032857367286459535]20.736226634636886,13:[&rate=0.0024839028073877297]20.736226634636886):[&rate=0.0026603737837793866]3.790108005916423,(11:[&rate=0.00317857127253498 [...]
+tree STATE_3650000 [&lnP=-22213.1108606864] = [&R] ((33:[&rate=0.0030378513896487293]89.53710152662359,(14:[&rate=0.003257393384738252]69.73578226967136,((((((((23:[&rate=0.003160273110586228]9.382532167012856,17:[&rate=0.0027558195866403527]9.382532167012856):[&rate=0.003106314885720771]11.344101615113948,22:[&rate=0.0029652502930954763]20.726633782126804):[&rate=0.0029009141397529385]0.2908023003700073,(24:[&rate=0.003243083836784086]10.94393532151706,(((21:[&rate=0.0037725871879074795 [...]
+tree STATE_3660000 [&lnP=-22213.798218274715] = [&R] (((((((((((11:[&rate=0.0024225368998432237]7.210555788684056,12:[&rate=0.002454634263634856]7.210555788684056):[&rate=0.002946634643427557]13.007070662120418,13:[&rate=0.0021096228428548122]20.217626450804474):[&rate=0.002255698496340893]5.158208460977733,7:[&rate=0.002826953227381363]25.375834911782206):[&rate=0.0032275711680661004]7.233667044557471,((8:[&rate=0.003304311030305997]18.97060740886047,10:[&rate=0.003454191021187242]18.97 [...]
+tree STATE_3670000 [&lnP=-22216.578967841855] = [&R] ((14:[&rate=0.0019771195838227786]125.69557636720197,(33:[&rate=0.0020755789355284943]121.0337546224404,((((16:[&rate=0.002366222743637843]44.541313385306616,((((11:[&rate=0.0026436436726185375]8.114184347810303,12:[&rate=0.0023411559676517037]8.114184347810303):[&rate=0.0020755789355284943]25.253562226077342,(13:[&rate=0.0018153885337542919]30.12459669364485,7:[&rate=0.0017209456828346016]30.12459669364485):[&rate=0.003037437470940766 [...]
+tree STATE_3680000 [&lnP=-22218.418425017997] = [&R] ((33:[&rate=0.0021511988881719015]106.5684188729881,(((((((18:[&rate=0.002414116683424677]26.474919144743417,(26:[&rate=0.0025858357297122154]16.86279999865506,25:[&rate=0.002527592922268401]16.86279999865506):[&rate=0.002538141129863739]9.612119146088357):[&rate=0.0025076590492194317]7.219789001714915,(2:[&rate=0.0022810069767542733]29.527425752726728,((1:[&rate=0.0026146009891158915]8.215761578317583,6:[&rate=0.0025076590492194317]8. [...]
+tree STATE_3690000 [&lnP=-22220.070884836343] = [&R] ((33:[&rate=0.0021328631616540864]123.41037215732206,((((((((3:[&rate=0.0024064297709206683]19.42334536622675,5:[&rate=0.0021940733795095006]19.42334536622675):[&rate=0.0023438297926490115]18.32846847046287,(((11:[&rate=0.002499178827030276]7.855293606509739,12:[&rate=0.002352285181870126]7.855293606509739):[&rate=0.002606274870477117]20.5111865292172,13:[&rate=0.0023351050733125818]28.36648013572694):[&rate=0.0028066260332290584]3.841 [...]
+tree STATE_3700000 [&lnP=-22224.708960506967] = [&R] ((33:[&rate=0.0023027222867805204]109.85222964497324,(((((16:[&rate=0.0023027222867805204]35.36981557299448,(((3:[&rate=0.0032460020711840686]11.604354155883923,5:[&rate=0.0030622586251833764]11.604354155883923):[&rate=0.0020712068753635585]19.382520493406773,(8:[&rate=0.00310804691874753]19.881305660106918,10:[&rate=0.0026444660927477906]19.881305660106918):[&rate=0.002788981523507394]11.10556898918378):[&rate=0.00310804691874753]2.91 [...]
+tree STATE_3710000 [&lnP=-22229.986793761153] = [&R] (((14:[&rate=0.0019730070029791965]119.06149075296031,((((((18:[&rate=0.0018678438569939139]37.084942977160885,(26:[&rate=0.00213564504163591]19.398860193200147,25:[&rate=0.001999502499936028]19.398860193200147):[&rate=0.0014782303024795515]17.68608278396074):[&rate=0.00178380913458733]6.383945948507069,(2:[&rate=0.001964486418608868]37.28392072489369,((1:[&rate=0.0018757913597148125]10.16395368243955,6:[&rate=0.00178380913458733]10.16 [...]
+tree STATE_3720000 [&lnP=-22212.674593574284] = [&R] (((14:[&rate=0.0031051535857683733]75.22591745486845,((((((18:[&rate=0.003578193656086794]22.402864100026726,(26:[&rate=0.002589292231396367]16.735215371507824,25:[&rate=0.002103407394887862]16.735215371507824):[&rate=0.004032570963567961]5.667648728518902):[&rate=0.002589292231396367]6.670644024014038,(2:[&rate=0.0029194905414687694]23.902977267303214,((1:[&rate=0.004549267537865908]4.9543180335228785,6:[&rate=0.002896234901354696]4.9 [...]
+tree STATE_3730000 [&lnP=-22206.146539116053] = [&R] ((33:[&rate=0.003109332213000406]86.10002409718943,(((((16:[&rate=0.003463601018283804]28.768700294294316,((8:[&rate=0.0037460893520189524]19.336017802206342,10:[&rate=0.003397555467612778]19.336017802206342):[&rate=0.0041074952055122505]9.142755699167669,((3:[&rate=0.003463601018283804]11.804932828594765,5:[&rate=0.003408311150019414]11.804932828594765):[&rate=0.0035234234713016166]13.968595692771924,((7:[&rate=0.0036582924612844044]1 [...]
+tree STATE_3740000 [&lnP=-22217.456083601915] = [&R] ((33:[&rate=0.0016975338357416436]115.23488597476324,(((((16:[&rate=0.0024699674457118567]41.49095984713107,((8:[&rate=0.002912320267166879]25.476703682778183,10:[&rate=0.0024699674457118567]25.476703682778183):[&rate=0.0026627392438106916]13.961394302398116,((3:[&rate=0.0020907955221523846]15.842782957881353,5:[&rate=0.002155420057266124]15.842782957881353):[&rate=0.002134363299706258]20.913717955367552,((7:[&rate=0.003171744190804422 [...]
+tree STATE_3750000 [&lnP=-22214.651948195522] = [&R] ((33:[&rate=0.0020445662969376622]130.54682405052486,(14:[&rate=0.002879290165345145]100.97773920708657,((((16:[&rate=0.0025767391001809974]38.29064863751853,((8:[&rate=0.003105940985645232]22.109189693165327,10:[&rate=0.0028396607032317835]22.109189693165327):[&rate=0.0029580506343406423]12.485001242040504,((3:[&rate=0.0023951685638226375]14.435089315428499,5:[&rate=0.002418037822709506]14.435089315428499):[&rate=0.002822124255317955] [...]
+tree STATE_3760000 [&lnP=-22210.305766370002] = [&R] ((14:[&rate=0.0020861427514002663]107.30251127884097,(33:[&rate=0.00260699461323101]91.20475821927978,((((((18:[&rate=0.0025705547579522284]27.505929872145806,(26:[&rate=0.0035654279189505475]14.269916115849627,25:[&rate=0.0023626738207229363]14.269916115849627):[&rate=0.0022737940359100674]13.23601375629618):[&rate=0.003329908424229125]3.414991368934853,(2:[&rate=0.0034258796293887704]25.76887700434587,((1:[&rate=0.003255639811347457] [...]
+tree STATE_3770000 [&lnP=-22219.95521590232] = [&R] ((33:[&rate=0.002499926051722412]111.41865980976449,(((((((8:[&rate=0.0030014499817828852]20.15023850950465,10:[&rate=0.0030961359603285518]20.15023850950465):[&rate=0.0022427848848639437]15.144776629714496,((3:[&rate=0.0028641439584612726]12.033603081829108,5:[&rate=0.0030961359603285518]12.033603081829108):[&rate=0.002606675421285943]18.780262553377995,((11:[&rate=0.002658937747312028]7.324022350522118,12:[&rate=0.002783050907739195]7 [...]
+tree STATE_3780000 [&lnP=-22212.621162464166] = [&R] ((33:[&rate=0.0025004570090145623]101.89718905237896,(((((((((7:[&rate=0.002681184546339289]23.64520126140095,13:[&rate=0.0025004570090145623]23.64520126140095):[&rate=0.00271580010010388]3.229966542902897,(11:[&rate=0.002439526140091859]8.123139912490661,12:[&rate=0.002371787249379385]8.123139912490661):[&rate=0.0030833761236526893]18.752027891813185):[&rate=0.0026464121923862683]4.171555513586512,(3:[&rate=0.0028570893303173014]12.60 [...]
+tree STATE_3790000 [&lnP=-22217.20239018482] = [&R] ((33:[&rate=0.0023971098804434904]93.37625228820065,(14:[&rate=0.0027292165183401447]86.75408172106741,((((((8:[&rate=0.003102835959699914]20.473693790569182,10:[&rate=0.0026271667011359932]20.473693790569182):[&rate=0.002601915861567985]13.329765384701805,(((7:[&rate=0.003172245943643873]22.864134013390863,13:[&rate=0.0018618705878288009]22.864134013390863):[&rate=0.0024751920621547095]3.9148927187794804,(11:[&rate=0.003473746663909652 [...]
+tree STATE_3800000 [&lnP=-22219.586492429535] = [&R] ((33:[&rate=0.002804227983816313]100.50488736287913,(((((((18:[&rate=0.0030456752775105426]24.331969028623607,(26:[&rate=0.0027394657049539845]13.610309310017765,25:[&rate=0.002974828150844635]13.610309310017765):[&rate=0.002862044704197663]10.721659718605842):[&rate=0.0026290862426881353]7.3655709315541635,(2:[&rate=0.003058860792596293]27.607519941034905,((1:[&rate=0.0028943718315710782]7.7588212453399645,6:[&rate=0.00275510503818635 [...]
+tree STATE_3810000 [&lnP=-22223.881454072067] = [&R] (((14:[&rate=0.002525110042473199]101.13437092187449,((((((18:[&rate=0.0026296652931700705]28.20453706767657,(26:[&rate=0.0023966988697070027]18.653419054130584,25:[&rate=0.002430106494519809]18.653419054130584):[&rate=0.002484059529844391]9.551118013545985):[&rate=0.0027749724627031285]7.316633437424628,(2:[&rate=0.0025559626005581468]28.15205219824719,((1:[&rate=0.002648431448986444]8.454992230297252,6:[&rate=0.002249769402462006]8.4 [...]
+tree STATE_3820000 [&lnP=-22223.223944858455] = [&R] ((33:[&rate=0.0025348917495015355]107.62002183775593,(14:[&rate=0.002729286001645858]88.62466535866332,((((((18:[&rate=0.0026512077776945837]26.797598396197504,(26:[&rate=0.002608418730121461]17.103545438926975,25:[&rate=0.003028004799265927]17.103545438926975):[&rate=0.002708059334324136]9.694052957270529):[&rate=0.0027645462696972416]5.994556887589791,(2:[&rate=0.0024071326661609877]29.331418285804617,((1:[&rate=0.0024382309618298527 [...]
+tree STATE_3830000 [&lnP=-22223.230051021757] = [&R] (((14:[&rate=0.0035460585620526145]80.60273841072856,33:[&rate=0.0034418618906212357]80.60273841072856):[&rate=0.002545882954697486]11.878892455658644,((((((8:[&rate=0.0036621402974716015]15.10715705672375,10:[&rate=0.0035832125880072624]15.10715705672375):[&rate=0.0028445362687227697]9.867638160605882,16:[&rate=0.003475518834578117]24.974795217329632):[&rate=0.0030336462238587615]4.568487836517878,(((11:[&rate=0.003475518834578117]6.3 [...]
+tree STATE_3840000 [&lnP=-22222.275564567833] = [&R] ((33:[&rate=0.002290477209283815]107.1038059547007,(14:[&rate=0.002449717845625868]86.75668846707337,((((16:[&rate=0.0021914497628535427]41.34938024163787,((8:[&rate=0.002479315172691011]22.366655861275206,10:[&rate=0.0024401220911011738]22.366655861275206):[&rate=0.0026240278881551]16.611920000972887,((7:[&rate=0.0020488928040997625]30.16917019690827,(13:[&rate=0.002270464144923432]22.677816899656627,(11:[&rate=0.0026398938442566493]6 [...]
+tree STATE_3850000 [&lnP=-22212.324574103342] = [&R] ((33:[&rate=0.0021748267548703415]103.4069985663792,(((((((8:[&rate=0.0031144862191688797]25.898889228670242,10:[&rate=0.0024926185138865373]25.898889228670242):[&rate=0.0031144862191688797]11.10048414196546,(((11:[&rate=0.0027056728825695098]6.198711583384072,12:[&rate=0.0025973988020837435]6.198711583384072):[&rate=0.0022642799237942495]23.11016017053277,(13:[&rate=0.0021748267548703415]22.082029045874002,7:[&rate=0.00289455132809533 [...]
+tree STATE_3860000 [&lnP=-22214.21370064041] = [&R] ((33:[&rate=0.0019686615204984412]119.42804851942277,(((((((8:[&rate=0.0028097830944161335]21.966368898973982,10:[&rate=0.0028097830944161335]21.966368898973982):[&rate=0.0022670506657356507]15.845740244343652,((3:[&rate=0.0027090508109937875]12.447361897693385,5:[&rate=0.0028382632781362065]12.447361897693385):[&rate=0.002457460548702373]22.426958128297457,(7:[&rate=0.002763400122103102]28.590944345623555,(13:[&rate=0.00237460302695354 [...]
+tree STATE_3870000 [&lnP=-22220.361883914255] = [&R] (((14:[&rate=0.002375910179898302]95.62231995798959,((((((18:[&rate=0.0021502552465814456]33.807793473576446,(26:[&rate=0.0021933404488617757]20.473646482704773,25:[&rate=0.0020916152609884234]20.473646482704773):[&rate=0.0022158131805760533]13.334146990871673):[&rate=0.0020335373699513727]5.081732019961194,(2:[&rate=0.002056934948048653]33.26232445708432,((1:[&rate=0.0024410062110345123]9.224917760465058,6:[&rate=0.0021686167935725084 [...]
+tree STATE_3880000 [&lnP=-22211.67583235257] = [&R] ((9:[&rate=0.0045530825574133255]88.68996992179677,33:[&rate=0.002337229997795563]88.68996992179677):[&rate=0.0036887370634805694]7.259950368690909,(14:[&rate=0.0031887778257370628]82.33416904164129,((((((3:[&rate=0.002419047658494425]16.611342426161535,5:[&rate=0.002691431507247076]16.611342426161535):[&rate=0.0028788051405335274]12.88517453214439,((11:[&rate=0.0038729155019164184]4.808360136191061,12:[&rate=0.0035975816136695096]4.808 [...]
+tree STATE_3890000 [&lnP=-22213.38636447063] = [&R] (9:[&rate=0.002409778481214024]143.84692839463935,(33:[&rate=0.0027681283558239796]109.1817389021989,(((((((18:[&rate=0.002497371307662191]27.186785255144077,(26:[&rate=0.0025779652100768654]17.75864744499458,25:[&rate=0.0024306354941521224]17.75864744499458):[&rate=0.002957232306074697]9.428137810149497):[&rate=0.0029998197544657914]5.96897292819293,(2:[&rate=0.0026334614328339598]28.010566161235147,((1:[&rate=0.002782471762892349]9.32 [...]
+tree STATE_3900000 [&lnP=-22222.21966644162] = [&R] (9:[&rate=0.002594444290505778]143.45586821858586,(33:[&rate=0.002653504284379408]89.23371628794064,(14:[&rate=0.002653504284379408]82.15570537144306,(((((((24:[&rate=0.002602016890198792]13.467322981931693,(15:[&rate=0.0028064730649167667]9.794176028755409,(19:[&rate=0.002594444290505778]3.4250924057360597,(21:[&rate=0.0027382600064929487]2.239554069598785,20:[&rate=0.0026971525630147064]2.239554069598785):[&rate=0.0028264270917115627] [...]
+tree STATE_3910000 [&lnP=-22221.199883393554] = [&R] (9:[&rate=0.002384490543591944]162.43818535357738,(14:[&rate=0.00214552097872652]112.418144731076,(33:[&rate=0.002700644159827006]100.2745417052384,(((((((24:[&rate=0.0022677792602681587]16.302436467380698,(15:[&rate=0.002725315259407105]9.025484719942543,(21:[&rate=0.002123892690643251]3.4565521074699106,(20:[&rate=0.0032504025130004957]1.7732451247773406,19:[&rate=0.0032504025130004957]1.7732451247773406):[&rate=0.002443132561106435] [...]
+tree STATE_3920000 [&lnP=-22211.24428825743] = [&R] (9:[&rate=0.004053380270119877]108.59352480298942,(33:[&rate=0.0026594613180576745]89.40884181737388,(14:[&rate=0.004881954445514085]68.30170328086867,((((16:[&rate=0.003155519574676079]29.89092908808302,((8:[&rate=0.0046045016536512]13.68337037558164,10:[&rate=0.004215723855313938]13.68337037558164):[&rate=0.0031930399513786047]14.64785489325351,((3:[&rate=0.00388008472544106]10.973697651079746,5:[&rate=0.0029189208141872968]10.9736976 [...]
+tree STATE_3930000 [&lnP=-22210.941248922587] = [&R] (9:[&rate=0.0022392970611572836]149.70842376189498,(33:[&rate=0.001916641053855647]114.91405036424405,((((((((3:[&rate=0.0025370573248187124]14.656325701811523,5:[&rate=0.0026168906175941743]14.656325701811523):[&rate=0.0020485715103118743]21.45171575951246,((7:[&rate=0.0027573493746666468]25.20790409751198,13:[&rate=0.0022392970611572836]25.20790409751198):[&rate=0.0021288730335328335]5.958152113328122,(11:[&rate=0.0025370573248187124 [...]
+tree STATE_3940000 [&lnP=-22214.437930416167] = [&R] (9:[&rate=0.0031141726417667232]129.32108575671094,(33:[&rate=0.002699513567656718]104.4643782450524,(14:[&rate=0.0032159249977543225]91.50211939021885,(((((((3:[&rate=0.003432217860811747]11.414285406283101,5:[&rate=0.002610568208938307]11.414285406283101):[&rate=0.0028481005103669675]20.252969068506687,((7:[&rate=0.0033633161124150884]20.227736947090875,13:[&rate=0.002101058334236922]20.227736947090875):[&rate=0.0020140666961831794]6 [...]
+tree STATE_3950000 [&lnP=-22216.837632882427] = [&R] (9:[&rate=0.0025533347663191618]142.7013954367371,((14:[&rate=0.0024478261823621095]99.69604044927208,((((((8:[&rate=0.0023467639910973795]27.45223462425006,10:[&rate=0.0022493438199822145]27.45223462425006):[&rate=0.0023167527621107834]16.713982184659116,((3:[&rate=0.0028257699999255995]14.348536540679607,5:[&rate=0.002025639520910021]14.348536540679607):[&rate=0.0020817984215581492]25.699790180302312,(((11:[&rate=0.002128898508166101 [...]
+tree STATE_3960000 [&lnP=-22219.28185355342] = [&R] (9:[&rate=0.0026609957079237537]140.37484004396697,((((((16:[&rate=0.0025855388793408615]36.9536452794042,(((3:[&rate=0.0022951480882455583]14.243948392150811,5:[&rate=0.0028300317322505885]14.243948392150811):[&rate=0.0031970166934969196]17.42962062409905,((7:[&rate=0.0028815577771981933]21.11558536390465,13:[&rate=0.0022253258612517203]21.11558536390465):[&rate=0.002863692645034693]6.167382489498252,(11:[&rate=0.0029199058101545823]8. [...]
+tree STATE_3970000 [&lnP=-22205.167912533314] = [&R] (9:[&rate=0.0034727537251712255]122.36309966742209,((((((16:[&rate=0.0031840836304734507]32.90172213145987,(((3:[&rate=0.003166964184931605]12.175217739272048,5:[&rate=0.002631167784475927]12.175217739272048):[&rate=0.003450050215133832]15.811392403239024,(8:[&rate=0.0036658013244934944]18.29519279410251,10:[&rate=0.003047857389499795]18.29519279410251):[&rate=0.003290027960029318]9.691417348408564):[&rate=0.0033862454106649807]2.15630 [...]
+tree STATE_3980000 [&lnP=-22240.077842155606] = [&R] (9:[&rate=0.002374521305336125]144.11179551742433,((((((16:[&rate=0.0025289309020571538]44.27142345704091,(((3:[&rate=0.0027015099373332765]19.607591287004865,5:[&rate=0.0025124852973004205]19.607591287004865):[&rate=0.0024749511173910903]22.70880127006,(8:[&rate=0.002407488078984026]28.593714830906254,10:[&rate=0.002576145935435893]28.593714830906254):[&rate=0.0024540430455023674]13.722677726158611):[&rate=0.0025465681393800803]1.1007 [...]
+tree STATE_3990000 [&lnP=-22223.044376920017] = [&R] (9:[&rate=0.0028451801882944825]159.05219056461067,((14:[&rate=0.002401942373036424]114.02079690408425,((((((((11:[&rate=0.002490512539237157]7.32732569841113,12:[&rate=0.002032857601457953]7.32732569841113):[&rate=0.002464730444086404]23.341806001553884,(13:[&rate=0.002377077167247411]26.10883576717295,7:[&rate=0.0025725707949860776]26.10883576717295):[&rate=0.002464730444086404]4.560295932792066):[&rate=0.001770259828129864]10.420594 [...]
+tree STATE_4000000 [&lnP=-22221.346204431356] = [&R] (9:[&rate=0.00269990755378282]140.26603152735558,(33:[&rate=0.002299313438001218]95.22530777585783,(14:[&rate=0.002371812212782913]80.54712831686444,(((((((23:[&rate=0.002151220085317013]10.619762907911094,17:[&rate=0.0033666003695789195]10.619762907911094):[&rate=0.002423596308605028]12.195727527701441,(24:[&rate=0.0029687318994828325]13.565706833469475,(15:[&rate=0.002663120461627839]9.541600675588668,(21:[&rate=0.001957262098171844] [...]
+tree STATE_4010000 [&lnP=-22207.442133532903] = [&R] (9:[&rate=0.0024823787350107974]138.8279046004069,((14:[&rate=0.0029554398532756036]84.08258270310958,((((((22:[&rate=0.0030846787215125303]20.202420163231906,(24:[&rate=0.0034834473105269854]12.248766975631426,(15:[&rate=0.004172716785304081]7.081384724971951,(21:[&rate=0.0030846787215125303]3.473751057933075,(20:[&rate=0.0028736857855991543]3.40087107245086,19:[&rate=0.0034834473105269854]3.40087107245086):[&rate=0.004172716785304081 [...]
+tree STATE_4020000 [&lnP=-22218.600481973474] = [&R] (9:[&rate=0.002680006218214456]149.52384388306572,((14:[&rate=0.0025732431346061416]86.90073630593179,((((((23:[&rate=0.0024895801448677696]12.024151879827082,17:[&rate=0.0021901595483827868]12.024151879827082):[&rate=0.0024070604949621576]13.800484922899978,(22:[&rate=0.002738216659972013]24.60490959843162,(24:[&rate=0.0027797338392433414]14.837411055130755,(15:[&rate=0.002042498360176346]9.211834801038263,(21:[&rate=0.002539432845840 [...]
+tree STATE_4030000 [&lnP=-22211.6653593659] = [&R] (9:[&rate=0.0038954386621259892]104.43099302507396,((14:[&rate=0.0033117007605620806]79.43465435405149,(((((22:[&rate=0.0028899259139543018]22.765837193521065,((23:[&rate=0.003278016411397321]9.595794260663364,17:[&rate=0.0029725907341271477]9.595794260663364):[&rate=0.0033291235651637394]10.175196089372431,(24:[&rate=0.004003609544818953]12.03673360580147,(15:[&rate=0.0029240780687330823]9.53436226401957,(19:[&rate=0.0035702397183009753 [...]
+tree STATE_4040000 [&lnP=-22226.199190515952] = [&R] (9:[&rate=0.0029439105865116146]132.46635371464805,((33:[&rate=0.002887417868227094]87.3700543215703,14:[&rate=0.002881604227599463]87.3700543215703):[&rate=0.0029728525802723534]3.9671508142056098,((((((23:[&rate=0.0026879588328679606]9.841601959592293,17:[&rate=0.002915804474229927]9.841601959592293):[&rate=0.002927018889297416]13.457621937897851,(22:[&rate=0.002898878613290718]23.0787647261011,(24:[&rate=0.002898878613290718]13.8910 [...]
+tree STATE_4050000 [&lnP=-22217.685141170332] = [&R] (9:[&rate=0.0022491659472105453]154.61729824940113,(33:[&rate=0.0021107662721347393]115.9502151306954,((((((22:[&rate=0.002304741680363474]26.19283081545296,((23:[&rate=0.0023392150438422722]13.990875932283808,17:[&rate=0.001958307529312326]13.990875932283808):[&rate=0.002685563081348611]10.378423843153419,(24:[&rate=0.0031183938918608323]14.673189742821652,(15:[&rate=0.003010554711443553]8.87398742016236,((21:[&rate=0.0023392150438422 [...]
+tree STATE_4060000 [&lnP=-22228.323740459913] = [&R] (9:[&rate=0.00241140854371114]141.39531911873388,(33:[&rate=0.002244313786330724]109.44389664658297,(14:[&rate=0.002617621406709662]97.14322335868322,(((((22:[&rate=0.0026556417827634966]23.786308922481233,((23:[&rate=0.002371730463461408]9.706959970078344,17:[&rate=0.0019210349975706737]9.706959970078344):[&rate=0.002371730463461408]13.499700047186003,(24:[&rate=0.00246127500425065]11.745246150439915,(15:[&rate=0.002436668172499529]9. [...]
+tree STATE_4070000 [&lnP=-22209.03923681182] = [&R] (9:[&rate=0.002470358108457131]140.9661270051888,(33:[&rate=0.002210338096914241]107.86526265284566,(((((((((7:[&rate=0.0028482333595255187]22.000707091957768,13:[&rate=0.0024544077211962243]22.000707091957768):[&rate=0.002417635689929854]3.763438001667634,(11:[&rate=0.0033381455829233175]5.341457474189565,12:[&rate=0.0030276842790251043]5.341457474189565):[&rate=0.0028711483525567204]20.422687619435838):[&rate=0.00263886474794076]7.470 [...]
+tree STATE_4080000 [&lnP=-22209.246915594187] = [&R] (9:[&rate=0.0027843925703358455]140.1539376956719,(33:[&rate=0.0023572044604175304]93.09180077103444,((((((22:[&rate=0.002712091546899912]23.872020405761575,((23:[&rate=0.0034321193262778524]8.580016007837804,17:[&rate=0.0037440421846099193]8.580016007837804):[&rate=0.0022724585195137825]14.565486734923224,(24:[&rate=0.0034321193262778524]12.108220191051664,(15:[&rate=0.003017318203621375]10.558379650183587,((20:[&rate=0.00318111885773 [...]
+tree STATE_4090000 [&lnP=-22225.467814591157] = [&R] (9:[&rate=0.003606688635192941]116.02207458703707,(33:[&rate=0.0030107468993944226]96.08541147635974,(14:[&rate=0.002755817596400538]88.38097473992342,(((((22:[&rate=0.0031507117099757553]21.290032699852294,((23:[&rate=0.002776160187738857]10.703089034765112,17:[&rate=0.0034764443767904187]10.703089034765112):[&rate=0.0029242808020478097]9.62041677699049,(24:[&rate=0.004091981001792365]10.339779558219576,(((20:[&rate=0.0036464720436744 [...]
+tree STATE_4100000 [&lnP=-22220.44143667285] = [&R] (9:[&rate=0.0023953299123192088]150.34755190499308,(33:[&rate=0.00241641430734011]111.59734777759306,(((((((8:[&rate=0.003077352153325656]22.761745012362084,10:[&rate=0.002486739023035198]22.761745012362084):[&rate=0.0024059565774069776]14.736187562071137,16:[&rate=0.0024369204158788517]37.49793257443322):[&rate=0.0024059565774069776]0.7588922769538868,((7:[&rate=0.0026306749344265365]30.919069040118725,(13:[&rate=0.0023385430412957335] [...]
+tree STATE_4110000 [&lnP=-22221.63607226427] = [&R] (9:[&rate=0.0029760472132881414]139.4029404888007,(33:[&rate=0.0024324717271647913]121.19277772665964,(((((((8:[&rate=0.002403629948341513]28.2535421276169,10:[&rate=0.002046141408399328]28.2535421276169):[&rate=0.003152110669016793]12.376715548274486,(((11:[&rate=0.0019853622979152975]7.11801347223645,12:[&rate=0.002474256279999508]7.11801347223645):[&rate=0.0019853622979152975]25.440965327166147,(13:[&rate=0.0023084910426066376]23.497 [...]
+tree STATE_4120000 [&lnP=-22229.813179212168] = [&R] (9:[&rate=0.0020834525025312306]153.60748105875425,(((((((8:[&rate=0.002826127941929728]23.832184746322937,10:[&rate=0.002568465373501879]23.832184746322937):[&rate=0.0022930180016479985]13.751827796528808,(((11:[&rate=0.002477539972337732]8.474574210816913,12:[&rate=0.001982636737441356]8.474574210816913):[&rate=0.0026041721158529697]21.73040486585323,(13:[&rate=0.0022689389473085323]21.31749736666764,7:[&rate=0.0033964276199784345]21 [...]
+tree STATE_4130000 [&lnP=-22219.859697306725] = [&R] (9:[&rate=0.002559641278378841]131.5877406591495,(33:[&rate=0.002393199879210784]96.68531109806125,(((((16:[&rate=0.0026844847278409607]40.30995618799632,(((((((20:[&rate=0.002283333185881556]4.3981728777262195,21:[&rate=0.002501888812573448]4.3981728777262195):[&rate=0.00383949862050145]0.32808457048219797,19:[&rate=0.003078857764614418]4.726257448208417):[&rate=0.0023684315578344902]11.735279276217597,15:[&rate=0.0023684315578344902] [...]
+tree STATE_4140000 [&lnP=-22215.78391683524] = [&R] (9:[&rate=0.003632415942309948]114.13850422794597,(33:[&rate=0.0026092454975530765]96.46816568841886,(14:[&rate=0.003109648337997487]83.39025749700717,(((((((7:[&rate=0.002313867204167191]28.63750133700206,(13:[&rate=0.0020118724910969817]26.128127006013987,(11:[&rate=0.003137069787587389]5.81219302707615,12:[&rate=0.00247386432108553]5.81219302707615):[&rate=0.0032533230654392376]20.315933978937835):[&rate=0.0038017484438524055]2.50937 [...]
+tree STATE_4150000 [&lnP=-22224.62817296606] = [&R] (9:[&rate=0.0024394011788260446]158.41417282441236,(14:[&rate=0.002256586402558419]130.61840008839667,(((((((8:[&rate=0.0025152040817950088]21.57785203312014,10:[&rate=0.0027589824910403743]21.57785203312014):[&rate=0.002164565076992772]21.19762885444023,((7:[&rate=0.0025502774789976164]32.601474950232685,(13:[&rate=0.002164565076992772]26.145111864770367,(11:[&rate=0.002498872048883576]7.757451703836165,12:[&rate=0.0020759948068034455] [...]
+tree STATE_4160000 [&lnP=-22215.724933476475] = [&R] ((33:[&rate=0.0024741387159111874]103.93671009484946,(((((16:[&rate=0.0028654656684089725]36.21228438486461,((8:[&rate=0.0031716448523729903]22.538515335111008,10:[&rate=0.0026343287122846263]22.538515335111008):[&rate=0.002881700772967281]11.982405474841627,(((11:[&rate=0.002703848667282593]6.124536107335293,12:[&rate=0.0028173553442000297]6.124536107335293):[&rate=0.0028413923840934947]17.385472006332982,(13:[&rate=0.0027679755770142 [...]
+tree STATE_4170000 [&lnP=-22223.60590655793] = [&R] ((33:[&rate=0.0015572440297109782]144.40954949322864,(((((16:[&rate=0.0018138555086340772]47.86419734799767,(((11:[&rate=0.001997743496337505]11.619753810448806,12:[&rate=0.001510137843767139]11.619753810448806):[&rate=0.0024410898222246577]23.68545899826318,(13:[&rate=0.00170946783137921]29.916684227987645,7:[&rate=0.002107005517305902]29.916684227987645):[&rate=0.0017749583547716332]5.388528580724341):[&rate=0.001510137843767139]8.159 [...]
+tree STATE_4180000 [&lnP=-22224.36611314562] = [&R] (((((16:[&rate=0.0026101718854969766]39.17869948628242,((((23:[&rate=0.0025981908695780994]9.507254716627719,17:[&rate=0.002640830247525777]9.507254716627719):[&rate=0.002640830247525777]12.293614028631234,(22:[&rate=0.0026350426563205858]20.734772990913374,(24:[&rate=0.0025928060825897176]10.939928594842655,(15:[&rate=0.0026193319680044122]10.47867682582775,(21:[&rate=0.002617418711468612]2.9584786727723182,(19:[&rate=0.002597173603296 [...]
+tree STATE_4190000 [&lnP=-22229.463936407883] = [&R] (((14:[&rate=0.0017252690175682606]117.18747801263746,(((16:[&rate=0.0016469355201759444]61.70803806203029,(((((7:[&rate=0.002195467956563496]34.469375561870486,13:[&rate=0.0014286732490665097]34.469375561870486):[&rate=0.0022263761271670885]6.459476664460816,(11:[&rate=0.0018749337537612536]9.113594409349894,12:[&rate=0.001746529355148454]9.113594409349894):[&rate=0.0013484725945234956]31.815257816981408):[&rate=0.0015681675693958788] [...]
+tree STATE_4200000 [&lnP=-22223.306406618092] = [&R] (((((((((8:[&rate=0.0024495789983024563]23.343211451658117,10:[&rate=0.002401226749407421]23.343211451658117):[&rate=0.0025751242811865287]12.9180728530531,((((11:[&rate=0.002528844387887378]6.289595536379515,12:[&rate=0.0030325628187994826]6.289595536379515):[&rate=0.003161869846154576]16.56471219734154,13:[&rate=0.002725862130563955]22.854307733721054):[&rate=0.0022484317101858294]4.924212846342662,7:[&rate=0.002968297190392316]27.77 [...]
+tree STATE_4210000 [&lnP=-22223.245018070058] = [&R] ((((((((8:[&rate=0.0033622411941864024]17.973172047903365,10:[&rate=0.003044496563175594]17.973172047903365):[&rate=0.0033751218573796773]11.292831943086483,(((7:[&rate=0.003313203480746572]18.33708057174539,13:[&rate=0.0034018741527575606]18.33708057174539):[&rate=0.0029865724008913555]3.2380488009559834,(11:[&rate=0.003083274418374708]4.799213024410401,12:[&rate=0.0029154526091956655]4.799213024410401):[&rate=0.0028430851177059105]16 [...]
+tree STATE_4220000 [&lnP=-22209.14146450605] = [&R] ((33:[&rate=0.002875382667673381]89.49297253465465,(14:[&rate=0.003927495595621105]66.16861652561997,(((((((22:[&rate=0.0029604877722842256]20.23623331666144,((15:[&rate=0.003843517371596086]9.00048526088175,((21:[&rate=0.003207941920166573]2.6943791563098634,20:[&rate=0.003123362777344208]2.6943791563098634):[&rate=0.003452345890280348]1.030960588323524,19:[&rate=0.00328046883176749]3.7253397446333874):[&rate=0.00328046883176749]5.2751 [...]
+tree STATE_4230000 [&lnP=-22203.970812921747] = [&R] (((14:[&rate=0.0030661116921831717]70.7759586312744,((((((23:[&rate=0.0031843116153055457]7.921778390115581,17:[&rate=0.003405715464705503]7.921778390115581):[&rate=0.0025053568655321427]11.239772856392026,(22:[&rate=0.0033544393766245283]17.48299981599374,(24:[&rate=0.003951297159884875]12.038653127097309,(((21:[&rate=0.0028686638132722347]1.5843327050381073,20:[&rate=0.004126585182671401]1.5843327050381073):[&rate=0.00259065185232696 [...]
+tree STATE_4240000 [&lnP=-22220.293819872266] = [&R] ((33:[&rate=0.0022211623047338285]120.43283369985078,(14:[&rate=0.00235892755256526]104.12313567575963,(((((((7:[&rate=0.0025850183783925496]21.621764366493846,13:[&rate=0.0023450870470639765]21.621764366493846):[&rate=0.0022943714012218833]5.885613375699922,(11:[&rate=0.0024297320980736507]8.541053715348541,12:[&rate=0.002446080030988916]8.541053715348541):[&rate=0.002637695077433412]18.96632402684523):[&rate=0.0024351960101013366]7.6 [...]
+tree STATE_4250000 [&lnP=-22200.173077816413] = [&R] (((((((((23:[&rate=0.002909281124410641]9.277247096674515,17:[&rate=0.0031319985995754466]9.277247096674515):[&rate=0.0020768673256601057]13.463903629432998,((24:[&rate=0.0031063915193253874]13.828885399075036,((19:[&rate=0.0024798709639950872]3.503296079313934,(20:[&rate=0.002777829021888128]2.896688434198872,21:[&rate=0.0025590667605828316]2.896688434198872):[&rate=0.00245887606672272]0.6066076451150617):[&rate=0.0025590667605828316] [...]
+tree STATE_4260000 [&lnP=-22223.288848080487] = [&R] (((((((((23:[&rate=0.0018848751030465444]15.664825384659421,17:[&rate=0.0017030163951420078]15.664825384659421):[&rate=0.0018022941283161087]17.309402760756768,((24:[&rate=0.00249318646516767]16.45698607621223,((19:[&rate=0.0019183732180786007]3.905484928050143,(20:[&rate=0.0016005883854952885]3.8085081827998395,21:[&rate=0.0019072648002091625]3.8085081827998395):[&rate=0.0018268761827716401]0.09697674525030342):[&rate=0.00244776744058 [...]
+tree STATE_4270000 [&lnP=-22207.100300087284] = [&R] ((33:[&rate=0.0020971922411856586]105.2655269533926,(((((16:[&rate=0.0024909619547384128]35.42849059819177,((((7:[&rate=0.003579979329276475]18.1662882319906,13:[&rate=0.0025551104307281556]18.1662882319906):[&rate=0.002932140817899543]6.280004086116904,(11:[&rate=0.002725011729087475]5.505506923187135,12:[&rate=0.0020578282177655663]5.505506923187135):[&rate=0.002813435922989266]18.94078539492037):[&rate=0.0024909619547384128]4.746552 [...]
+tree STATE_4280000 [&lnP=-22223.887899453766] = [&R] (((((((16:[&rate=0.002245941758386814]37.58419852036182,((8:[&rate=0.002999168202583427]22.475903305600227,10:[&rate=0.0029738852766511164]22.475903305600227):[&rate=0.0027970660371412965]14.306436787436539,(((7:[&rate=0.0024593953969659276]22.112302014311968,13:[&rate=0.0021313472878787022]22.112302014311968):[&rate=0.0025526205597286864]5.958732464868717,(11:[&rate=0.003551111321711699]6.353834475684408,12:[&rate=0.002515742468475776 [...]
+tree STATE_4290000 [&lnP=-22223.091247446268] = [&R] (((((((((8:[&rate=0.0024217822877487475]23.172448249581223,10:[&rate=0.0021740561318496087]23.172448249581223):[&rate=0.0021594596287492574]15.981528673224929,(((7:[&rate=0.00239772279622048]24.10323028226911,13:[&rate=0.0020102032052109445]24.10323028226911):[&rate=0.0025670873196565877]3.974116017484871,(11:[&rate=0.003079740483663239]5.053147852715583,12:[&rate=0.0026875277255514352]5.053147852715583):[&rate=0.002228401297996333]23. [...]
+tree STATE_4300000 [&lnP=-22210.90426920334] = [&R] (((((((((23:[&rate=0.002440426808886529]12.131144739842016,17:[&rate=0.002299468745507117]12.131144739842016):[&rate=0.0022578879831027486]14.443191064728884,(22:[&rate=0.002037760513085967]24.43727236447997,((((20:[&rate=0.0024257641489512627]4.147695820682296,19:[&rate=0.002517656071263449]4.147695820682296):[&rate=0.002354803204381842]0.8407037559371968,21:[&rate=0.0022718004549687606]4.988399576619493):[&rate=0.0027120835767038935]6 [...]
+tree STATE_4310000 [&lnP=-22221.942771275088] = [&R] (((14:[&rate=0.002790118328109813]92.19719071712589,(((((22:[&rate=0.0026057196775797643]24.217900961651438,((23:[&rate=0.002544019721730534]12.264969346415493,17:[&rate=0.002649681399085612]12.264969346415493):[&rate=0.002734784365603833]11.649258864537364,((((20:[&rate=0.0024800242968758567]2.546132047160309,21:[&rate=0.0028484946190075492]2.546132047160309):[&rate=0.0026718609515447604]0.4854369775584866,19:[&rate=0.0027023626295201 [...]
+tree STATE_4320000 [&lnP=-22216.0333507197] = [&R] (((14:[&rate=0.002246626902968455]100.79536545216513,((((((8:[&rate=0.002128818808288255]24.902374999746346,10:[&rate=0.002271483431978582]24.902374999746346):[&rate=0.002711262695823174]15.414095614716512,((3:[&rate=0.002384603747380074]15.948508153208188,5:[&rate=0.002371668806124167]15.948508153208188):[&rate=0.002068837026256888]20.179415170399245,((7:[&rate=0.0025983927621265747]27.170997989159083,13:[&rate=0.0018277928164484487]27. [...]
+tree STATE_4330000 [&lnP=-22214.457056820655] = [&R] (((((((16:[&rate=0.002103904284232214]40.94760073298814,((8:[&rate=0.0027636304026952274]26.109021429592538,10:[&rate=0.0022486069314873]26.109021429592538):[&rate=0.002211496092102196]13.611119972209266,((3:[&rate=0.002175164734234546]17.108040670507634,5:[&rate=0.002211496092102196]17.108040670507634):[&rate=0.0020862185071383625]20.95934438651325,(((11:[&rate=0.0026187731876464223]7.601868285499291,12:[&rate=0.002050805621950612]7.6 [...]
+tree STATE_4340000 [&lnP=-22218.188491319703] = [&R] (((14:[&rate=0.003976338019771755]65.59986182281276,((((16:[&rate=0.0030859074152016284]31.670714062552076,(((3:[&rate=0.0031408390170699057]10.6980262743273,5:[&rate=0.00302220873529953]10.6980262743273):[&rate=0.00333767437821498]15.61877585784985,((7:[&rate=0.0036254207066703683]18.682196494705654,13:[&rate=0.002997952653702483]18.682196494705654):[&rate=0.0030859074152016284]2.9060348006624324,(11:[&rate=0.0036628507370432086]4.251 [...]
+tree STATE_4350000 [&lnP=-22218.541760242235] = [&R] (((33:[&rate=0.002576158746825745]83.767289725115,((((((8:[&rate=0.0030579854600398604]18.08038625769316,10:[&rate=0.0028588963634298887]18.08038625769316):[&rate=0.002635904627782396]14.749788805546995,((3:[&rate=0.0028679408382730675]13.616948818656558,5:[&rate=0.0031699141575937707]13.616948818656558):[&rate=0.002551895233310839]17.74267682782866,((7:[&rate=0.002953805904257772]22.8589422239148,13:[&rate=0.0023293856132884498]22.858 [...]
+tree STATE_4360000 [&lnP=-22216.632266025434] = [&R] (((((((16:[&rate=0.002323010427158408]42.32378688388294,(((((((21:[&rate=0.002292148069154881]5.314671319604872,20:[&rate=0.0021472697384383945]5.314671319604872):[&rate=0.002211033383936748]1.2643220159623896,19:[&rate=0.0021126886632574083]6.578993335567262):[&rate=0.0026172744597112137]6.6618275947548184,15:[&rate=0.001872874141522277]13.24082093032208):[&rate=0.0022415670045074273]6.975870652093416,24:[&rate=0.002490171372699101]20 [...]
+tree STATE_4370000 [&lnP=-22219.1542323635] = [&R] ((14:[&rate=0.0021266750459944415]134.80221995888797,(33:[&rate=0.0019805480858490895]125.14480677838357,(((((((23:[&rate=0.0018076082988271302]16.020201949005944,17:[&rate=0.0017490344954081295]16.020201949005944):[&rate=0.0023514848329514023]12.148318415546846,(24:[&rate=0.002002067182147126]16.181377119494844,(15:[&rate=0.0019912993436119364]10.049212167951513,((19:[&rate=0.0024706924125318195]3.366779192157423,20:[&rate=0.00174903449 [...]
+tree STATE_4380000 [&lnP=-22207.424851760876] = [&R] ((33:[&rate=0.002754435314740449]91.94418692534853,(((((((3:[&rate=0.002734465625049712]14.607056644561435,5:[&rate=0.0025576169523498444]14.607056644561435):[&rate=0.0030232535707254778]14.870424205167737,((7:[&rate=0.0041279735000115]16.822474302880693,13:[&rate=0.003395892724685946]16.822474302880693):[&rate=0.0027741430574175145]5.857567146209046,(11:[&rate=0.0031459030633858506]6.306703174016226,12:[&rate=0.0027141847079707403]6.3 [...]
+tree STATE_4390000 [&lnP=-22227.42198283699] = [&R] (((((((((23:[&rate=0.002255638472723066]12.298954307237649,17:[&rate=0.002053898598796247]12.298954307237649):[&rate=0.002303336433905588]15.478825197112597,((24:[&rate=0.002255638472723066]18.51480071528166,((21:[&rate=0.0019319566288378083]6.451474406987742,(20:[&rate=0.0019126550406126503]3.0786930461852138,19:[&rate=0.00254698956612377]3.0786930461852138):[&rate=0.001949923069866078]3.3727813608025285):[&rate=0.0024088914518699198]5 [...]
+tree STATE_4400000 [&lnP=-22216.816752298168] = [&R] ((33:[&rate=0.002408136986083421]99.98632299005908,(14:[&rate=0.002322470094677671]84.76914076584673,(((((((3:[&rate=0.004039820923994377]8.57260076641323,5:[&rate=0.0034922034634764553]8.57260076641323):[&rate=0.0030001987886033566]17.461319191210386,(((11:[&rate=0.003670645109070783]5.908145779255725,12:[&rate=0.0031328847354477893]5.908145779255725):[&rate=0.0031951213040784647]14.7441602525173,13:[&rate=0.002322470094677671]20.6523 [...]
+tree STATE_4410000 [&lnP=-22213.347413172218] = [&R] (9:[&rate=0.002691348836591134]125.47704378015595,((14:[&rate=0.0027283620122526674]80.65562857619264,(((((((3:[&rate=0.0022678211537111178]14.29253099046807,5:[&rate=0.0027651029157192367]14.29253099046807):[&rate=0.00297289583954649]16.466716317544222,((7:[&rate=0.0038204114186956755]17.987736234172765,13:[&rate=0.0026347308141701437]17.987736234172765):[&rate=0.0025957906427935924]8.822759172265183,(11:[&rate=0.0031740763196751545]5 [...]
+tree STATE_4420000 [&lnP=-22224.09738235524] = [&R] (9:[&rate=0.002680919909443772]167.59043770962583,((14:[&rate=0.0018424578160761414]116.74286600249607,((((16:[&rate=0.001966057215174805]45.831062464973954,(((3:[&rate=0.0022911976523849772]17.122081903185943,5:[&rate=0.001918394372761262]17.122081903185943):[&rate=0.00210799221906635]22.36517126381149,((7:[&rate=0.0021696211344804676]25.436330936228554,13:[&rate=0.0022631945885776437]25.436330936228554):[&rate=0.0024958311384908824]3. [...]
+tree STATE_4430000 [&lnP=-22226.160786757617] = [&R] (9:[&rate=0.0020404075688439144]165.41421091194647,(((((((((23:[&rate=0.0025689980928679424]10.846388019095974,17:[&rate=0.00226926715814044]10.846388019095974):[&rate=0.0020232134532296438]18.141987324101414,((15:[&rate=0.0025321793426973166]10.283076969565231,(19:[&rate=0.002360896776495149]5.2703512506408075,(21:[&rate=0.0023265051298965176]4.0564674601343285,20:[&rate=0.0023978602516327298]4.0564674601343285):[&rate=0.0024503469064 [...]
+tree STATE_4440000 [&lnP=-22220.713614180208] = [&R] (9:[&rate=0.0030982235560314073]116.89356430836966,((((((((8:[&rate=0.00302648126081408]22.337120236824866,10:[&rate=0.003177244980201413]22.337120236824866):[&rate=0.003086261749498895]11.065842584364667,((3:[&rate=0.0030506884865639074]13.32377716437674,5:[&rate=0.0030075789287043503]13.32377716437674):[&rate=0.003013976806502206]15.899055251075218,((11:[&rate=0.0030326153887585135]7.252480278686153,12:[&rate=0.0029135631962267194]7. [...]
+tree STATE_4450000 [&lnP=-22216.459492049064] = [&R] (9:[&rate=0.002372481324419576]141.9491339247825,((14:[&rate=0.002707449860371763]84.27105295675183,33:[&rate=0.0028240218880024133]84.27105295675183):[&rate=0.0023861596689169827]13.498419130177183,((((((8:[&rate=0.002545802672516687]22.68555767393198,10:[&rate=0.0022996823558567633]22.68555767393198):[&rate=0.0025690023838825632]15.259746231153905,((3:[&rate=0.0025300701491781305]15.705968585130003,5:[&rate=0.0023219849133190753]15.7 [...]
+tree STATE_4460000 [&lnP=-22220.420336363906] = [&R] (9:[&rate=0.0028273390882696064]131.08610186871854,((14:[&rate=0.003279150595866442]85.51142686124872,((((((8:[&rate=0.003047485108581145]22.72541344444206,10:[&rate=0.0030928144490830357]22.72541344444206):[&rate=0.002803000894974943]12.564871992088598,((((11:[&rate=0.003047485108581145]6.77624268553933,12:[&rate=0.0027438092389753758]6.77624268553933):[&rate=0.0030300831974589444]14.638538197344396,13:[&rate=0.002860115691802508]21.4 [...]
+tree STATE_4470000 [&lnP=-22230.565229374486] = [&R] (9:[&rate=0.002378448256957688]143.67962758209015,(14:[&rate=0.0022719301908956644]106.85780700190192,(((((((23:[&rate=0.0025149437349497664]11.319101245664301,17:[&rate=0.002628285875082219]11.319101245664301):[&rate=0.0025327141484091827]14.816208385568205,((24:[&rate=0.0024709428011240424]11.334233544585151,((21:[&rate=0.002368301958031408]4.633556362634604,(19:[&rate=0.0025327141484091827]2.224181802519849,20:[&rate=0.0025416913791 [...]
+tree STATE_4480000 [&lnP=-22209.548793312024] = [&R] (9:[&rate=0.0026695463175856124]137.17131734751783,(33:[&rate=0.0025495899693703645]107.531967033049,(14:[&rate=0.002807735845132702]92.82096670039955,((((((23:[&rate=0.0026182401915945033]12.0229985808666,17:[&rate=0.002494283574457634]12.0229985808666):[&rate=0.0026293583996215637]13.023076381429975,(((15:[&rate=0.002579105017845445]10.50912865256159,(19:[&rate=0.0026755859581070046]4.029478651394512,(21:[&rate=0.002494283574457634]3 [...]
+tree STATE_4490000 [&lnP=-22230.51585488711] = [&R] (9:[&rate=0.002036364183349499]172.49065250725891,((((((((((7:[&rate=0.0023716563251733914]23.38873027284257,13:[&rate=0.0020458518590686365]23.38873027284257):[&rate=0.0021505410091042978]6.6275511788137536,(11:[&rate=0.00240197724317156]8.333746202941988,12:[&rate=0.0022980706381464586]8.333746202941988):[&rate=0.0021505410091042978]21.682535248714334):[&rate=0.0019610736862809446]8.978258946550977,(3:[&rate=0.0021251450724573753]16.9 [...]
+tree STATE_4500000 [&lnP=-22218.206138565958] = [&R] (9:[&rate=0.004259841184076634]98.15144470315174,((((((((22:[&rate=0.0027769024666452445]22.81287472227589,((15:[&rate=0.0028753491042665096]10.760310629581811,(19:[&rate=0.0038826083685445287]2.5042820955278686,(21:[&rate=0.0037401657793625447]2.2644196054555312,20:[&rate=0.0031304804328596758]2.2644196054555312):[&rate=0.0031072420564656608]0.2398624900723374):[&rate=0.002724559435859161]8.256028534053943):[&rate=0.00260916039549762] [...]
+tree STATE_4510000 [&lnP=-22228.32978451553] = [&R] (9:[&rate=0.002609091999544063]132.7073942904333,((33:[&rate=0.002504776395755627]88.74571506046668,(((((((23:[&rate=0.0027383675607537458]10.350495603920528,17:[&rate=0.0026035760355614035]10.350495603920528):[&rate=0.002598041736196854]11.628040257997736,((15:[&rate=0.0024887782001976593]12.09475716448433,(21:[&rate=0.0028581451858852164]6.8869713292790715,(19:[&rate=0.0027159681787932107]6.016553559434381,20:[&rate=0.0025262652039883 [...]
+tree STATE_4520000 [&lnP=-22217.35420445014] = [&R] (9:[&rate=0.002468557163671125]148.33661365516278,((14:[&rate=0.0024468464297642814]91.42517781737536,((((((8:[&rate=0.002779820481977933]24.864243418587435,10:[&rate=0.0026314629981420406]24.864243418587435):[&rate=0.002929105309582388]11.414539536355576,((3:[&rate=0.0027437526840057994]13.125825587241614,5:[&rate=0.002779820481977933]13.125825587241614):[&rate=0.0026126885665869824]21.837379329783758,((11:[&rate=0.0023221404158674935] [...]
+tree STATE_4530000 [&lnP=-22225.992876759126] = [&R] (9:[&rate=0.002715536210156532]124.20151051360928,(14:[&rate=0.002754449476690883]100.39518517786662,(33:[&rate=0.0026768431366930526]88.80042109420278,(((((22:[&rate=0.0027137487387264322]22.283934764358698,((24:[&rate=0.0027306599397901006]13.723678389798236,(15:[&rate=0.002733919891498614]11.268532028057267,((19:[&rate=0.0027623580511914013]2.566821023905196,20:[&rate=0.002760290533986364]2.566821023905196):[&rate=0.0027525990241055 [...]
+tree STATE_4540000 [&lnP=-22219.96413525497] = [&R] (9:[&rate=0.0023439605383086216]155.19910603365085,(33:[&rate=0.002308163844612906]111.54125791753498,(14:[&rate=0.0024596313399880103]94.04894476196097,((((((8:[&rate=0.0023370173086523826]25.158964046905982,10:[&rate=0.0023777767243426317]25.158964046905982):[&rate=0.0026125714829952932]16.681413304082188,((3:[&rate=0.0024044396230582994]15.980377946801688,5:[&rate=0.002481994646455121]15.980377946801688):[&rate=0.002308163844612906]2 [...]
+tree STATE_4550000 [&lnP=-22215.184517558704] = [&R] (9:[&rate=0.0032384464947179236]128.0894046722769,((((((((22:[&rate=0.0026919109268979035]23.168534462321436,(24:[&rate=0.0025406732069262958]14.280405776280087,(15:[&rate=0.0027321904362186495]11.745610633959439,(19:[&rate=0.002182012282384549]3.9276868601774653,(21:[&rate=0.0020086274462469906]3.104718505664938,20:[&rate=0.003166874740613404]3.104718505664938):[&rate=0.0026111703560185388]0.8229683545125273):[&rate=0.0028908799970325 [...]
+tree STATE_4560000 [&lnP=-22219.851544551893] = [&R] (9:[&rate=0.002280114002605229]166.07884552294365,(33:[&rate=0.002011852537874538]124.20302031916326,(14:[&rate=0.0024611344201002066]105.8563722261646,(((((((7:[&rate=0.003138054676199225]21.4980952224412,13:[&rate=0.002072968578162827]21.4980952224412):[&rate=0.002072968578162827]6.717456830361957,(11:[&rate=0.0021845361433932733]8.843309349768314,12:[&rate=0.0021175896910126464]8.843309349768314):[&rate=0.00293380835760319]19.372242 [...]
+tree STATE_4570000 [&lnP=-22207.41169828731] = [&R] (9:[&rate=0.0030428236659172415]125.8489182358299,(33:[&rate=0.003162918819330213]76.95363638097282,(14:[&rate=0.003581729046573258]65.2016359318144,(((((22:[&rate=0.003512220876396924]19.184599644064832,((23:[&rate=0.0029722581781168106]9.256692931866676,17:[&rate=0.002868742176673336]9.256692931866676):[&rate=0.0031863413743010906]8.891061620290175,(24:[&rate=0.003528266607791427]10.969778524546513,(15:[&rate=0.00337817030941134]7.455 [...]
+tree STATE_4580000 [&lnP=-22204.326015396313] = [&R] (9:[&rate=0.0025428789174326326]131.72743635028178,(33:[&rate=0.0025688564345166132]92.99677863662191,(14:[&rate=0.0034668880271476314]77.7631407998407,((((((((15:[&rate=0.002957933996278645]12.389152651261057,((20:[&rate=0.002402396611285075]2.687197727724244,19:[&rate=0.0035517497567715654]2.687197727724244):[&rate=0.0031403155941022187]1.2807482389625773,21:[&rate=0.0025428789174326326]3.967945966686821):[&rate=0.0026687538467008746 [...]
+tree STATE_4590000 [&lnP=-22224.68514017941] = [&R] (9:[&rate=0.0028165406824195074]129.42406711830034,(33:[&rate=0.002732578181239414]105.34079755400202,(((((16:[&rate=0.0023209268715239126]41.054989048098356,((8:[&rate=0.0025681817444498656]23.340060830882923,10:[&rate=0.0030564829036086867]23.340060830882923):[&rate=0.002518647555001743]15.207097603500902,((((11:[&rate=0.002685985360473118]5.193559479475022,12:[&rate=0.002881808605245245]5.193559479475022):[&rate=0.0024970620320494932 [...]
+tree STATE_4600000 [&lnP=-22219.926747667992] = [&R] (9:[&rate=0.0017581658654289803]202.17413339906344,(33:[&rate=0.0014593632971375316]142.20172020818606,(14:[&rate=0.0018359467300904327]122.46903831148157,((((16:[&rate=0.001857014974223144]53.01276896100146,((22:[&rate=0.0022715355436141327]27.410393655942535,((23:[&rate=0.0019091041186338176]13.070922371347033,17:[&rate=0.0023282460805249643]13.070922371347033):[&rate=0.002017952138148758]13.3202949021625,(((19:[&rate=0.0017581658654 [...]
+tree STATE_4610000 [&lnP=-22215.77943113359] = [&R] (9:[&rate=0.0037688686798450026]113.43805568389145,(33:[&rate=0.0028383789756270075]97.58039698329698,(14:[&rate=0.0033672233554856617]78.9028882984472,(((((((23:[&rate=0.003143030014434775]9.808846801147677,17:[&rate=0.0037688686798450026]9.808846801147677):[&rate=0.002900202052157738]13.791111139216518,(22:[&rate=0.0029295771454873783]22.74316285868547,(((21:[&rate=0.003056775357799209]3.362627489545852,(19:[&rate=0.003042720886510689 [...]
+tree STATE_4620000 [&lnP=-22219.96388127341] = [&R] (9:[&rate=0.0024681202497782]158.83958808101366,(33:[&rate=0.0019888038517741654]128.1249139355705,(14:[&rate=0.002609363294311821]96.53870381267211,(((((((22:[&rate=0.0024021438146332053]22.74986894297865,(((19:[&rate=0.0027176065218870375]4.519705599227374,(20:[&rate=0.003275276316100773]3.4200423683484984,21:[&rate=0.0025747454190856492]3.4200423683484984):[&rate=0.0025747454190856492]1.0996632308788752):[&rate=0.002901201825904258]6 [...]
+tree STATE_4630000 [&lnP=-22214.25226894769] = [&R] (9:[&rate=0.0026295852053621212]151.86558887106963,(33:[&rate=0.0018706402214902535]135.3129797068224,(14:[&rate=0.0021594733184094822]120.77350139350088,((((((((((21:[&rate=0.002354277181422913]5.5480259054174885,19:[&rate=0.002232644228346208]5.5480259054174885):[&rate=0.0026295852053621212]0.6939361223082443,20:[&rate=0.002262193338063385]6.241962027725733):[&rate=0.002354277181422913]7.829271602431832,15:[&rate=0.0018706402214902535 [...]
+tree STATE_4640000 [&lnP=-22215.039246954857] = [&R] (9:[&rate=0.0026741524061720673]140.53207487251413,((((((((23:[&rate=0.0020410453927613027]14.90062841120339,17:[&rate=0.0022262009327944214]14.90062841120339):[&rate=0.003063873670911126]10.048447036859644,((((21:[&rate=0.002706177311393474]4.934432331308562,(20:[&rate=0.0023604245637285823]3.9777162255146266,19:[&rate=0.002921327305275701]3.9777162255146266):[&rate=0.002706177311393474]0.9567161057939355):[&rate=0.0030370054142640004 [...]
+tree STATE_4650000 [&lnP=-22221.205780301756] = [&R] (9:[&rate=0.0037032504646454362]123.27501884333421,((((((16:[&rate=0.0035397852902941587]27.632148521271045,(((8:[&rate=0.005436424542264184]12.44433221150411,10:[&rate=0.0050099297918279025]12.44433221150411):[&rate=0.003971768802073837]12.325135008718648,(3:[&rate=0.004122786471941024]10.493606580923045,5:[&rate=0.004176638537424172]10.493606580923045):[&rate=0.0031904960570302574]14.275860639299713):[&rate=0.0031904960570302574]1.77 [...]
+tree STATE_4660000 [&lnP=-22223.451206152957] = [&R] (9:[&rate=0.003333420682698167]119.69195580173714,(((((16:[&rate=0.003077313782900751]31.666747850364146,((((3:[&rate=0.0029705618693097948]9.984330190504814,5:[&rate=0.0031301903461994325]9.984330190504814):[&rate=0.0032514551537025866]16.901829647009464,((7:[&rate=0.003456899967294777]18.816634994868853,13:[&rate=0.003077313782900751]18.816634994868853):[&rate=0.003211715637708087]4.042364115801707,(11:[&rate=0.0031126389880635733]6. [...]
+tree STATE_4670000 [&lnP=-22224.55987102006] = [&R] ((33:[&rate=0.0015033098697250875]127.41344739336316,(14:[&rate=0.0025951432915762005]99.10247379587003,((((((((((20:[&rate=0.0017594615224802508]4.43550829226851,19:[&rate=0.002065587221039234]4.43550829226851):[&rate=0.002707675895955605]0.9875865165921294,21:[&rate=0.001890273380730282]5.42309480886064):[&rate=0.0030166988620823313]6.1465743961731825,15:[&rate=0.0019493548449256324]11.569669205033822):[&rate=0.002018240964514909]9.07 [...]
+tree STATE_4680000 [&lnP=-22210.28812855184] = [&R] ((33:[&rate=0.003121817579034999]95.45263690279089,(14:[&rate=0.0027535418398201153]87.65204705813822,((((16:[&rate=0.002775385935771575]36.72952799361245,(((((11:[&rate=0.0031721958886923676]5.564549944795253,12:[&rate=0.002900662746755736]5.564549944795253):[&rate=0.002913053407856406]17.371314461701413,13:[&rate=0.002377392297329991]22.935864406496666):[&rate=0.002864869749351544]4.119265322673982,7:[&rate=0.0025464476022432274]27.05 [...]
+tree STATE_4690000 [&lnP=-22219.616842096693] = [&R] ((14:[&rate=0.0025330241318734324]97.54864510981844,((((((((7:[&rate=0.002844961374798401]19.623588755693753,13:[&rate=0.002938724999159014]19.623588755693753):[&rate=0.002830114370880461]6.05680655276106,(11:[&rate=0.0028226828181906753]5.073157036638655,12:[&rate=0.0030164651091391247]5.073157036638655):[&rate=0.0028673636462708952]20.607238271816158):[&rate=0.002753276631125845]4.678827762491515,(3:[&rate=0.0029748512649849235]12.95 [...]
+tree STATE_4700000 [&lnP=-22218.342666179575] = [&R] (((((((((8:[&rate=0.0024490481685494683]28.5910992005153,10:[&rate=0.002066714720956119]28.5910992005153):[&rate=0.002623516303617296]14.295491609448877,((3:[&rate=0.0022968503528366588]16.403784329434266,5:[&rate=0.002096977485555279]16.403784329434266):[&rate=0.002198581585480701]21.132981784225336,((11:[&rate=0.0022968503528366588]7.5144376079111215,12:[&rate=0.0023705407164097635]7.5144376079111215):[&rate=0.002155132597951434]21.5 [...]
+tree STATE_4710000 [&lnP=-22216.58461992204] = [&R] (((14:[&rate=0.0030390028386709554]86.74226661559766,((((16:[&rate=0.0031169722924297904]34.00132943067121,((8:[&rate=0.0031169722924297904]19.449280302613285,10:[&rate=0.00320957248104009]19.449280302613285):[&rate=0.002796015425941976]12.671012405156464,((3:[&rate=0.0031169722924297904]13.111135141054516,5:[&rate=0.0029255200666655407]13.111135141054516):[&rate=0.0028483044917478207]17.472732412586872,(((11:[&rate=0.003099536871278776 [...]
+tree STATE_4720000 [&lnP=-22226.113710274196] = [&R] ((14:[&rate=0.0024883554222826856]123.43597473561411,(33:[&rate=0.0026725158862222504]111.10179551849282,((((16:[&rate=0.0021887910477040805]41.11624449523075,((22:[&rate=0.0024444807292156737]25.61791250174128,((23:[&rate=0.0025562743583633753]11.27991292222992,17:[&rate=0.002976351061162406]11.27991292222992):[&rate=0.0026442261467531573]11.666604851039855,(24:[&rate=0.002433528198064673]16.232873020868603,(((21:[&rate=0.002533157533 [...]
+tree STATE_4730000 [&lnP=-22216.959739463273] = [&R] ((33:[&rate=0.001764223778063507]134.43103427285652,(((((((8:[&rate=0.0027077880907069524]24.661192586566305,10:[&rate=0.0022454037184716884]24.661192586566305):[&rate=0.0022454037184716884]16.708017072882203,((3:[&rate=0.0029503150527035566]13.557868662492748,5:[&rate=0.0025025145062596657]13.557868662492748):[&rate=0.0022042339431905113]23.10625766452556,(((11:[&rate=0.0022248309289109587]7.598052011168258,12:[&rate=0.001685126208121 [...]
+tree STATE_4740000 [&lnP=-22231.33209712474] = [&R] ((33:[&rate=0.002048306810943527]124.4637459564176,(((((16:[&rate=0.0018923883129962397]49.0581272561071,(((3:[&rate=0.0023834256989603616]18.4844245677295,5:[&rate=0.001628443942422197]18.4844245677295):[&rate=0.0017930756933127664]23.13609690457525,(((11:[&rate=0.002591291980236014]6.575731517624619,12:[&rate=0.002297563022104746]6.575731517624619):[&rate=0.0019608371229652677]24.510338217124076,13:[&rate=0.0017510994566052312]31.0860 [...]
+tree STATE_4750000 [&lnP=-22211.195517289347] = [&R] (((14:[&rate=0.003609518878315946]64.8634183674596,(((((((3:[&rate=0.0038059719197710952]11.103713277043576,5:[&rate=0.0030373432080378834]11.103713277043576):[&rate=0.003520352263525292]12.960954886909247,((7:[&rate=0.004194293418160428]14.689855092202905,13:[&rate=0.0039777963666523634]14.689855092202905):[&rate=0.003609518878315946]2.5811083106400616,(11:[&rate=0.004431085204295034]4.531547744527879,12:[&rate=0.003609518878315946]4. [...]
+tree STATE_4760000 [&lnP=-22215.62185639781] = [&R] (((((((((((15:[&rate=0.0031072174275546508]8.574886657729321,((20:[&rate=0.0031026536341733236]1.793143207305484,21:[&rate=0.0031026536341733236]1.793143207305484):[&rate=0.0030381974820822308]1.6329943498625843,19:[&rate=0.0030887764882152145]3.4261375571680683):[&rate=0.0030672555530280718]5.148749100561252):[&rate=0.0030751002507832235]1.585041432231959,24:[&rate=0.0030704968533325407]10.15992808996128):[&rate=0.003042494146491175]9. [...]
+tree STATE_4770000 [&lnP=-22217.50270471593] = [&R] (((((((((((15:[&rate=0.0027569994321217997]10.804240801089316,((20:[&rate=0.0025960615648710295]2.6835982454970684,19:[&rate=0.0029596056527520734]2.6835982454970684):[&rate=0.0024131676005676116]2.370067365171914,21:[&rate=0.0029749746069859494]5.053665610668983):[&rate=0.0029749746069859494]5.750575190420333):[&rate=0.0024790477637640232]1.5388532234611478,24:[&rate=0.0030070820730032513]12.343094024550464):[&rate=0.002638794020321335 [...]
+tree STATE_4780000 [&lnP=-22206.905211770463] = [&R] ((((((16:[&rate=0.0030133055759852543]33.40444087288752,((((3:[&rate=0.0031157784522009746]11.144767213919575,5:[&rate=0.0031566962296331012]11.144767213919575):[&rate=0.0028388082369536264]15.112754269132054,((7:[&rate=0.003710460935580338]17.05581440911708,13:[&rate=0.0025814720303469494]17.05581440911708):[&rate=0.0028149486816914987]3.6934736157561616,(11:[&rate=0.003833362136480703]5.89769741190567,12:[&rate=0.0030133055759852543] [...]
+tree STATE_4790000 [&lnP=-22221.452790074494] = [&R] ((((((((22:[&rate=0.0029630311739479312]23.445622478078647,(((15:[&rate=0.002867719390639163]11.677735949776826,((20:[&rate=0.002911606945926435]2.5513293529452064,21:[&rate=0.003048346579055423]2.5513293529452064):[&rate=0.0029763380584310944]0.3708763133507418,19:[&rate=0.002994564970017723]2.922205666295948):[&rate=0.003045058643538822]8.755530283480878):[&rate=0.0030003979234030733]1.8236010099771853,24:[&rate=0.0029916169219372443 [...]
+tree STATE_4800000 [&lnP=-22210.760665332953] = [&R] ((((((((((23:[&rate=0.002312024693203912]9.941913715791854,17:[&rate=0.002761070835167999]9.941913715791854):[&rate=0.0031427038312879492]9.453339737925704,(24:[&rate=0.0028392047960501426]11.881621798804298,(((20:[&rate=0.002150096706168167]3.1936766546632023,21:[&rate=0.0031653988162738548]3.1936766546632023):[&rate=0.0032141887619891677]0.33755180560725906,19:[&rate=0.0027138108621447975]3.5312284602704613):[&rate=0.0034532095485043 [...]
+tree STATE_4810000 [&lnP=-22218.415424599014] = [&R] (9:[&rate=0.0030105372884086535]122.10877494203787,(33:[&rate=0.0026124818712781554]102.77564698833037,(14:[&rate=0.002951898725193621]88.01344739645431,(((((16:[&rate=0.003134436196130451]30.860629311988564,(8:[&rate=0.003612965405184882]16.02984112333197,10:[&rate=0.003975860652207892]16.02984112333197):[&rate=0.003134436196130451]14.830788188656594):[&rate=0.0027392338141255896]0.9032398851732708,((3:[&rate=0.003567368089116731]12.1 [...]
+tree STATE_4820000 [&lnP=-22212.72093756749] = [&R] (9:[&rate=0.0034360691324366406]104.16010990966957,(33:[&rate=0.003379971937534091]83.88590476941499,(14:[&rate=0.003613907516095115]74.0589077058189,((((16:[&rate=0.0026651649081145716]32.343255146502216,(((3:[&rate=0.002845090850290963]12.567966280353719,5:[&rate=0.002921866942082345]12.567966280353719):[&rate=0.002845090850290963]17.083500829088127,(((11:[&rate=0.003056217257947167]6.260873603997446,12:[&rate=0.0030192479493707835]6. [...]
+tree STATE_4830000 [&lnP=-22205.934554786018] = [&R] (9:[&rate=0.0032937543755931285]114.15294156997712,(((((((((3:[&rate=0.0032937543755931285]12.493863789767428,5:[&rate=0.003057195133157351]12.493863789767428):[&rate=0.002938932402519374]17.00264999655477,((7:[&rate=0.0036457893499222452]19.983629421692576,13:[&rate=0.002176998708864632]19.983629421692576):[&rate=0.002826812938699491]6.538698526052009,(11:[&rate=0.0036457893499222452]5.732182016969857,12:[&rate=0.003036747582057669]5. [...]
+tree STATE_4840000 [&lnP=-22212.69733152494] = [&R] (9:[&rate=0.00242128920767528]141.10264071908708,((((((16:[&rate=0.0023453543072272137]40.55896000689801,((8:[&rate=0.002659670326404295]21.93277007360401,10:[&rate=0.002108917870448848]21.93277007360401):[&rate=0.002659670326404295]16.0983168799836,((3:[&rate=0.0025724966924903225]16.340997140593174,5:[&rate=0.002392212910422646]16.340997140593174):[&rate=0.0025724966924903225]18.251226823711356,((7:[&rate=0.0028824759380064045]24.4807 [...]
+tree STATE_4850000 [&lnP=-22206.67426191656] = [&R] (9:[&rate=0.003121925483912394]128.46186902638817,(33:[&rate=0.002718945501411668]95.09234785706259,((((((((((11:[&rate=0.003658383313291193]5.579602711193922,12:[&rate=0.0030290351603230337]5.579602711193922):[&rate=0.0031821677066359434]13.013080301530561,13:[&rate=0.002734673782666253]18.592683012724482):[&rate=0.0030806869228523657]5.221512207905569,7:[&rate=0.0033661742904868746]23.81419522063005):[&rate=0.0026267837484997697]4.469 [...]
+tree STATE_4860000 [&lnP=-22209.37951171693] = [&R] (9:[&rate=0.002801419591204044]146.27194267454155,(14:[&rate=0.00254133797714016]91.97224856459457,(((((((8:[&rate=0.0031459977868698824]21.11699138088285,10:[&rate=0.00265465049624263]21.11699138088285):[&rate=0.002461362262586813]11.979104691739472,(((11:[&rate=0.00343035797341953]7.065682911447794,12:[&rate=0.0026024283855980183]7.065682911447794):[&rate=0.002914011159123044]17.28552377384316,(13:[&rate=0.002218096885907644]19.717249 [...]
+tree STATE_4870000 [&lnP=-22206.056422914004] = [&R] (9:[&rate=0.0033707343438971506]97.80909596125811,(33:[&rate=0.0031747226622915376]76.54370318843056,(((((((((7:[&rate=0.0036404683358530637]15.01732596552696,13:[&rate=0.0035422927645704996]15.01732596552696):[&rate=0.002899548951538126]6.588619828003379,(11:[&rate=0.0035986252628263764]5.293611478466983,12:[&rate=0.003231558173103933]5.293611478466983):[&rate=0.00344415172171562]16.312334315063357):[&rate=0.003065906729828492]3.82355 [...]
+tree STATE_4880000 [&lnP=-22235.90717440479] = [&R] (9:[&rate=0.0028000469925502725]130.2278636045025,(33:[&rate=0.0024475050951885315]108.46140456525673,((((((((((11:[&rate=0.0027551035345267146]7.4394412038262585,12:[&rate=0.002963603559836466]7.4394412038262585):[&rate=0.0022945770439839566]16.116635881487365,13:[&rate=0.002521058486703798]23.556077085313625):[&rate=0.002893293401590596]6.117076045765305,7:[&rate=0.0028501971312428574]29.67315313107893):[&rate=0.002366701112301063]6.6 [...]
+tree STATE_4890000 [&lnP=-22216.09867604451] = [&R] (9:[&rate=0.002285875558958057]145.93542329264022,((((((((8:[&rate=0.0024411138105242176]22.41916483907691,10:[&rate=0.0022972725416552345]22.41916483907691):[&rate=0.002693565972247387]14.931663179495697,(((7:[&rate=0.002537090172737461]24.200966584891823,13:[&rate=0.0022384921969043504]24.200966584891823):[&rate=0.0026742868660537215]3.63382106700913,(11:[&rate=0.0022626021065503465]8.923960366453715,12:[&rate=0.0021422065604553876]8. [...]
+tree STATE_4900000 [&lnP=-22226.573271703823] = [&R] (9:[&rate=0.0023159261275937453]141.58917129207663,(((((((((24:[&rate=0.002641094593346042]13.070694018667195,(15:[&rate=0.0025991635569714375]9.669232920992908,(21:[&rate=0.0028051631378683112]4.564163356284677,(20:[&rate=0.002251458091873954]4.330681053081089,19:[&rate=0.0028051631378683112]4.330681053081089):[&rate=0.0025479275037933527]0.23348230320358798):[&rate=0.0024202656636432736]5.105069564708231):[&rate=0.002782075961448972] [...]
+tree STATE_4910000 [&lnP=-22205.22193336272] = [&R] (9:[&rate=0.0028861822926907537]108.51031952902513,((14:[&rate=0.0033737262797061776]75.23213281408664,((((16:[&rate=0.003292994424928273]30.820746349968104,((((11:[&rate=0.0032733066086718986]5.952224495953296,12:[&rate=0.0026048948650268917]5.952224495953296):[&rate=0.0029761824258867876]19.43931695920914,(13:[&rate=0.0026861424232335392]19.88662198623488,7:[&rate=0.0030587477916432307]19.88662198623488):[&rate=0.0033128465600372444]5 [...]
+tree STATE_4920000 [&lnP=-22211.659110170967] = [&R] (9:[&rate=0.003464349554863494]104.86315480943301,(33:[&rate=0.0024859996863149042]92.53054383931767,(14:[&rate=0.0027515424431998987]79.3128573876324,(((((22:[&rate=0.002732413724179267]21.91533259136202,((((21:[&rate=0.0029058908909849177]3.4562157826700366,(19:[&rate=0.003314867849424762]3.422964448885341,20:[&rate=0.0028666628203045056]3.422964448885341):[&rate=0.003510225739005197]0.0332513337846958):[&rate=0.002808848049313007]7. [...]
+tree STATE_4930000 [&lnP=-22227.397027778374] = [&R] (9:[&rate=0.0023352762473654743]168.62380056964997,(((((((22:[&rate=0.0023811990826638233]27.71973434292106,((23:[&rate=0.0023074368845579143]10.08559603625823,17:[&rate=0.002314240105729549]10.08559603625823):[&rate=0.002232676953781348]17.431641594238208,(((19:[&rate=0.0024632518226303892]4.7181083966000275,(20:[&rate=0.002328111615281306]4.409387271314197,21:[&rate=0.0021583048332231105]4.409387271314197):[&rate=0.002261971078813978 [...]
+tree STATE_4940000 [&lnP=-22214.53089643107] = [&R] (9:[&rate=0.0029521487782895924]124.8340131363707,((((((16:[&rate=0.0027901576412336674]35.219518708625536,((((11:[&rate=0.002880404298464631]6.7846036574860245,12:[&rate=0.0024978313936255183]6.7846036574860245):[&rate=0.00314478595358104]15.380722685114641,13:[&rate=0.0023111743101884323]22.165326342600665):[&rate=0.0030178882967027243]5.557311099249699,7:[&rate=0.002802624726528288]27.722637441850363):[&rate=0.002530176344969558]5.47 [...]
+tree STATE_4950000 [&lnP=-22212.76889198465] = [&R] (9:[&rate=0.0025438070478364916]143.96541795057354,(33:[&rate=0.0035568546937710566]87.88139393538937,(14:[&rate=0.0030850445093758565]80.27970421990105,((((16:[&rate=0.0032300321959550836]34.009394062001405,((8:[&rate=0.003054756738850642]23.17354001404064,10:[&rate=0.002767790348396639]23.17354001404064):[&rate=0.003163156355391068]9.898032784527139,((7:[&rate=0.0030397187923127114]26.511904284557968,((11:[&rate=0.002949499895668596]5 [...]
+tree STATE_4960000 [&lnP=-22219.119774902778] = [&R] (9:[&rate=0.002515635277051509]140.5861191481761,(14:[&rate=0.00276369015777749]95.72491496901608,(((((((23:[&rate=0.0027736185889353157]8.716196214167775,17:[&rate=0.0028215103243415763]8.716196214167775):[&rate=0.002783403035063019]14.2137040894926,((((19:[&rate=0.002783403035063019]3.3413445797036867,(20:[&rate=0.003150543420609328]2.9857679691507824,21:[&rate=0.0033245811604508146]2.9857679691507824):[&rate=0.002858793944554209]0.3 [...]
+tree STATE_4970000 [&lnP=-22214.32466409214] = [&R] (9:[&rate=0.004257902585711602]94.10471070973108,((14:[&rate=0.003297252053942166]65.57037387657283,(((((16:[&rate=0.003917160143118176]26.15430521412131,((3:[&rate=0.0034207639984245642]11.395164837571247,5:[&rate=0.0033688591605757824]11.395164837571247):[&rate=0.0036297500006033778]13.381166213947353,((11:[&rate=0.003744765587319564]4.781305189421153,12:[&rate=0.003961542653630217]4.781305189421153):[&rate=0.00372728006677581]13.5000 [...]
+tree STATE_4980000 [&lnP=-22230.70924088489] = [&R] (9:[&rate=0.0027835934529280926]131.7483589638473,(33:[&rate=0.002626900175705848]124.1049693030993,((((((((3:[&rate=0.0028740000121335514]15.577952355383292,5:[&rate=0.002638350497792378]15.577952355383292):[&rate=0.00281512641090787]17.12156381402166,(7:[&rate=0.0028214511562983722]27.482163049425548,((11:[&rate=0.002777236147927513]6.49733660151137,12:[&rate=0.002853716270455167]6.49733660151137):[&rate=0.0027446006336453435]19.00887 [...]
+tree STATE_4990000 [&lnP=-22221.402725722946] = [&R] (9:[&rate=0.0025367235999839187]153.76986986942217,((14:[&rate=0.002987259763980932]97.64477726727054,((((16:[&rate=0.0023744528970245983]43.06105501913985,(((18:[&rate=0.0023080461821598343]33.52573622471462,(26:[&rate=0.002105000105336257]20.959408543534597,25:[&rate=0.0025083058215164006]20.959408543534597):[&rate=0.0019047993224302765]12.566327681180024):[&rate=0.002987259763980932]3.430993627207201,(2:[&rate=0.0023348470067598164] [...]
+tree STATE_5000000 [&lnP=-22204.30380549576] = [&R] (9:[&rate=0.0032096524705853397]138.15075428367138,(33:[&rate=0.002071623576519196]100.11137503131377,(((((16:[&rate=0.0026362117950991374]37.471396666609714,(((3:[&rate=0.0024843056160957647]15.50924607676263,5:[&rate=0.0023424681878954955]15.50924607676263):[&rate=0.003148359977371939]14.289409703161603,(7:[&rate=0.0032417923520647106]24.897315083337375,((11:[&rate=0.002981768255319541]7.2498643101854015,12:[&rate=0.002586822882343529 [...]
+tree STATE_5010000 [&lnP=-22218.159958366123] = [&R] ((((((((8:[&rate=0.0031435600414614585]22.69646182701321,10:[&rate=0.0026787606615669294]22.69646182701321):[&rate=0.0019151665415677694]17.081803462474227,((3:[&rate=0.002620893528490695]13.13806823954774,5:[&rate=0.00198644189887625]13.13806823954774):[&rate=0.0018640239562139423]22.172212975423314,((11:[&rate=0.0027749393227423265]6.720101951110585,12:[&rate=0.001631976667128167]6.720101951110585):[&rate=0.002491040800291723]20.1730 [...]
+tree STATE_5020000 [&lnP=-22219.380719936667] = [&R] (9:[&rate=0.0028504155356409745]132.526884402515,(14:[&rate=0.002378934302100941]116.29115314643536,(33:[&rate=0.0022853627817424247]99.19372387085564,((((16:[&rate=0.0022168710294639222]39.70934786292905,(((7:[&rate=0.002325007875775106]26.36854828817752,((11:[&rate=0.002475300400808559]6.951650702208589,12:[&rate=0.003253028326386397]6.951650702208589):[&rate=0.002909796914680829]15.129803554026726,13:[&rate=0.002378934302100941]22.0 [...]
+tree STATE_5030000 [&lnP=-22212.555193075903] = [&R] (9:[&rate=0.0026086645261357896]126.63934602074636,(33:[&rate=0.002384485416837245]103.78557390226257,(14:[&rate=0.0023466026612955194]93.7812323796712,((((((((11:[&rate=0.00237242734942493]8.13261580222975,12:[&rate=0.00224239889243708]8.13261580222975):[&rate=0.002506801698999358]20.876287177865322,(7:[&rate=0.003203312344279136]24.94147204625889,13:[&rate=0.00224239889243708]24.94147204625889):[&rate=0.0024181749785675403]4.06743093 [...]
+tree STATE_5040000 [&lnP=-22214.120912926744] = [&R] (9:[&rate=0.0032195035393483983]109.67002637883643,(33:[&rate=0.002699029460704342]88.52805646416857,(14:[&rate=0.003006773560512658]74.9530657483835,((((16:[&rate=0.002699029460704342]31.105120454251264,((8:[&rate=0.0029636858163401155]22.325938136401234,10:[&rate=0.0029747848891587395]22.325938136401234):[&rate=0.003586592240294962]7.770846048485364,(((11:[&rate=0.0033054902313880254]5.16756903641182,12:[&rate=0.003652528427495183]5. [...]
+tree STATE_5050000 [&lnP=-22204.0989590612] = [&R] (9:[&rate=0.0033641728659491096]104.8000085881205,(33:[&rate=0.002280302638962793]80.46207995173538,(14:[&rate=0.003179388836173783]68.15154746575485,(((((16:[&rate=0.003422153840743087]26.548712018900694,(8:[&rate=0.0043814481910208715]15.677843251651217,10:[&rate=0.0040523744475469605]15.677843251651217):[&rate=0.0036778872888055493]10.870868767249478):[&rate=0.00428789583834276]0.9294469933056995,(((11:[&rate=0.003706717223390086]6.00 [...]
+tree STATE_5060000 [&lnP=-22213.208005702163] = [&R] (9:[&rate=0.002574590527146721]147.02048697937718,(14:[&rate=0.002455062207279368]111.93344967609605,(33:[&rate=0.0022016291035047773]99.84566144982267,((((((((11:[&rate=0.0027350173995020293]7.590678401387812,12:[&rate=0.002420596407652252]7.590678401387812):[&rate=0.002443385341686071]23.128818500505837,(7:[&rate=0.0025309510058988617]23.49324752810731,13:[&rate=0.001990582861865712]23.49324752810731):[&rate=0.0023020010178136817]7.2 [...]
+tree STATE_5070000 [&lnP=-22222.57411989855] = [&R] (9:[&rate=0.00254098915656323]146.59264281875312,(33:[&rate=0.0023046371430365683]106.21650524706493,(14:[&rate=0.0031466768790078194]96.18806858847616,((((((18:[&rate=0.0029746805323740367]26.49316879898461,(26:[&rate=0.002778037617559615]16.399848476638436,25:[&rate=0.0028806132481655903]16.399848476638436):[&rate=0.0019880911895728067]10.093320322346173):[&rate=0.0021366703453333945]8.025423099035525,(2:[&rate=0.002558583279176255]28 [...]
+tree STATE_5080000 [&lnP=-22212.363042006633] = [&R] (9:[&rate=0.0023709380956479513]151.8054074009101,(33:[&rate=0.001955852923809429]118.8797604989908,(14:[&rate=0.002595694939160683]99.7521225892356,((((((3:[&rate=0.002734432225265506]14.133331731263455,5:[&rate=0.002130923638722131]14.133331731263455):[&rate=0.0021701288395262843]22.505276608037455,((11:[&rate=0.002595694939160683]7.869822739328593,12:[&rate=0.002678026599779779]7.869822739328593):[&rate=0.002423026401156208]21.45120 [...]
+tree STATE_5090000 [&lnP=-22209.80127858242] = [&R] (9:[&rate=0.003338550423159094]135.81639017361996,(33:[&rate=0.0018900070574660692]127.26383476107736,(((((16:[&rate=0.0023243477805197188]43.96327321987071,((7:[&rate=0.0022844576253450767]33.42380211742437,((11:[&rate=0.0024787666385503674]7.219489868261257,12:[&rate=0.0022844576253450767]7.219489868261257):[&rate=0.0021756339711281538]19.8576470023306,13:[&rate=0.0019922420753311207]27.077136870591858):[&rate=0.0018900070574660692]6. [...]
+tree STATE_5100000 [&lnP=-22217.013085428094] = [&R] (14:[&rate=0.002099775872465361]125.28100562440235,(((((((16:[&rate=0.002214113230970389]39.66054912352548,(8:[&rate=0.0026483265165929155]22.70859878618732,10:[&rate=0.0025073380503302985]22.70859878618732):[&rate=0.0026928694747499152]16.951950337338157):[&rate=0.002435053224978328]0.5728684632477581,((3:[&rate=0.002317103226442308]17.143136547721433,5:[&rate=0.002227286879842147]17.143136547721433):[&rate=0.0020836157836217]20.35936 [...]
+tree STATE_5110000 [&lnP=-22209.56233622363] = [&R] (9:[&rate=0.003261183969366686]103.17138441122789,((((((16:[&rate=0.0038131582309739952]28.821183591028216,(((3:[&rate=0.004129934154775173]9.420273370965557,5:[&rate=0.004400648447974026]9.420273370965557):[&rate=0.003954743770657805]11.706750851490256,((11:[&rate=0.004474886600390112]4.53123719291697,12:[&rate=0.0038600474929724182]4.53123719291697):[&rate=0.003978825009364161]14.41683666463837,(7:[&rate=0.0047706518848007326]15.92297 [...]
+tree STATE_5120000 [&lnP=-22205.686142149203] = [&R] (9:[&rate=0.003345943925122991]112.49718731477742,(((((((((11:[&rate=0.0034024713358075005]5.266651355514912,12:[&rate=0.0027764566770588695]5.266651355514912):[&rate=0.003188441085883816]16.114095054789434,(7:[&rate=0.0029928819635590045]17.402707837512335,13:[&rate=0.0029785053597847153]17.402707837512335):[&rate=0.0030070972138662817]3.9780385727920127):[&rate=0.002886841187284242]4.645131740456318,(3:[&rate=0.003188441085883816]11. [...]
+tree STATE_5130000 [&lnP=-22210.18947324276] = [&R] (9:[&rate=0.002823044828033675]125.55659959441493,((14:[&rate=0.0034145988389824404]75.5123697369385,((((((18:[&rate=0.0029276642634145514]23.255106671672014,(26:[&rate=0.0029758050304352236]14.003435205295181,25:[&rate=0.0027754252450325942]14.003435205295181):[&rate=0.0030008648400286103]9.251671466376832):[&rate=0.0027507068684550703]5.186557827965487,(2:[&rate=0.0029276642634145514]24.835556629436923,((1:[&rate=0.0028810295911407863 [...]
+tree STATE_5140000 [&lnP=-22222.927694571976] = [&R] (9:[&rate=0.0027487580841778284]119.53629798830038,((33:[&rate=0.002947093241184035]83.94724584462713,((((((18:[&rate=0.0030277825208043316]23.311822624087824,(26:[&rate=0.00305185849737107]14.063489397203107,25:[&rate=0.0028818985717905437]14.063489397203107):[&rate=0.0029655111302218807]9.248333226884718):[&rate=0.0029166498527630714]4.598170333809378,(2:[&rate=0.0029607932215445286]25.553833424403752,((1:[&rate=0.0029426455628311296 [...]
+tree STATE_5150000 [&lnP=-22209.704394351298] = [&R] (9:[&rate=0.0037093237550680797]114.18790245944403,(33:[&rate=0.0033958814459149273]81.83708255604597,(14:[&rate=0.0033958814459149273]70.61941156764813,((((((18:[&rate=0.004070859978794771]22.68220546384164,(26:[&rate=0.0036153250815216405]12.950196482334906,25:[&rate=0.00332263378889197]12.950196482334906):[&rate=0.003178268231958158]9.732008981506734):[&rate=0.0029888696974596186]3.177444934675748,(2:[&rate=0.0030380180604789517]23. [...]
+tree STATE_5160000 [&lnP=-22218.241119704162] = [&R] (9:[&rate=0.002999764456215517]131.23555602499,(33:[&rate=0.002999764456215517]106.83238982882912,(14:[&rate=0.002572035461571246]93.25935411637212,((((((18:[&rate=0.0028845150514000694]26.22105334063442,(26:[&rate=0.002393588275398275]17.715258111236267,25:[&rate=0.00251771530518457]17.715258111236267):[&rate=0.0024518996286303166]8.505795229398153):[&rate=0.0027888917483922223]8.02315510933932,(2:[&rate=0.0028057167787638154]31.38036 [...]
+tree STATE_5170000 [&lnP=-22207.87851614278] = [&R] (9:[&rate=0.002361074334045992]119.59248188289621,((14:[&rate=0.0034214556047382112]73.69966591884119,((((((18:[&rate=0.002949288861940233]22.797947302521504,(26:[&rate=0.004032618808825393]10.956070363408227,25:[&rate=0.003657731240142558]10.956070363408227):[&rate=0.0028316302460663365]11.841876939113277):[&rate=0.0027720117040491567]6.039378486389587,(2:[&rate=0.0028316302460663365]23.918767431333677,((1:[&rate=0.00318931147310442]6. [...]
+tree STATE_5180000 [&lnP=-22216.312659986386] = [&R] (9:[&rate=0.002956410084828772]139.546679879265,(33:[&rate=0.002793788623147577]94.05183784272246,(((((((18:[&rate=0.002742972967305372]23.45685825141344,(26:[&rate=0.003272797025136479]14.401396201639342,25:[&rate=0.003356789714962574]14.401396201639342):[&rate=0.0028645436573548877]9.055462049774098):[&rate=0.0023991225958110243]5.202033040544297,(2:[&rate=0.0027294955879080757]25.170071988940883,((1:[&rate=0.0032089725146450723]7.07 [...]
+tree STATE_5190000 [&lnP=-22225.689375133574] = [&R] (9:[&rate=0.003690076528777556]113.16994144675895,((((16:[&rate=0.00200605804752064]42.118383644842424,((((18:[&rate=0.0028813795965125953]23.99905599592805,(26:[&rate=0.003334279662513983]15.85739745682683,25:[&rate=0.002693567202788863]15.85739745682683):[&rate=0.0034509825035266752]8.14165853910122):[&rate=0.0014752596720830273]10.055638106357552,(2:[&rate=0.003111897743867031]25.75479526296421,((1:[&rate=0.003597983130715344]6.1725 [...]
+tree STATE_5200000 [&lnP=-22212.903093179943] = [&R] (9:[&rate=0.0028724556170166074]121.19458442299148,(14:[&rate=0.002296231870766004]85.53569159726983,(((((((18:[&rate=0.004148055336341314]18.00728445951315,(26:[&rate=0.0044705075626172925]12.283471719897419,25:[&rate=0.002673807064101646]12.283471719897419):[&rate=0.003193631130245969]5.723812739615733):[&rate=0.0016881823717988665]13.501144687129553,(2:[&rate=0.0027296631147699243]28.434571654347437,((1:[&rate=0.004740505299651676]5 [...]
+tree STATE_5210000 [&lnP=-22223.78483158925] = [&R] (9:[&rate=0.002832722627443509]143.31863757977322,((14:[&rate=0.0026546771296873883]98.0250394967244,((((16:[&rate=0.002674280011821692]38.72127148887744,((8:[&rate=0.002812470554845533]22.0649605302977,10:[&rate=0.002792409459549399]22.0649605302977):[&rate=0.0032894166803668565]13.827808551784354,(((11:[&rate=0.002832722627443509]5.371820564322669,12:[&rate=0.0021260018559322216]5.371820564322669):[&rate=0.002353600844182889]18.988686 [...]
+tree STATE_5220000 [&lnP=-22223.012874115273] = [&R] (9:[&rate=0.002315450750147485]151.06237030448904,((((((((8:[&rate=0.002457772366930652]22.75527680265781,10:[&rate=0.002608361738807738]22.75527680265781):[&rate=0.0028478125429368204]16.203929470038105,(((11:[&rate=0.0026430682882859196]8.093127719409887,12:[&rate=0.0024699623938786177]8.093127719409887):[&rate=0.0023638783033918848]17.892652148535113,(7:[&rate=0.002784457427632364]22.952398638325356,13:[&rate=0.002432699178938431]22 [...]
+tree STATE_5230000 [&lnP=-22216.857238998848] = [&R] (9:[&rate=0.002601419970642022]134.19677632448446,((14:[&rate=0.0028141196041967546]88.27828628462268,((((((18:[&rate=0.002799811934053731]25.612612413794153,(26:[&rate=0.003303472545988825]13.542059694332352,25:[&rate=0.0029462837395668024]13.542059694332352):[&rate=0.0026620992031126055]12.070552719461801):[&rate=0.002799811934053731]5.81129971515055,(2:[&rate=0.002585719156241221]26.335504442342188,((1:[&rate=0.0025678247530515853]8 [...]
+tree STATE_5240000 [&lnP=-22216.137594812277] = [&R] (9:[&rate=0.0016904618887781647]153.43179345462897,((14:[&rate=0.0027750987004489516]87.545952481707,((((((18:[&rate=0.002442978564243666]32.12999912119132,(26:[&rate=0.002380722425780875]21.697261920639754,25:[&rate=0.0018310491194024415]21.697261920639754):[&rate=0.002510514676474324]10.432737200551564):[&rate=0.0024217257874794246]4.290029941029687,(2:[&rate=0.002266344407928942]29.29863855839726,((1:[&rate=0.00248729789655316]6.654 [...]
+tree STATE_5250000 [&lnP=-22226.023900371718] = [&R] (9:[&rate=0.0022255252316962172]155.76858769599983,((((((((18:[&rate=0.0023862012884044792]26.741096127241825,(26:[&rate=0.0024284068873191666]18.671781505072687,25:[&rate=0.002182551473101961]18.671781505072687):[&rate=0.002095609663802839]8.069314622169138):[&rate=0.0020382979718270343]7.453913907742717,(2:[&rate=0.0020703507403699636]31.74822841618522,((1:[&rate=0.0020382979718270343]8.285303607079607,6:[&rate=0.0024284068873191666] [...]
+tree STATE_5260000 [&lnP=-22220.884938369752] = [&R] (9:[&rate=0.003802902411055901]93.25286346709998,((14:[&rate=0.003581904108334479]69.15146403598969,((((((18:[&rate=0.003502933501374527]19.216461614920725,(26:[&rate=0.004097669529727069]11.529434755550987,25:[&rate=0.0038322334028030613]11.529434755550987):[&rate=0.003896210502149507]7.6870268593697375):[&rate=0.0034140876376950006]5.291679350327623,(2:[&rate=0.003774832969823606]20.339551967207104,((1:[&rate=0.004277668931955461]5.8 [...]
+tree STATE_5270000 [&lnP=-22223.342370558952] = [&R] (9:[&rate=0.0022495391071022447]145.83769981144425,(33:[&rate=0.001822830549178412]123.52061934726865,((((((((18:[&rate=0.002200862586719697]28.487111500953922,(26:[&rate=0.002184109358705352]18.42681995111028,25:[&rate=0.002273668550497182]18.42681995111028):[&rate=0.0018953348479612542]10.060291549843644):[&rate=0.002209102412674309]6.110333232724685,(2:[&rate=0.0021755602916826147]31.22411794731269,((1:[&rate=0.0022253875616873972]8 [...]
+tree STATE_5280000 [&lnP=-22215.538054720593] = [&R] (9:[&rate=0.0024165874210988706]154.05132200685776,(33:[&rate=0.0020422319471379738]109.23740770134165,(((((((18:[&rate=0.002461725166584542]28.36064693725955,(26:[&rate=0.0033399548887758923]17.0569324117241,25:[&rate=0.0020885587144032227]17.0569324117241):[&rate=0.0030508486331466012]11.303714525535447):[&rate=0.002461725166584542]4.417968211513642,(2:[&rate=0.0024908575355345594]30.540944243990104,((1:[&rate=0.0025052298765142963]7 [...]
+tree STATE_5290000 [&lnP=-22219.3579829598] = [&R] (9:[&rate=0.00275609989358064]139.720735213659,(33:[&rate=0.0025186296620174687]101.99467183978055,((((((((18:[&rate=0.0029011791013198547]25.059346601511088,(26:[&rate=0.0031668172734098683]15.725101420861082,25:[&rate=0.002551603614870123]15.725101420861082):[&rate=0.002551603614870123]9.334245180650006):[&rate=0.002999131620226811]7.2224218302753584,(2:[&rate=0.0029878760314360872]26.96631883641404,((1:[&rate=0.0031668172734098683]6.4 [...]
+tree STATE_5300000 [&lnP=-22212.490700307197] = [&R] (9:[&rate=0.0038274480740587966]107.7488776495065,((33:[&rate=0.003540934599923473]68.72147648042153,14:[&rate=0.0035730418263347505]68.72147648042153):[&rate=0.003427489754277636]7.744270647128047,(((((((24:[&rate=0.003556724670914337]12.00836097982347,(((20:[&rate=0.002943949340516357]2.508262033689405,21:[&rate=0.003168030780012372]2.508262033689405):[&rate=0.003350448029647854]0.6260188301438196,19:[&rate=0.0036259612697883926]3.13 [...]
+tree STATE_5310000 [&lnP=-22212.27345416055] = [&R] (9:[&rate=0.0026206370381189515]135.48985773197188,((14:[&rate=0.0028693765999232156]79.94077914366616,((((((23:[&rate=0.0025513165283472805]8.809174513767664,17:[&rate=0.003374399850870783]8.809174513767664):[&rate=0.002708151077673979]12.666653390653876,((24:[&rate=0.0032106336141034244]15.1492576467952,(((20:[&rate=0.0030302334250175336]2.6885953317512095,21:[&rate=0.0028693765999232156]2.6885953317512095):[&rate=0.002620637038118951 [...]
+tree STATE_5320000 [&lnP=-22211.265437022397] = [&R] (9:[&rate=0.0029734628371548503]122.1439343101691,((33:[&rate=0.0031119117283435853]87.94046131546959,(((((((((21:[&rate=0.0024972716798454166]3.5109827023100406,(20:[&rate=0.003345612922580855]2.3488011082354063,19:[&rate=0.0027379198446692253]2.3488011082354063):[&rate=0.0034997818326869793]1.1621815940746343):[&rate=0.0028072628466823705]5.4770223488381555,15:[&rate=0.0027379198446692253]8.988005051148196):[&rate=0.00323462411969328 [...]
+tree STATE_5330000 [&lnP=-22212.158836782743] = [&R] (9:[&rate=0.0030324374906832477]129.30159436280476,(33:[&rate=0.002622046399339781]109.51187281986917,(((((((23:[&rate=0.002502479686047535]8.907092894618378,17:[&rate=0.003336239863861066]8.907092894618378):[&rate=0.0025203205183652808]11.438521876479074,(22:[&rate=0.003596029693497546]19.57299013220842,(((21:[&rate=0.0023389391431755603]5.679665893487142,(19:[&rate=0.0021895753608074113]3.9881637492116573,20:[&rate=0.0021895753608074 [...]
+tree STATE_5340000 [&lnP=-22221.6337481788] = [&R] (9:[&rate=0.004384230311740255]94.77531968919314,(14:[&rate=0.003154894098540919]79.52957445005424,(((((((23:[&rate=0.003884575890264553]8.017296004241746,17:[&rate=0.003495837814054729]8.017296004241746):[&rate=0.0035916608957245943]11.533464412614041,(22:[&rate=0.003576267809846027]18.6604926970217,(((19:[&rate=0.0036818156188070123]4.034280899970486,(20:[&rate=0.0041122869879578585]2.3596849694143067,21:[&rate=0.003495837814054729]2.3 [...]
+tree STATE_5350000 [&lnP=-22217.322142057328] = [&R] (9:[&rate=0.0017083809879094606]182.43675731817353,((14:[&rate=0.0025734153323504756]110.1152315008258,(((((22:[&rate=0.0021673423430990414]27.52657388276412,(((((21:[&rate=0.001964366377680155]4.608771947322291,20:[&rate=0.0021673423430990414]4.608771947322291):[&rate=0.0020308032120844137]0.5697554917486327,19:[&rate=0.0017406755697811287]5.1785274390709235):[&rate=0.0018148779579136452]8.018497472810466,15:[&rate=0.00223112159537369 [...]
+tree STATE_5360000 [&lnP=-22207.227829789248] = [&R] (9:[&rate=0.0032288844759729113]110.89584985996278,((((((((22:[&rate=0.003204560646200602]21.457220039137386,(((19:[&rate=0.0035120901676209655]4.779853793791555,(20:[&rate=0.002693800462225735]3.2149523921225622,21:[&rate=0.0028330826824916047]3.2149523921225622):[&rate=0.0036532299616729613]1.564901401668993):[&rate=0.0027720427479034256]5.789332528696122,15:[&rate=0.002306885245411876]10.569186322487678):[&rate=0.002990757257026399] [...]
+tree STATE_5370000 [&lnP=-22213.037992680187] = [&R] (9:[&rate=0.003702184514108257]96.98469766912123,((((((((8:[&rate=0.003288592017195402]16.187304897123962,10:[&rate=0.003774614555050541]16.187304897123962):[&rate=0.003542976761063367]10.479576949660665,(((11:[&rate=0.003774614555050541]4.418747240187768,12:[&rate=0.003386049614407502]4.418747240187768):[&rate=0.003288592017195402]14.553744068086052,(7:[&rate=0.003718360232622523]16.433562821503923,13:[&rate=0.003095627231621471]16.43 [...]
+tree STATE_5380000 [&lnP=-22212.36836951469] = [&R] (9:[&rate=0.004215931925459813]100.31797361393572,((((((16:[&rate=0.003782024306128632]30.65053035674008,((((23:[&rate=0.0033311802755274203]8.226951399484257,17:[&rate=0.004124351926649709]8.226951399484257):[&rate=0.004297698561870781]7.587073945188617,(((21:[&rate=0.003598940269965555]2.783645051966385,(19:[&rate=0.003554501281848001]2.472826285818843,20:[&rate=0.0032417890707693046]2.472826285818843):[&rate=0.004098679875627549]0.31 [...]
+tree STATE_5390000 [&lnP=-22219.338099335615] = [&R] (9:[&rate=0.002500774884221704]160.78947402270197,(((((((((23:[&rate=0.0029396851203717838]10.051042517436834,17:[&rate=0.0027792140921071486]10.051042517436834):[&rate=0.002524006400602444]12.537899730670159,((((19:[&rate=0.002500774884221704]4.914211159967982,(21:[&rate=0.003201080979245636]3.2648201681083,20:[&rate=0.0030241949470114883]3.2648201681083):[&rate=0.0027302405597204853]1.6493909918596819):[&rate=0.0026794423069289057]5. [...]
+tree STATE_5400000 [&lnP=-22211.30638033257] = [&R] (9:[&rate=0.0023549112657749705]147.11104296747146,(33:[&rate=0.002093428796234794]103.86138459030488,(14:[&rate=0.0032421992338197256]81.01978287450366,(((((((((21:[&rate=0.0027513849706555667]3.6160721611931343,(19:[&rate=0.003170941233648286]2.6074193069951312,20:[&rate=0.0018077759182261483]2.6074193069951312):[&rate=0.0027962093522957386]1.008652854198003):[&rate=0.0028899157461153993]4.838779768058895,15:[&rate=0.00317094123364828 [...]
+tree STATE_5410000 [&lnP=-22224.116677393606] = [&R] (9:[&rate=0.0030113966135955836]130.99530763376657,(33:[&rate=0.002549252947000949]108.99302404811098,((((((((((19:[&rate=0.001637355086795208]5.971888114631868,(21:[&rate=0.001637355086795208]3.190502613465282,20:[&rate=0.0025717784194435405]3.190502613465282):[&rate=0.0021775290925654454]2.7813855011665862):[&rate=0.002670877512371207]3.3799549250648013,15:[&rate=0.0026985735962430256]9.35184303969667):[&rate=0.0021775290925654454]4. [...]
+tree STATE_5420000 [&lnP=-22212.751170585096] = [&R] (9:[&rate=0.0025067393257332798]151.90794167806865,(14:[&rate=0.002466753891114798]110.13939969334702,((((((((23:[&rate=0.0022890094904586067]11.697688118099391,17:[&rate=0.002548283401437343]11.697688118099391):[&rate=0.0024766632223276507]14.140153963774322,(((19:[&rate=0.0026164801727209227]5.622467285499473,(21:[&rate=0.0021698115306321287]3.119925751927801,20:[&rate=0.002548283401437343]3.119925751927801):[&rate=0.0024966399760736 [...]
+tree STATE_5430000 [&lnP=-22224.232683658334] = [&R] (9:[&rate=0.0024376930647155497]167.2482010662499,((((((((8:[&rate=0.0022149788668953314]26.51433989773405,10:[&rate=0.0024376930647155497]26.51433989773405):[&rate=0.0020843372711388326]21.252878618492392,((7:[&rate=0.0021724508270469594]34.02310471634459,(13:[&rate=0.001529637728769088]27.8034516536583,(11:[&rate=0.00214599364383981]10.099502431456461,12:[&rate=0.001529637728769088]10.099502431456461):[&rate=0.002279452708605936]17.7 [...]
+tree STATE_5440000 [&lnP=-22209.70586307304] = [&R] (9:[&rate=0.0025346145314440642]125.53396635844743,((((((((8:[&rate=0.0034836151635754603]19.14082965475526,10:[&rate=0.0032273068830528705]19.14082965475526):[&rate=0.003099870702863906]11.190209708804172,((7:[&rate=0.003345830320149662]23.57491744429367,(13:[&rate=0.002487736280978821]18.647568721003136,(11:[&rate=0.0029180759919942784]5.688730333135418,12:[&rate=0.0028988507992658307]5.688730333135418):[&rate=0.0035369933550324925]12 [...]
+tree STATE_5450000 [&lnP=-22223.69282121362] = [&R] (9:[&rate=0.0022162236066278953]185.45896258617265,((14:[&rate=0.002496279222337159]104.82015391403768,((((((8:[&rate=0.0023636178929080445]27.90477994435297,10:[&rate=0.0020190918945105643]27.90477994435297):[&rate=0.0025154974840306482]15.158172271828615,(((11:[&rate=0.0023265058855297697]7.716664210532199,12:[&rate=0.0023060179631459307]7.716664210532199):[&rate=0.0023195641305107043]27.586689214013973,(13:[&rate=0.002175712519674621 [...]
+tree STATE_5460000 [&lnP=-22214.907928509438] = [&R] (9:[&rate=0.0022823754162756356]150.08438680609652,((14:[&rate=0.0023396738606684528]102.23005816845624,(((((((23:[&rate=0.0029828225982113333]11.117100941572554,17:[&rate=0.0027154228981798095]11.117100941572554):[&rate=0.0017964946959288738]13.924670906630032,((((19:[&rate=0.002419295419486359]5.191442784482652,(20:[&rate=0.0020716645996768095]3.9452279124541354,21:[&rate=0.002163228994920416]3.9452279124541354):[&rate=0.001692229893 [...]
+tree STATE_5470000 [&lnP=-22223.52254159187] = [&R] (9:[&rate=0.002319009250658605]154.05358399667546,((((((((23:[&rate=0.002319009250658605]11.150699740958714,17:[&rate=0.0023994451560895675]11.150699740958714):[&rate=0.0022703632455297338]15.201944656624747,(22:[&rate=0.0024432927608076696]25.402074583161166,((((21:[&rate=0.002378676497222527]3.098163070316471,20:[&rate=0.0024668234904076294]3.098163070316471):[&rate=0.002045430779211065]0.6569963296138517,19:[&rate=0.00217837857712571 [...]
+tree STATE_5480000 [&lnP=-22208.859503729676] = [&R] (9:[&rate=0.00335353819933728]101.6026814978329,(33:[&rate=0.003134855238829244]74.09469783378826,((((((((3:[&rate=0.003389717481432928]11.12091205508916,5:[&rate=0.002362259717296842]11.12091205508916):[&rate=0.003250682150367645]16.67362696263983,(8:[&rate=0.002918559954421125]18.311950571717023,10:[&rate=0.003032438447297633]18.311950571717023):[&rate=0.0037888799963955496]9.482588446011967):[&rate=0.0029585491213811304]0.5680491903 [...]
+tree STATE_5490000 [&lnP=-22210.402109760118] = [&R] (9:[&rate=0.0029616112064698883]121.99632507571968,(33:[&rate=0.0024690622861698684]88.45071315207069,(14:[&rate=0.003468540045632961]74.57727326197363,(((((((3:[&rate=0.003060828960244898]14.656270615179237,5:[&rate=0.002566805076261263]14.656270615179237):[&rate=0.0027343974391683627]14.245658041623075,((7:[&rate=0.0031836264095852973]19.46994608316764,13:[&rate=0.0026474562223283747]19.46994608316764):[&rate=0.0022130232010230113]3. [...]
+tree STATE_5500000 [&lnP=-22224.18295658448] = [&R] (9:[&rate=0.002317607920606383]179.44802789267194,(33:[&rate=0.0018081708288761017]139.2407654867596,((((((((23:[&rate=0.002239297416764552]14.395935229004262,17:[&rate=0.002148483448356042]14.395935229004262):[&rate=0.002662589503458768]12.608479019145229,(((15:[&rate=0.002381555354552456]13.660008629976167,(19:[&rate=0.0015297319845636599]8.285184999481903,(21:[&rate=0.002202250250682428]3.1724887276541334,20:[&rate=0.0017873294754294 [...]
+tree STATE_5510000 [&lnP=-22216.291907725023] = [&R] (9:[&rate=0.0031660155425359657]121.79330292437305,(14:[&rate=0.0028322936210257274]92.17385842431787,((((((((3:[&rate=0.0032333947099676937]12.402482025880825,5:[&rate=0.0031762467658523227]12.402482025880825):[&rate=0.00308354631126541]15.72282059886155,((11:[&rate=0.0032752517962471427]5.065674747818076,12:[&rate=0.003058225718210491]5.065674747818076):[&rate=0.003246499031741375]17.8914264666235,(13:[&rate=0.0027692799832663866]20. [...]
+tree STATE_5520000 [&lnP=-22231.6596852859] = [&R] (9:[&rate=0.0033120888787969245]130.69680035837783,(33:[&rate=0.0017070210693064128]128.00287208450825,(14:[&rate=0.0027558424553877324]96.05191022718891,((((16:[&rate=0.0020326041185701094]45.82074625609716,(((3:[&rate=0.0018938495116826671]22.08160743032578,5:[&rate=0.0018282710327667878]22.08160743032578):[&rate=0.0029572262741397615]16.46599735399192,(7:[&rate=0.002379100670730246]33.34486306365151,(13:[&rate=0.0022669655465695194]23 [...]
+tree STATE_5530000 [&lnP=-22214.881097801044] = [&R] (9:[&rate=0.002181847968248494]155.96667766528998,((14:[&rate=0.003145544329280032]81.76179654620222,(((((22:[&rate=0.0029159423073223728]23.878075300390137,((23:[&rate=0.0028071237634701898]11.221950064970192,17:[&rate=0.0026099742550467087]11.221950064970192):[&rate=0.002525874769438579]10.286731019609972,(((21:[&rate=0.0028818881958302626]3.5530504329755983,(20:[&rate=0.0032497420498701214]2.881482794436159,19:[&rate=0.0027800463379 [...]
+tree STATE_5540000 [&lnP=-22207.736368207752] = [&R] (9:[&rate=0.0022577885685679975]138.41370029313046,(33:[&rate=0.0022577885685679975]109.32119405583609,(14:[&rate=0.0026019327259730737]90.6947117203229,(((((((23:[&rate=0.0029222600134494268]10.022160414831333,17:[&rate=0.0029550444090687697]10.022160414831333):[&rate=0.0025048376434377915]10.41855437251471,((((21:[&rate=0.00257113635072283]1.9848085264375916,20:[&rate=0.0032463324310658083]1.9848085264375916):[&rate=0.002571136350722 [...]
+tree STATE_5550000 [&lnP=-22218.77545035855] = [&R] (9:[&rate=0.0021289919139487593]162.68110408966035,(((((((((15:[&rate=0.0021994366551748384]10.56120976802592,((21:[&rate=0.0021289919139487593]2.5287441067206977,20:[&rate=0.0029427260694422537]2.5287441067206977):[&rate=0.003406673813361188]1.980630540492034,19:[&rate=0.0020009591519327724]4.509374647212732):[&rate=0.002779840430104384]6.051835120813188):[&rate=0.00299569062116731]2.8485047437437547,24:[&rate=0.002462595606827378]13.4 [...]
+tree STATE_5560000 [&lnP=-22218.437246080717] = [&R] (9:[&rate=0.0027667031794226644]135.95949418549824,(33:[&rate=0.00261156338007728]97.8074966250012,(((((16:[&rate=0.002404110569474225]39.32661711566674,(((22:[&rate=0.0028307342908626175]23.979186878862237,(24:[&rate=0.0028172120882769464]12.840120014983485,(((21:[&rate=0.0028307342908626175]3.1817657148166845,20:[&rate=0.0025112419139486277]3.1817657148166845):[&rate=0.002589996521175023]1.0645990777988352,19:[&rate=0.002217019118607 [...]
+tree STATE_5570000 [&lnP=-22218.199086303095] = [&R] (9:[&rate=0.0028992000193803266]113.09105973317659,(33:[&rate=0.0033386926317742183]75.76292065178363,((((((((3:[&rate=0.003809086200534733]10.085035639651197,5:[&rate=0.002371956107474593]10.085035639651197):[&rate=0.0027927514051082677]15.749891184943992,((11:[&rate=0.0031900348820281003]5.000054994302498,12:[&rate=0.0031900348820281003]5.000054994302498):[&rate=0.002847759105248136]15.895904481167651,(7:[&rate=0.003745495314565536]1 [...]
+tree STATE_5580000 [&lnP=-22231.380904776463] = [&R] (9:[&rate=0.0027248977602305057]137.744223086416,(14:[&rate=0.0022029975170347895]106.77342613879034,(((((((8:[&rate=0.002648316209730779]24.857725253783414,10:[&rate=0.002290359718962203]24.857725253783414):[&rate=0.002326798771099991]16.283941178281857,((3:[&rate=0.0025473003153423735]15.22355687751347,5:[&rate=0.0026647341861283664]15.22355687751347):[&rate=0.002436410330015787]23.49889907944294,((11:[&rate=0.0027248977602305057]7.8 [...]
+tree STATE_5590000 [&lnP=-22217.08322034969] = [&R] (9:[&rate=0.00251662886402082]139.5076348479047,((14:[&rate=0.0027070716210236337]84.63881768466062,(((((((11:[&rate=0.002351636153366753]7.749036264697337,12:[&rate=0.0023068773299428228]7.749036264697337):[&rate=0.002784105217503276]17.40113041671984,(7:[&rate=0.0034257914326222797]16.266079600596843,13:[&rate=0.00255270859518417]16.266079600596843):[&rate=0.00191115034672328]8.884087080820336):[&rate=0.0025648060638842137]10.10698911 [...]
+tree STATE_5600000 [&lnP=-22213.66817104392] = [&R] (9:[&rate=0.002865899180363957]134.10662623302096,((14:[&rate=0.003034923717729736]83.3532081189853,(((((((3:[&rate=0.002649400917644812]14.383415095975847,5:[&rate=0.0024180647149544964]14.383415095975847):[&rate=0.00223250017694569]20.3991536691681,((11:[&rate=0.002621147740337993]7.051116918702267,12:[&rate=0.0018596890035386879]7.051116918702267):[&rate=0.002594440314883155]21.965276775743103,(7:[&rate=0.002748745852182277]26.255015 [...]
+tree STATE_5610000 [&lnP=-22204.75966020581] = [&R] (9:[&rate=0.002686566464154741]117.7510338311477,((14:[&rate=0.0032104983811862536]69.45732163690435,((((16:[&rate=0.003008448510498302]32.032304571700045,(((3:[&rate=0.0036895796580127265]10.727425154750991,5:[&rate=0.002532776214955362]10.727425154750991):[&rate=0.0033658996961351386]18.139151321265388,((11:[&rate=0.0031075939106936343]6.024965594492382,12:[&rate=0.0031243259833687097]6.024965594492382):[&rate=0.002488908352869732]17. [...]
+tree STATE_5620000 [&lnP=-22210.3333927942] = [&R] (9:[&rate=0.0031499430901304414]124.62509339625339,((14:[&rate=0.0023674088445587082]86.47481460222987,(((((((3:[&rate=0.0024650477215576805]13.576204954112876,5:[&rate=0.002774436471639522]13.576204954112876):[&rate=0.0029304829445753124]15.540101648044027,((11:[&rate=0.003501227689389126]4.90911965074083,12:[&rate=0.002604776133445206]4.90911965074083):[&rate=0.002945845646521763]19.14721130579071,(7:[&rate=0.003293464629935907]18.9659 [...]
+tree STATE_5630000 [&lnP=-22224.16205664683] = [&R] (9:[&rate=0.0024531869389813887]133.89783749045912,(33:[&rate=0.0024271063787995864]107.6437813036174,(((((((8:[&rate=0.002446248401760654]23.614642680220886,10:[&rate=0.002442688465231209]23.614642680220886):[&rate=0.0023984996012811014]15.972368742771035,((3:[&rate=0.0024531869389813887]15.593877369783513,5:[&rate=0.002458323573431393]15.593877369783513):[&rate=0.0024818845458567476]20.47325399725041,(7:[&rate=0.0024669711972571022]30 [...]
+tree STATE_5640000 [&lnP=-22209.026359758846] = [&R] (9:[&rate=0.0021248740449276303]132.89141278484297,((14:[&rate=0.002544047218814271]87.78965933742361,((((((8:[&rate=0.0026385636420723775]23.175476350149285,10:[&rate=0.002525973477618986]23.175476350149285):[&rate=0.0024903879470749434]16.47759596604917,((3:[&rate=0.0024903879470749434]15.73355696358384,5:[&rate=0.0021026847569479644]15.73355696358384):[&rate=0.0025809153046696125]17.17621876935033,((11:[&rate=0.002508097068918765]6. [...]
+tree STATE_5650000 [&lnP=-22223.06229420842] = [&R] (9:[&rate=0.002228390430292725]152.82325192317143,((14:[&rate=0.0024661293771630465]98.90853637946498,(((((((3:[&rate=0.002033357685349022]20.125175733335777,5:[&rate=0.001691278448802729]20.125175733335777):[&rate=0.0026087112569281312]17.72101958074573,((11:[&rate=0.0024661293771630465]5.836956142364736,12:[&rate=0.0019357882195395503]5.836956142364736):[&rate=0.002128010820497559]21.174944125973546,(7:[&rate=0.002695760721231675]24.3 [...]
+tree STATE_5660000 [&lnP=-22235.532630855338] = [&R] (9:[&rate=0.0019072250408053988]186.41096847710688,((33:[&rate=0.0019907398275541568]131.8245844164453,((((((3:[&rate=0.002116651253519937]17.735867814598315,5:[&rate=0.0019282293127613945]17.735867814598315):[&rate=0.0020742381304141317]27.691718852706444,(7:[&rate=0.002138140383934821]38.92714128748321,((11:[&rate=0.0024270084843294702]7.643051722596313,12:[&rate=0.0021818601526497157]7.643051722596313):[&rate=0.0015321441125373478]2 [...]
+tree STATE_5670000 [&lnP=-22218.02694054907] = [&R] (9:[&rate=0.002727836762338125]139.55857574444653,((33:[&rate=0.002671467351652202]94.59683874773745,((((((((((20:[&rate=0.0033365643816859357]3.0638589400210883,21:[&rate=0.0029856465846940785]3.0638589400210883):[&rate=0.002800723375366241]0.8191640148532429,19:[&rate=0.0024762157239987923]3.883022954874331):[&rate=0.0025419668297241953]8.19393901815033,15:[&rate=0.002841575808821037]12.07696197302466):[&rate=0.002841575808821037]2.14 [...]
+tree STATE_5680000 [&lnP=-22219.37893566537] = [&R] (9:[&rate=0.0024026021932252385]140.09283477752513,((14:[&rate=0.0025569233808994825]88.33050973888007,(((((((23:[&rate=0.0022754055406648682]10.60238141623423,17:[&rate=0.002606906923478691]10.60238141623423):[&rate=0.002255821394802907]11.662296661832599,(15:[&rate=0.0023885122145696877]12.822673922805066,(24:[&rate=0.002658349487693673]12.231778646344779,((20:[&rate=0.002234768449400961]3.6027158212328323,19:[&rate=0.0026850577428666 [...]
+tree STATE_5690000 [&lnP=-22210.84020661516] = [&R] (9:[&rate=0.00318501615424269]109.25253418047794,((14:[&rate=0.00303595958377175]81.4940752011351,((((((22:[&rate=0.002822465197271966]18.011258750628123,(24:[&rate=0.003313165028810552]14.03889850558464,(((21:[&rate=0.003225504314956073]2.4621658179148307,20:[&rate=0.0032902370020323284]2.4621658179148307):[&rate=0.0025618112353551874]2.046485163062642,19:[&rate=0.002588849347116605]4.508650980977473):[&rate=0.00407766223393599]4.86951 [...]
+tree STATE_5700000 [&lnP=-22213.327569652498] = [&R] (9:[&rate=0.003128896730757228]121.03789815907125,((14:[&rate=0.004384382388384556]68.71576776859362,((((((23:[&rate=0.0023658690543361624]11.173595890925544,17:[&rate=0.0029611599967377267]11.173595890925544):[&rate=0.0023658690543361624]10.64211750628213,((24:[&rate=0.005697129955690052]10.726493082485122,(15:[&rate=0.0032008183656069247]7.537631931822191,((20:[&rate=0.0021392307633923036]1.3614576176732622,21:[&rate=0.00204663287670 [...]
+tree STATE_5710000 [&lnP=-22214.964479957485] = [&R] ((((((((((23:[&rate=0.0030843838148290528]9.660767611896686,17:[&rate=0.0024533436423048323]9.660767611896686):[&rate=0.002875266055107474]10.217829334750432,((24:[&rate=0.0031974037032816505]11.738342546925633,(15:[&rate=0.004046223514316612]8.517638423012176,((20:[&rate=0.003314100844893098]1.4071568032253883,21:[&rate=0.0029705130337038263]1.4071568032253883):[&rate=0.002685162420682822]0.3854031596750518,19:[&rate=0.003792014547557 [...]
+tree STATE_5720000 [&lnP=-22214.944840401997] = [&R] (((14:[&rate=0.0030450350314665826]72.0488287886847,((((((23:[&rate=0.0031966278817912256]7.303335775097941,17:[&rate=0.0030343723966026375]7.303335775097941):[&rate=0.002986913468222192]13.560447021744668,((24:[&rate=0.0031030097651130323]12.948011633985493,(15:[&rate=0.003023828874718939]9.714058925269278,((20:[&rate=0.003209910953806844]3.1066405250964633,21:[&rate=0.0029289494921388775]3.1066405250964633):[&rate=0.00313075477541006 [...]
+tree STATE_5730000 [&lnP=-22217.68755779652] = [&R] ((33:[&rate=0.002684193403904205]103.36680022705248,(((((((23:[&rate=0.002253392583706152]12.067680429923648,17:[&rate=0.002350125272083425]12.067680429923648):[&rate=0.0027594439991639177]10.071323744969089,((24:[&rate=0.0026698005926483635]12.378442726815292,(15:[&rate=0.0023142748195537875]9.50687231898717,((20:[&rate=0.0034308539603785117]2.1725385353774183,21:[&rate=0.0024149583024400912]2.1725385353774183):[&rate=0.002759443999163 [...]
+tree STATE_5740000 [&lnP=-22209.794301802714] = [&R] (((((((((23:[&rate=0.0026897787075622105]10.689280771693378,17:[&rate=0.002420365409305722]10.689280771693378):[&rate=0.002288514814230409]13.16096156961157,(22:[&rate=0.002547015925124523]23.295339344517778,(24:[&rate=0.0023695348002415067]15.96344965929059,(15:[&rate=0.002408352976088213]11.50721572502205,((20:[&rate=0.0023830151893974795]2.9017815340594852,19:[&rate=0.002869486687523486]2.9017815340594852):[&rate=0.00244336936320911 [...]
+tree STATE_5750000 [&lnP=-22209.41388445451] = [&R] ((33:[&rate=0.0024346869782605474]104.86898380604133,(((((((8:[&rate=0.0024346869782605474]27.467612360926125,10:[&rate=0.0022190336788202574]27.467612360926125):[&rate=0.0026666444465022485]15.997554957999306,(((11:[&rate=0.0021954311443447517]9.190679176256737,12:[&rate=0.0015912477124099553]9.190679176256737):[&rate=0.0024881551554322917]23.599080652388487,(7:[&rate=0.0027534661685881716]24.91435818622796,13:[&rate=0.0016874368807760 [...]
+tree STATE_5760000 [&lnP=-22217.23410064574] = [&R] ((33:[&rate=0.002395640140248982]100.98062199894008,(14:[&rate=0.0027843853862232057]86.18120027883974,((((((8:[&rate=0.0029627680871864384]17.299627785183542,10:[&rate=0.0032496156202955462]17.299627785183542):[&rate=0.003191035922908935]15.88896840157604,(((11:[&rate=0.002887713514732303]5.958236459902343,12:[&rate=0.0028070015620524314]5.958236459902343):[&rate=0.0026282134715960377]19.22676263599532,(7:[&rate=0.003218543406071848]19 [...]
+tree STATE_5770000 [&lnP=-22234.786580732496] = [&R] ((14:[&rate=0.002184826641653344]128.52281328423723,(33:[&rate=0.0020171595896412565]115.87014949861528,((((((22:[&rate=0.002221897065787198]26.533441074769698,((23:[&rate=0.002369904352220297]11.548926863354051,17:[&rate=0.002497764493796651]11.548926863354051):[&rate=0.0020052272769916896]13.19710216668338,(24:[&rate=0.0024417984441147973]14.87882403944442,((19:[&rate=0.0020171595896412565]5.110561563067233,(21:[&rate=0.0027210585083 [...]
+tree STATE_5780000 [&lnP=-22215.057980600322] = [&R] ((33:[&rate=0.002550705197091982]89.07190105274852,(14:[&rate=0.003557344942123077]73.47804718747942,(((((8:[&rate=0.002957834676472316]18.398179089462374,10:[&rate=0.0029405590027119875]18.398179089462374):[&rate=0.0026160058277749317]14.691280431042536,(16:[&rate=0.0030124069052769762]32.99033742574143,(((11:[&rate=0.002843404785549969]6.43641625070795,12:[&rate=0.002737565620331118]6.43641625070795):[&rate=0.0024586669935192933]19.1 [...]
+tree STATE_5790000 [&lnP=-22217.279079610584] = [&R] ((((((((8:[&rate=0.0028976865087444676]21.597485770412092,10:[&rate=0.002946604967825078]21.597485770412092):[&rate=0.003304368580968396]11.744803082467776,((3:[&rate=0.0026885419560093386]14.53514126376529,5:[&rate=0.0029643369417749746]14.53514126376529):[&rate=0.003046039768517339]14.275166057416506,((11:[&rate=0.003096845212063201]6.901447993453622,12:[&rate=0.002929651145728386]6.901447993453622):[&rate=0.0030236014817104943]18.36 [...]
+tree STATE_5800000 [&lnP=-22215.11930780154] = [&R] ((14:[&rate=0.0022600263421294993]104.66215763226073,(33:[&rate=0.0026852838613545997]92.52165719004624,((((((8:[&rate=0.002792577244776797]22.66408796573961,10:[&rate=0.0028977379977688502]22.66408796573961):[&rate=0.0026041332398855374]14.102933137294045,((3:[&rate=0.002921545161198801]14.897520748089038,5:[&rate=0.001965661271490832]14.897520748089038):[&rate=0.002636004249155291]18.450351476616085,((11:[&rate=0.0020551684498718048]9 [...]
+tree STATE_5810000 [&lnP=-22217.165299535325] = [&R] (((14:[&rate=0.0024051149377217014]103.62830840234238,(((((((3:[&rate=0.0031845207637966715]12.082808359372155,5:[&rate=0.0021945045766745904]12.082808359372155):[&rate=0.0021128896763374806]26.10614242102525,((11:[&rate=0.002766775029652277]7.448721528687838,12:[&rate=0.0026931155013081827]7.448721528687838):[&rate=0.002554901496328621]20.8037160005626,(7:[&rate=0.002766775029652277]23.325296402038685,13:[&rate=0.0021128896763374806]2 [...]
+tree STATE_5820000 [&lnP=-22211.8582389998] = [&R] (((33:[&rate=0.0032159972209911563]74.81351742944992,((((((18:[&rate=0.002913564719505657]24.186294464103987,(26:[&rate=0.003898013948894328]12.68891229859857,25:[&rate=0.002934662753505041]12.68891229859857):[&rate=0.002789353933793802]11.497382165505417):[&rate=0.002934662753505041]3.2606241784413257,(2:[&rate=0.003400056749257101]21.95831637410772,((1:[&rate=0.002528640290557303]8.530239956049886,6:[&rate=0.0024787461119748907]8.53023 [...]
+tree STATE_5830000 [&lnP=-22210.777902367056] = [&R] (((((((16:[&rate=0.002243116230550336]41.490450450716246,(((3:[&rate=0.002525480840070585]15.462401721497876,5:[&rate=0.002456861959600111]15.462401721497876):[&rate=0.002714165612200649]20.4411362132415,((11:[&rate=0.0027447810315119536]6.941766912263059,12:[&rate=0.002224992336145117]6.941766912263059):[&rate=0.002373299933103631]19.061022010568166,(7:[&rate=0.0027780112241390254]22.46393449532541,13:[&rate=0.0021842604993548035]22.4 [...]
+tree STATE_5840000 [&lnP=-22225.328406465982] = [&R] ((33:[&rate=0.0025410933279594442]112.31617819650529,(14:[&rate=0.002629869585667858]99.54239889484623,(((((((3:[&rate=0.002871163547682031]14.309336485378036,5:[&rate=0.0025223074828661833]14.309336485378036):[&rate=0.0028060848531012072]20.759417430666485,((11:[&rate=0.0025928629219656666]6.207285015171891,12:[&rate=0.002528850066170196]6.207285015171891):[&rate=0.002648421907775399]20.549066440001155,(7:[&rate=0.0026437409254317875] [...]
+tree STATE_5850000 [&lnP=-22219.531732543477] = [&R] ((33:[&rate=0.0023510252447046694]109.03633241534949,(((((((8:[&rate=0.0032729481210743653]20.64567800474747,10:[&rate=0.0025863073798680367]20.64567800474747):[&rate=0.0028259642520674392]18.306407912065733,((3:[&rate=0.002870199888791099]13.449911136566024,5:[&rate=0.0026558294535411206]13.449911136566024):[&rate=0.002482584954366655]21.496449387402244,(7:[&rate=0.0025209589347686833]28.500054889259133,((11:[&rate=0.00295016505247654 [...]
+tree STATE_5860000 [&lnP=-22208.503871628] = [&R] ((33:[&rate=0.0026714271923620157]101.33290418055446,(14:[&rate=0.0025151105978317182]86.24830092380626,((((16:[&rate=0.002729943377804585]34.96881676590285,((8:[&rate=0.003256356476061101]19.48380673426958,10:[&rate=0.0027441052633722764]19.48380673426958):[&rate=0.0031403734651953602]14.158980788523923,((3:[&rate=0.00259095806304586]15.301081242605507,5:[&rate=0.002701125451620039]15.301081242605507):[&rate=0.002554842120834213]15.45209 [...]
+tree STATE_5870000 [&lnP=-22214.878113621828] = [&R] ((33:[&rate=0.0019307644538500014]115.5662749790916,(14:[&rate=0.0031633760336533066]82.71967194035108,(((((((3:[&rate=0.002092606035552298]16.921133101813894,5:[&rate=0.0018254173196075058]16.921133101813894):[&rate=0.0028546377848926493]15.214643770374465,((11:[&rate=0.0025479086379269055]7.264247361637869,12:[&rate=0.0021163753594253027]7.264247361637869):[&rate=0.002640280328110608]19.194425654322032,(7:[&rate=0.002292246156113709] [...]
+tree STATE_5880000 [&lnP=-22206.08553918966] = [&R] ((33:[&rate=0.001983313040044135]126.61860238358433,(14:[&rate=0.0026008196834712513]100.9690884937723,(((((((8:[&rate=0.002827776125198891]21.56526797591315,10:[&rate=0.002868258430681706]21.56526797591315):[&rate=0.002827776125198891]12.3210443585224,(3:[&rate=0.003067432160595948]12.850267399790388,5:[&rate=0.002789454649974374]12.850267399790388):[&rate=0.0022236238493654283]21.03604493464516):[&rate=0.0027003585267477113]2.33640160 [...]
+tree STATE_5890000 [&lnP=-22211.47497246434] = [&R] ((33:[&rate=0.0019205171042654607]121.41287109548662,((((((8:[&rate=0.002578110453806206]24.310909217313814,10:[&rate=0.0025647671923613572]24.310909217313814):[&rate=0.0024782329634052283]14.057557517115413,(16:[&rate=0.0025517312947169546]37.35894235737263,((3:[&rate=0.0029539301820692204]13.054139318068597,5:[&rate=0.0021990942387167443]13.054139318068597):[&rate=0.0023977911687663436]21.54958718708384,((11:[&rate=0.00201165388060598 [...]
+tree STATE_5900000 [&lnP=-22226.763824273963] = [&R] ((((((((((18:[&rate=0.0019164007953519986]31.763088980463127,(26:[&rate=0.002751990944165881]17.170245041687625,25:[&rate=0.0019590112608298556]17.170245041687625):[&rate=0.0019590112608298556]14.592843938775502):[&rate=0.002274663650681567]3.340255436257795,(2:[&rate=0.002532757146331978]31.133645539662908,((1:[&rate=0.002751990944165881]7.692699748000993,6:[&rate=0.0019590112608298556]7.692699748000993):[&rate=0.002015444112347856]19 [...]
+tree STATE_5910000 [&lnP=-22223.79490597233] = [&R] ((33:[&rate=0.0025218590879750647]106.83450719165363,(((((16:[&rate=0.0030327747444989]38.56046094352955,((8:[&rate=0.0032968734351789056]21.810455054511387,10:[&rate=0.0030327747444989]21.810455054511387):[&rate=0.002471188867482878]12.854597237721428,((3:[&rate=0.0029690954112443603]12.91837701600915,5:[&rate=0.0027993765163800556]12.91837701600915):[&rate=0.0027799557600708307]18.62381260839864,((11:[&rate=0.003094959940875016]6.3221 [...]
+tree STATE_5920000 [&lnP=-22224.10395158517] = [&R] ((33:[&rate=0.0021569179800258677]134.12382110967994,(((((((18:[&rate=0.002330834492439535]29.268543909341265,(26:[&rate=0.0024983653475825537]15.722738193965355,25:[&rate=0.0025882311070409996]15.722738193965355):[&rate=0.0020882146821824305]13.54580571537591):[&rate=0.0023182408325386396]9.497017404910533,(2:[&rate=0.0022818061442548276]33.312089690925916,((1:[&rate=0.002293753664057656]9.465824174870749,6:[&rate=0.0019543533894898846 [...]
+tree STATE_5930000 [&lnP=-22198.966707384425] = [&R] ((33:[&rate=0.0024138428415823527]84.36748492152223,(((((16:[&rate=0.003281450226988718]30.63286484667011,(((3:[&rate=0.004163122077552924]10.138505434274126,5:[&rate=0.003908510064020155]10.138505434274126):[&rate=0.0031764866756537045]15.091328943472384,((11:[&rate=0.00347660808525765]6.706336773304774,12:[&rate=0.002975982658593539]6.706336773304774):[&rate=0.003323577017138323]14.545913538749843,(7:[&rate=0.004236333764358161]15.39 [...]
+tree STATE_5940000 [&lnP=-22219.164381225353] = [&R] ((33:[&rate=0.002708046678550176]109.62321706290804,(14:[&rate=0.0031691627034363162]94.09897930919524,((((((3:[&rate=0.002397181543149982]16.609972593664935,5:[&rate=0.0019697184104215124]16.609972593664935):[&rate=0.002825232429883964]16.95055118223859,(8:[&rate=0.002517398175325815]22.78253354713266,10:[&rate=0.0027237251932616027]22.78253354713266):[&rate=0.0028831739645192042]10.777990228770864):[&rate=0.0029993689068689754]2.8206 [...]
+tree STATE_5950000 [&lnP=-22220.227842340344] = [&R] ((33:[&rate=0.0015073694906538857]158.5619879890659,(((((((18:[&rate=0.0026462658031806787]28.56049415648264,(26:[&rate=0.0029349386932997646]16.072117925997492,25:[&rate=0.0026228712459702313]16.072117925997492):[&rate=0.0023406336128080104]12.488376230485148):[&rate=0.0024613536749878775]12.011724505772605,(2:[&rate=0.0025374925099485588]33.022834136299664,((1:[&rate=0.0022914329903023917]10.490449392250001,6:[&rate=0.001822484945051 [...]
+tree STATE_5960000 [&lnP=-22214.45231168983] = [&R] ((33:[&rate=0.0024293974673270063]92.24047172784691,(((((((18:[&rate=0.0027740207691327574]25.60019855265416,(26:[&rate=0.003951453584390086]10.604907006188387,25:[&rate=0.003581535075865104]10.604907006188387):[&rate=0.0019893645904923494]14.995291546465772):[&rate=0.002317286610594174]7.006999591371741,(2:[&rate=0.00259212673931985]25.427883845479155,((1:[&rate=0.0031188788704941974]7.822679105449708,6:[&rate=0.002241133518252565]7.82 [...]
+tree STATE_5970000 [&lnP=-22215.1294128208] = [&R] ((33:[&rate=0.0022192628082545384]108.33405927070667,(((((((18:[&rate=0.002161213029929624]28.733985175005177,(26:[&rate=0.0027025581762028343]15.845801877944243,25:[&rate=0.0027615111748540344]15.845801877944243):[&rate=0.0025454728032411273]12.888183297060934):[&rate=0.0024248613284446117]7.030525548377039,(2:[&rate=0.002508600817673303]29.94679036413621,((1:[&rate=0.0025454728032411273]8.413515210005494,6:[&rate=0.002034246774202588]8 [...]
+tree STATE_5980000 [&lnP=-22210.699306359646] = [&R] ((33:[&rate=0.0019367242760861022]135.18908004371042,(((((((18:[&rate=0.0029513340165125616]26.220788884063133,(26:[&rate=0.002508065230544569]17.126971666897497,25:[&rate=0.002795365160917507]17.126971666897497):[&rate=0.0038282062783948807]9.093817217165636):[&rate=0.003186471819828572]4.20456645301666,(2:[&rate=0.003265799045343313]25.265964526686307,((1:[&rate=0.002658127822955298]8.678270527261507,6:[&rate=0.002305286450003654]8.6 [...]
+tree STATE_5990000 [&lnP=-22218.37602885908] = [&R] ((33:[&rate=0.002282229589618701]113.56808168331912,(((((((18:[&rate=0.002354669289089273]28.865136233292684,(26:[&rate=0.0027533247367316034]16.1089135240376,25:[&rate=0.0025744731954907177]16.1089135240376):[&rate=0.002512322307312631]12.756222709255084):[&rate=0.0029740145193183853]4.0630222310182695,(2:[&rate=0.002852587857595225]28.26673606980033,((1:[&rate=0.0026495486288448016]7.943415010872829,6:[&rate=0.002213513977548363]7.943 [...]
+tree STATE_6000000 [&lnP=-22220.712145176127] = [&R] (((((((((18:[&rate=0.0022948272932840786]27.05552253793358,(26:[&rate=0.002855607998130833]15.623113463816846,25:[&rate=0.0024760287162550665]15.623113463816846):[&rate=0.002271972346495329]11.432409074116734):[&rate=0.0020298485887756492]6.9026604870230805,(2:[&rate=0.0026621458487556047]29.144789552426893,((1:[&rate=0.0026621458487556047]6.203559337777462,6:[&rate=0.0027114768488559777]6.203559337777462):[&rate=0.0023176605823335265] [...]
+tree STATE_6010000 [&lnP=-22214.500457678896] = [&R] ((14:[&rate=0.0024199651458944767]101.57671634121064,(((((((18:[&rate=0.0030622836030733402]24.17503296158913,(26:[&rate=0.0032587777704435523]12.581523640424757,25:[&rate=0.002480564696081669]12.581523640424757):[&rate=0.002402870033540414]11.593509321164374):[&rate=0.0032023968137686164]6.463225539732683,(2:[&rate=0.0032587777704435523]25.600695855745634,((1:[&rate=0.0031217467430165076]6.750740658580644,6:[&rate=0.002261582112904501 [...]
+tree STATE_6020000 [&lnP=-22218.368411929638] = [&R] (((((((22:[&rate=0.003051569028550509]21.197039507196674,((24:[&rate=0.003029740142486515]11.07747149022373,(15:[&rate=0.0023757386516124247]8.60497483639992,((21:[&rate=0.0037560993103855915]2.9276855236007213,20:[&rate=0.003231751775421886]2.9276855236007213):[&rate=0.003117625164286714]0.5846337485658815,19:[&rate=0.0029861631924319146]3.512319272166603):[&rate=0.003162483990547421]5.0926555642333176):[&rate=0.0029424620721967432]2. [...]
+tree STATE_6030000 [&lnP=-22202.937034000493] = [&R] (((14:[&rate=0.0040310928548953065]58.830280384568596,(((((((24:[&rate=0.004235689858046763]8.517997004115127,(15:[&rate=0.004535902079633314]6.2881672097906165,(19:[&rate=0.0036690941549593716]2.064624547231299,(21:[&rate=0.004331246199499137]1.8283357920087056,20:[&rate=0.003823388532775883]1.8283357920087056):[&rate=0.004193784495014816]0.23628875522259363):[&rate=0.004135788390305043]4.223542662559318):[&rate=0.004154575389363739]2 [...]
+tree STATE_6040000 [&lnP=-22232.75640130492] = [&R] ((33:[&rate=0.0024766281720643014]125.6476961202,(14:[&rate=0.0027031075352930644]97.94023686509054,(((((((24:[&rate=0.002581518513933403]13.841114776881666,(15:[&rate=0.002513669081875034]9.983063717850497,(19:[&rate=0.002520933778011145]2.769119562817569,(21:[&rate=0.0024875988908225773]1.990593445030662,20:[&rate=0.002562095796603976]1.990593445030662):[&rate=0.002447390422372261]0.778526117786907):[&rate=0.0026926600404898133]7.2139 [...]
+tree STATE_6050000 [&lnP=-22220.188854924716] = [&R] ((14:[&rate=0.0023916016378371483]90.70810934641943,(33:[&rate=0.0029202485270754877]81.85819476751884,(((((((11:[&rate=0.003384860209571966]5.204876137918633,12:[&rate=0.003146415454361108]5.204876137918633):[&rate=0.002566097528248473]19.3868259202672,(7:[&rate=0.003168352852745817]18.912974819029476,13:[&rate=0.0026629650983371043]18.912974819029476):[&rate=0.0028692059809796878]5.6787272391563555):[&rate=0.002606395810652195]5.1582 [...]
+tree STATE_6060000 [&lnP=-22215.03683188466] = [&R] ((33:[&rate=0.002925704034101118]104.29778729937621,(14:[&rate=0.002908423019810548]87.16437498611592,((((((22:[&rate=0.0026992556128225043]23.49616735511042,(24:[&rate=0.002837531378134753]13.81974326473563,(15:[&rate=0.0028734039353556983]10.14315094221078,((19:[&rate=0.0026022061281128373]4.442025325192809,20:[&rate=0.002977012004650165]4.442025325192809):[&rate=0.0026285379980224093]0.044220763286492826,21:[&rate=0.00283753137813475 [...]
+tree STATE_6070000 [&lnP=-22226.8307318717] = [&R] (9:[&rate=0.0022787056164694955]152.74793807664943,(14:[&rate=0.002328221487686696]110.9236767371143,(((((((22:[&rate=0.002415729417695587]27.756336349974763,(24:[&rate=0.0019797365932929657]15.05050354304274,(15:[&rate=0.0022497128830190997]9.213524318877024,(21:[&rate=0.0020835309007907633]3.1988408894389218,(20:[&rate=0.002958219971472942]2.5385402731626634,19:[&rate=0.00201440372351769]2.5385402731626634):[&rate=0.0025880304575463826 [...]
+tree STATE_6080000 [&lnP=-22216.90027061096] = [&R] (9:[&rate=0.0022849671706664247]172.7839360638917,((((((((22:[&rate=0.0024982607325769435]25.941338197920754,(24:[&rate=0.003153531601597807]14.363709990096497,(15:[&rate=0.0015712427607162036]12.165076041615928,(21:[&rate=0.002058848718110275]2.9758863482190083,(19:[&rate=0.002631049370410896]2.4368354325715282,20:[&rate=0.0019699154792249527]2.4368354325715282):[&rate=0.0026522635312309193]0.5390509156474801):[&rate=0.0026742656217079 [...]
+tree STATE_6090000 [&lnP=-22224.330566061388] = [&R] (9:[&rate=0.002193598628069742]143.72135520899423,(((((((((24:[&rate=0.0029930505429929233]15.158885962108645,(15:[&rate=0.0027398486133913403]12.8570448110557,(19:[&rate=0.002682410042589084]4.5377559856591025,(21:[&rate=0.003081142558893267]4.121418660280648,20:[&rate=0.0031109622131054787]4.121418660280648):[&rate=0.002759286543379622]0.41633732537845436):[&rate=0.0025101024560176964]8.319288825396598):[&rate=0.002720636353786783]2. [...]
+tree STATE_6100000 [&lnP=-22211.424586972873] = [&R] (9:[&rate=0.003220459487611983]111.27433677382705,(14:[&rate=0.003013254694306751]88.35718281314351,(33:[&rate=0.003573207396932996]76.74334507160172,(((16:[&rate=0.0029709025361096854]36.23478695603487,(((8:[&rate=0.0030565467501714925]21.237441761102573,10:[&rate=0.0029085273373263667]21.237441761102573):[&rate=0.0026556362614795453]12.848443003567176,(((11:[&rate=0.003951399870088011]4.568652873103929,12:[&rate=0.002887912138305984] [...]
+tree STATE_6110000 [&lnP=-22219.54601173748] = [&R] (9:[&rate=0.0025996894645277345]127.26079064648347,(33:[&rate=0.002587065700013735]98.8394917468782,(((((((24:[&rate=0.002894496611172763]14.862737179912479,(15:[&rate=0.0024769240045233966]11.244344799194147,(19:[&rate=0.0030165730381876616]3.6587744935070163,(21:[&rate=0.0034104955216616127]2.9073901947696292,20:[&rate=0.0029099053271698966]2.9073901947696292):[&rate=0.002894496611172763]0.7513842987373871):[&rate=0.002758901675297008 [...]
+tree STATE_6120000 [&lnP=-22208.11891681101] = [&R] (9:[&rate=0.0029205017503878923]138.34058191674154,(33:[&rate=0.0021932351927925467]103.17855276001359,(14:[&rate=0.002585950219345778]89.79005152126096,((((((22:[&rate=0.0027871981939558053]22.373372120748343,(24:[&rate=0.002713153619954344]10.451140408808419,(15:[&rate=0.0028768832933588426]9.229225219510564,(19:[&rate=0.002453262599942995]5.311215063647527,(21:[&rate=0.002660087800966533]3.887427762678023,20:[&rate=0.0028979353031291 [...]
+tree STATE_6130000 [&lnP=-22213.1370928046] = [&R] (9:[&rate=0.0028376710503828523]114.41087434886087,(14:[&rate=0.002930516324789352]79.83971463879897,(((((16:[&rate=0.002776129985712257]31.35630282576961,((8:[&rate=0.0037854918785322073]18.745243001332504,10:[&rate=0.002863676728286821]18.745243001332504):[&rate=0.003142183756646267]11.401607669095156,(((11:[&rate=0.0032122005304429345]6.031549251514027,12:[&rate=0.0030686568612251055]6.031549251514027):[&rate=0.003142183756646267]16.8 [...]
+tree STATE_6140000 [&lnP=-22216.780898475517] = [&R] (9:[&rate=0.002179039437186622]167.16704846186096,((14:[&rate=0.00229006367360993]98.75976056425064,((((16:[&rate=0.0023528464674510492]41.573669638150506,((8:[&rate=0.002253239228723489]26.156524411447943,10:[&rate=0.0024944934917950925]26.156524411447943):[&rate=0.003017617977319559]13.97859978676468,((7:[&rate=0.0021915942034675864]29.816453724422395,(13:[&rate=0.0018752195451418748]27.767261656682685,(11:[&rate=0.002478659584391166 [...]
+tree STATE_6150000 [&lnP=-22225.694326549503] = [&R] (9:[&rate=0.002780621791314275]144.00219772971923,((33:[&rate=0.0024200340218922564]91.91003832158782,14:[&rate=0.002464239881346265]91.91003832158782):[&rate=0.002806293064349396]20.24179990008696,(((((8:[&rate=0.0027024976056332865]23.411005000002188,10:[&rate=0.002806293064349396]23.411005000002188):[&rate=0.0027515592647737236]15.946328860092745,(16:[&rate=0.0024521705787904262]37.85750974699059,(((7:[&rate=0.0027433520821902517]21 [...]
+tree STATE_6160000 [&lnP=-22220.81177526964] = [&R] (9:[&rate=0.0025893958647691677]158.76950378673217,((14:[&rate=0.0025102730603720554]108.43895141622124,((((((((11:[&rate=0.0024240087753671265]6.551774216035833,12:[&rate=0.0027146822390099338]6.551774216035833):[&rate=0.0025102730603720554]22.815503771202827,(13:[&rate=0.002168361844623177]25.5304030392356,7:[&rate=0.002543647165148802]25.5304030392356):[&rate=0.0025612675284842693]3.836874948003061):[&rate=0.0023587227481515045]8.533 [...]
+tree STATE_6170000 [&lnP=-22210.815065992738] = [&R] (9:[&rate=0.0032683849742048657]102.58301489102897,((14:[&rate=0.003132317018775403]68.2438547524431,((((((22:[&rate=0.0032683849742048657]18.96814878215189,(24:[&rate=0.003574636057086441]13.585971298063521,(15:[&rate=0.003059057206843953]10.002336113814799,((21:[&rate=0.00402416252951719]2.835553246629597,20:[&rate=0.003059057206843953]2.835553246629597):[&rate=0.0031145143911279435]0.769204731231762,19:[&rate=0.003059057206843953]3. [...]
+tree STATE_6180000 [&lnP=-22219.702593090486] = [&R] (9:[&rate=0.0018716870731312904]164.28859546598062,(((((((((23:[&rate=0.001957667689477095]12.987012323161036,17:[&rate=0.0021143281698152096]12.987012323161036):[&rate=0.0018324052744085504]17.97732138302173,(24:[&rate=0.0021488880665803223]20.45997167673881,(15:[&rate=0.002057206466314758]14.036132175257846,((21:[&rate=0.00200086754612176]3.7037081236848164,20:[&rate=0.0022048945490625834]3.7037081236848164):[&rate=0.0018073371614695 [...]
+tree STATE_6190000 [&lnP=-22225.9629983004] = [&R] (9:[&rate=0.00206512695647918]159.7914207778754,(33:[&rate=0.00206512695647918]107.98657583378699,(14:[&rate=0.0028913477686835017]89.10268881127233,(((((8:[&rate=0.0025580773395900208]24.434357914612693,10:[&rate=0.0022695621006084644]24.434357914612693):[&rate=0.002459072719511582]16.021246170568727,(((23:[&rate=0.0025217562289181323]10.059030566906921,17:[&rate=0.0030066480897017753]10.059030566906921):[&rate=0.002570101354770617]14.7 [...]
+tree STATE_6200000 [&lnP=-22230.48751814539] = [&R] (9:[&rate=0.0022485531481366244]157.90298330059034,(33:[&rate=0.0018429942263309125]115.85426237854954,((((((22:[&rate=0.002217202178567826]27.92435865225193,((24:[&rate=0.0018322015750244839]17.459394507195494,(15:[&rate=0.0021644131455143996]14.576642658626492,(19:[&rate=0.0023584644179058265]4.0705567514796055,(20:[&rate=0.00214129295682242]3.4721470701430603,21:[&rate=0.0017970389610534247]3.4721470701430603):[&rate=0.00249680492174 [...]
+tree STATE_6210000 [&lnP=-22214.207240921638] = [&R] (9:[&rate=0.00227637892432645]152.89896808013765,(33:[&rate=0.0020069130086789316]109.22240338466769,((((((22:[&rate=0.0029187920765768463]24.680487080738782,((24:[&rate=0.0031948058668871816]14.727346366229193,(15:[&rate=0.0022391659370058744]11.969570694464956,(21:[&rate=0.0024488076292143663]3.965309541053056,(20:[&rate=0.0037778164602867274]1.8540868885895119,19:[&rate=0.0029392670838256467]1.8540868885895119):[&rate=0.002741922933 [...]
+tree STATE_6220000 [&lnP=-22217.70387602361] = [&R] (9:[&rate=0.002184492157673323]181.73796376233912,(14:[&rate=0.0015501204066786234]141.5579398074383,((((((((24:[&rate=0.0018789042088501228]19.27453898784807,(15:[&rate=0.0017216658876809774]13.546930930577064,((21:[&rate=0.0022982600753946964]2.9965123651937224,20:[&rate=0.0027986211211205175]2.9965123651937224):[&rate=0.0015261076999435706]1.2547883060985248,19:[&rate=0.0016334205599351152]4.251300671292247):[&rate=0.0017048875923325 [...]
+tree STATE_6230000 [&lnP=-22223.471176009385] = [&R] (9:[&rate=0.0028893886506897665]137.97477295643074,((((((((((11:[&rate=0.002356386520040013]7.5363605460212675,12:[&rate=0.001724640829463004]7.5363605460212675):[&rate=0.0019945555329569675]25.00170175449357,13:[&rate=0.0016761923942570259]32.53806230051484):[&rate=0.0021059692958552536]3.471858219546732,7:[&rate=0.0022055295652307822]36.00992052006157):[&rate=0.0018261108650383696]8.788997203537427,((8:[&rate=0.002356386520040013]27. [...]
+tree STATE_6240000 [&lnP=-22206.72455908636] = [&R] (((14:[&rate=0.0025585711657727913]83.4611568005413,((((((((((19:[&rate=0.003309352038898484]2.330802178173013,20:[&rate=0.0029159420165737584]2.330802178173013):[&rate=0.003633797745381637]1.0810377042935846,21:[&rate=0.00326215319129798]3.4118398824665976):[&rate=0.004322257251644765]5.210329745297686,15:[&rate=0.00340775272563617]8.622169627764285):[&rate=0.002668003069355509]2.5064695901512373,24:[&rate=0.0037007827337501554]11.1286 [...]
+tree STATE_6250000 [&lnP=-22219.968889498483] = [&R] (((14:[&rate=0.00221537193611638]107.46861956912063,(((((8:[&rate=0.002467919751369986]25.719787797462683,10:[&rate=0.0022992510311247182]25.719787797462683):[&rate=0.002360413107009225]21.131311921278968,(((11:[&rate=0.0023730250299030167]7.197610788809269,12:[&rate=0.002547975300979018]7.197610788809269):[&rate=0.0019670293760203823]28.4070006252845,(13:[&rate=0.0017682862838611661]29.297213476867586,7:[&rate=0.0022752971757890837]29 [...]
+tree STATE_6260000 [&lnP=-22213.3335785925] = [&R] (((14:[&rate=0.001983871150710292]124.4670607592443,((((((23:[&rate=0.0021908433783079033]11.408834276461672,17:[&rate=0.002821406780660278]11.408834276461672):[&rate=0.002178851234509864]14.042830685342935,(22:[&rate=0.002555725922506423]24.11589582507951,(((21:[&rate=0.0025021034528507637]3.9323990717439323,(20:[&rate=0.0020176078122624607]2.355505061520176,19:[&rate=0.0029704222229439826]2.355505061520176):[&rate=0.001965450985605407] [...]
+tree STATE_6270000 [&lnP=-22228.596766602957] = [&R] (((((((((7:[&rate=0.0019101050793671147]38.08857930979428,(13:[&rate=0.0017136208760136311]31.00600873643966,(11:[&rate=0.002263890760786722]9.018515763747754,12:[&rate=0.001755727240613137]9.018515763747754):[&rate=0.0019347887243448413]21.987492972691907):[&rate=0.00195744442478479]7.082570573354616):[&rate=0.0023416580656898567]9.576631377490827,((3:[&rate=0.0019101050793671147]21.203548127221858,5:[&rate=0.0019227451441778001]21.20 [...]
+tree STATE_6280000 [&lnP=-22230.739225176367] = [&R] ((((((((((11:[&rate=0.002366037541814452]6.975661080001195,12:[&rate=0.002062158442879315]6.975661080001195):[&rate=0.0021477443796138274]24.680511231629424,(13:[&rate=0.0022798080351478143]21.246829193396845,7:[&rate=0.0027819349947534053]21.246829193396845):[&rate=0.002119548096088624]10.409343118233775):[&rate=0.002529728684108564]4.920956221933217,(3:[&rate=0.002549440458214159]16.306066349169797,5:[&rate=0.0021734193446111593]16.3 [...]
+tree STATE_6290000 [&lnP=-22207.82876239225] = [&R] ((((((((23:[&rate=0.0027436397831453914]9.602938420407705,17:[&rate=0.002726206591340406]9.602938420407705):[&rate=0.0024291450471352768]11.744550978585751,((((19:[&rate=0.0028084914210470546]2.5467148242459956,(20:[&rate=0.0030974170030582614]2.3412484276862084,21:[&rate=0.0031438853427818276]2.3412484276862084):[&rate=0.0029958544440077755]0.20546639655978716):[&rate=0.0030974170030582614]8.99598699067066,15:[&rate=0.00274363978314539 [...]
+tree STATE_6300000 [&lnP=-22229.433942359436] = [&R] ((33:[&rate=0.0022660337240053316]115.47009172854955,(((((16:[&rate=0.002308879027644634]42.53924059506922,((8:[&rate=0.0024168075780202965]23.63334125184516,10:[&rate=0.0024441085369491062]23.63334125184516):[&rate=0.00218285405903274]16.81090693435896,((7:[&rate=0.0022301266377637535]32.45806361746486,(13:[&rate=0.0023424487036429863]28.394592923211064,(11:[&rate=0.0022925499681373486]7.01740859011046,12:[&rate=0.0023361888704566897] [...]
+tree STATE_6310000 [&lnP=-22213.529196980737] = [&R] ((14:[&rate=0.002175699851995552]119.21198015164886,((((((8:[&rate=0.0024365035054897694]22.287906063789418,10:[&rate=0.002600413141099494]22.287906063789418):[&rate=0.002240963188703698]19.487187370901527,(((7:[&rate=0.003037202326524365]23.031598529445223,13:[&rate=0.0020559675918064195]23.031598529445223):[&rate=0.0021566048502515045]4.526375730820465,(11:[&rate=0.002356955129355574]8.071979387365436,12:[&rate=0.0023085391531247742] [...]
+tree STATE_6320000 [&lnP=-22217.368739169913] = [&R] (((((((((23:[&rate=0.002933945756353374]11.952810454522764,17:[&rate=0.0026013662449373993]11.952810454522764):[&rate=0.0032602388273998365]9.140881031832881,(((15:[&rate=0.003174135531763762]6.331496301683273,((20:[&rate=0.0029780746834158905]3.2617831085252367,21:[&rate=0.002628330570475813]3.2617831085252367):[&rate=0.0034344854435780655]0.5819791036463458,19:[&rate=0.0026761333116554716]3.8437622121715824):[&rate=0.0027921821982799 [...]
+tree STATE_6330000 [&lnP=-22211.898920234704] = [&R] (((((((((22:[&rate=0.004109888801409916]17.871285285123157,((15:[&rate=0.0037126293243305965]9.543600996838803,((20:[&rate=0.003345312524158927]2.4579397061017776,19:[&rate=0.002890636295511501]2.4579397061017776):[&rate=0.003257981955702537]1.4796925119170332,21:[&rate=0.0034920500403426823]3.937632218018811):[&rate=0.0032792783980607728]5.605968778819992):[&rate=0.00269842402926186]3.1044543876702413,24:[&rate=0.0037523679966022703]1 [...]
+tree STATE_6340000 [&lnP=-22220.28275874653] = [&R] (((((((8:[&rate=0.0023796080951128183]25.36615596703243,10:[&rate=0.002032441815289593]25.36615596703243):[&rate=0.0023796080951128183]20.45923774103026,(((13:[&rate=0.0018640195749127651]27.782691261611163,7:[&rate=0.0022995782171487756]27.782691261611163):[&rate=0.00207583998100541]4.818201376090542,(11:[&rate=0.002199457807521225]8.956670138734058,12:[&rate=0.00207583998100541]8.956670138734058):[&rate=0.0024109135787430255]23.644222 [...]
+tree STATE_6350000 [&lnP=-22216.235402956176] = [&R] ((33:[&rate=0.0021668387118546936]106.47671682247702,(14:[&rate=0.0027174682461238117]99.32342364373476,((((((23:[&rate=0.002187070273026834]10.721995172566968,17:[&rate=0.0021249443787064704]10.721995172566968):[&rate=0.002413134023761845]13.796410451351088,(22:[&rate=0.0023761956252500516]23.901643083527212,((15:[&rate=0.00260696868840601]10.172660228321545,((19:[&rate=0.0018000676948116577]2.7179235789272,20:[&rate=0.002264709922181 [...]
+tree STATE_6360000 [&lnP=-22206.854472223276] = [&R] ((14:[&rate=0.002393697854561524]93.70272076411243,(((((((22:[&rate=0.0038936164600455346]16.004968730079646,((15:[&rate=0.0037342790930182754]5.816890734430937,((21:[&rate=0.0028836292495976823]1.7354222255544087,20:[&rate=0.003391714458831161]1.7354222255544087):[&rate=0.0028125564928423797]1.4390180730685453,19:[&rate=0.003705139751617229]3.174440298622954):[&rate=0.004046185814818559]2.6424504358079832):[&rate=0.003966183005954181] [...]
+tree STATE_6370000 [&lnP=-22213.962768484533] = [&R] ((33:[&rate=0.0022045232544235342]118.75290220542419,((((16:[&rate=0.002243636834083968]43.29690778234636,((((((15:[&rate=0.002301597633580516]10.01155493040647,(19:[&rate=0.0019889784696359237]3.8802942250555894,(21:[&rate=0.002340151092615346]3.2051485201854644,20:[&rate=0.0021443722091059715]3.2051485201854644):[&rate=0.003038156799315235]0.675145704870125):[&rate=0.0029535658370884754]6.131260705350881):[&rate=0.0023594942390603186 [...]
+tree STATE_6380000 [&lnP=-22214.761560249004] = [&R] (((14:[&rate=0.0029103366568447846]88.7708145146059,(((((((24:[&rate=0.0025170093675155438]13.858520464720936,((21:[&rate=0.0030444901277789658]3.8307323433522207,(19:[&rate=0.002638019665157453]2.019099672858592,20:[&rate=0.0029623391705360935]2.019099672858592):[&rate=0.0025170093675155438]1.8116326704936285):[&rate=0.002558358411846147]5.0740923042283015,15:[&rate=0.0028130484119759146]8.904824647580522):[&rate=0.0026058316813168695 [...]
+tree STATE_6390000 [&lnP=-22208.148117106954] = [&R] ((33:[&rate=0.0023378625377736668]109.88411509874283,((((16:[&rate=0.002737088519058361]36.45964639646225,(((8:[&rate=0.0037318607114590545]19.05055488881249,10:[&rate=0.00358505073999338]19.05055488881249):[&rate=0.003690353888990345]14.732611857517423,((3:[&rate=0.0027831617615992005]14.728429405602578,5:[&rate=0.0023378625377736668]14.728429405602578):[&rate=0.003830408207018787]14.089166259171192,((13:[&rate=0.0025617638356917384]2 [...]
+tree STATE_6400000 [&lnP=-22208.77192588535] = [&R] ((33:[&rate=0.0018786813781152337]102.19869081068553,(((((((8:[&rate=0.0033781357887386733]18.969049129602663,10:[&rate=0.0030673659970173626]18.969049129602663):[&rate=0.0034976528746343423]16.392121039906105,((3:[&rate=0.0033074462440123246]12.501983710953766,5:[&rate=0.003014093326766354]12.501983710953766):[&rate=0.0026925914754920687]18.908948210793632,((13:[&rate=0.002668126918815414]21.668700871775084,(11:[&rate=0.003828805350389 [...]
+tree STATE_6410000 [&lnP=-22228.487536434273] = [&R] (((((((16:[&rate=0.0026456067484696075]39.594095540729526,((8:[&rate=0.003035604578267989]19.928954852478636,10:[&rate=0.0026867717294655523]19.928954852478636):[&rate=0.0025988952091567166]17.578633794655847,((3:[&rate=0.0027972259456613253]15.744389862639606,5:[&rate=0.002524105528472444]15.744389862639606):[&rate=0.0028427157356886064]18.356985406587988,((13:[&rate=0.002541452166873472]24.67442524629098,7:[&rate=0.002797225945661325 [...]
+tree STATE_6420000 [&lnP=-22213.60607107689] = [&R] (((14:[&rate=0.0025332365337402325]86.91251308610927,((((16:[&rate=0.0021975306433814614]44.32110953660355,(((3:[&rate=0.0022114405368428344]14.907011657723368,5:[&rate=0.002464920745572243]14.907011657723368):[&rate=0.0021534406367488667]21.81413625960524,((13:[&rate=0.002225046383720148]23.79396780270573,7:[&rate=0.002519031020350812]23.79396780270573):[&rate=0.0021832674608098803]6.009222645983741,(11:[&rate=0.0028346771613687225]7.2 [...]
+tree STATE_6430000 [&lnP=-22208.71531195983] = [&R] (((14:[&rate=0.0027698538462978895]106.10520304266495,((((((8:[&rate=0.0026610689407252407]24.57665651260562,10:[&rate=0.0027103807676189303]24.57665651260562):[&rate=0.002543067831189624]17.03913693005516,((3:[&rate=0.0023595307670420122]16.902041051695335,5:[&rate=0.002128532603461565]16.902041051695335):[&rate=0.0024337379758826725]19.72167423752527,((13:[&rate=0.002106676216354418]25.469236256979475,7:[&rate=0.0024337379758826725]25 [...]
+tree STATE_6440000 [&lnP=-22225.152154739095] = [&R] (((((((16:[&rate=0.002737945079975086]37.02612961110775,(((13:[&rate=0.0025984146382348563]21.78330703978029,(11:[&rate=0.0026796555846590564]7.006135217383446,12:[&rate=0.0026621950370132492]7.006135217383446):[&rate=0.002576173311981449]14.777171822396845):[&rate=0.0027411725657745774]6.2122281787097755,7:[&rate=0.0026748315836661474]27.995535218490065):[&rate=0.002677255073742415]6.490101049301284,((3:[&rate=0.002684404564123517]15. [...]
+tree STATE_6450000 [&lnP=-22228.638213115446] = [&R] ((14:[&rate=0.0027773120477023534]102.01981477011135,(33:[&rate=0.002782563102622336]86.761937459721,(((((((3:[&rate=0.0025045769636301727]15.27709539487226,5:[&rate=0.0029310511714785745]15.27709539487226):[&rate=0.002834272947535355]18.386011264795414,((13:[&rate=0.0027110256142796737]21.052481437601006,7:[&rate=0.0026784254969730486]21.052481437601006):[&rate=0.002531228868695564]5.4279553229350554,(11:[&rate=0.0028100213291190907]7 [...]
+tree STATE_6460000 [&lnP=-22207.073147594427] = [&R] ((14:[&rate=0.0027325599873382063]83.37945411686155,((((((((8:[&rate=0.0034713316968727997]20.0394186871915,10:[&rate=0.0031861167262620745]20.0394186871915):[&rate=0.0038113225167565163]9.833764122017651,(3:[&rate=0.0027325599873382063]15.280557700698232,5:[&rate=0.0025473152713345955]15.280557700698232):[&rate=0.0036570700119609046]14.59262510851092):[&rate=0.0028163402967861595]3.267820551503572,((13:[&rate=0.0027325599873382063]18. [...]
+tree STATE_6470000 [&lnP=-22222.76167739849] = [&R] (9:[&rate=0.0019056445879253555]159.87755856078243,((14:[&rate=0.0022449316963383545]100.60600705110981,((((((18:[&rate=0.0023736593139964115]30.21398682547947,(26:[&rate=0.002557246388947611]19.716801812622457,25:[&rate=0.0024884510574624636]19.716801812622457):[&rate=0.0022755556156515712]10.497185012857013):[&rate=0.00232318613933866]8.390646582189238,(2:[&rate=0.00232318613933866]30.851470852281068,((1:[&rate=0.0022298738004887177]9 [...]
+tree STATE_6480000 [&lnP=-22223.543771414406] = [&R] (9:[&rate=0.0032541946769922905]115.02469307040543,((33:[&rate=0.002955351808458409]74.42833440206434,14:[&rate=0.003326202929094054]74.42833440206434):[&rate=0.002619996823971017]12.659648012592328,((((((18:[&rate=0.002551535423564537]23.875963600080365,(26:[&rate=0.003063143519030988]12.657732006445753,25:[&rate=0.003090374790816537]12.657732006445753):[&rate=0.0036611276844029567]11.218231593634611):[&rate=0.002695823600278437]5.119 [...]
+tree STATE_6490000 [&lnP=-22215.08726269819] = [&R] (9:[&rate=0.002565492602404266]139.5244719321712,(33:[&rate=0.002238273795669377]109.62165905287719,(((((((((13:[&rate=0.0022798108648927656]23.043419218551964,7:[&rate=0.002700412024021525]23.043419218551964):[&rate=0.002401127631675882]6.669998256250576,(11:[&rate=0.0034906306457213924]5.713474719038409,12:[&rate=0.0019406174160055151]5.713474719038409):[&rate=0.0025442902516870933]23.99994275576413):[&rate=0.0023408010957031793]3.818 [...]
+tree STATE_6500000 [&lnP=-22214.596998284902] = [&R] (9:[&rate=0.002710936392374192]139.5757396413583,(33:[&rate=0.002575432431989365]98.43210119822184,((((((((13:[&rate=0.0022302113941765137]22.979098060900494,7:[&rate=0.00277927426023132]22.979098060900494):[&rate=0.0024251396299551633]5.777909650269844,(11:[&rate=0.00207723124901844]7.283427343055914,12:[&rate=0.0020304165424847383]7.283427343055914):[&rate=0.002710936392374192]21.473580368114426):[&rate=0.002409139480065118]4.0538911 [...]
+tree STATE_6510000 [&lnP=-22203.906094138856] = [&R] (9:[&rate=0.002784291421857566]128.89301874629155,(33:[&rate=0.002147079867994224]105.10461544960131,(((((((18:[&rate=0.0028879539822738174]24.76705341014851,(26:[&rate=0.0028042946032534634]16.236289925967313,25:[&rate=0.002071186163413062]16.236289925967313):[&rate=0.003178871793461693]8.530763484181197):[&rate=0.003178871793461693]5.064427573375784,(2:[&rate=0.003114917236883377]25.454105872895084,((1:[&rate=0.002845304899889642]7.6 [...]
+tree STATE_6520000 [&lnP=-22215.811285664415] = [&R] (9:[&rate=0.00210555229320194]176.77051845274195,(33:[&rate=0.0017980106833917545]134.1420650604868,(14:[&rate=0.0021340907941440157]117.27492178203148,((((((18:[&rate=0.001869731706964855]35.8898467735961,(26:[&rate=0.002254588407747721]22.394396527120595,25:[&rate=0.0019178025252488294]22.394396527120595):[&rate=0.0017567015361451112]13.495450246475503):[&rate=0.002034923054973024]7.382773315465087,(2:[&rate=0.0023045220204777972]34. [...]
+tree STATE_6530000 [&lnP=-22205.227263957015] = [&R] (9:[&rate=0.0015071512018978801]189.84835520496097,(((((16:[&rate=0.001950793566129206]46.351543148022195,(((8:[&rate=0.0025360891431219948]29.448210450231528,10:[&rate=0.0019742332270865793]29.448210450231528):[&rate=0.002322915103163627]14.347045496991065,(((13:[&rate=0.0017319744331321955]28.750070298898287,(11:[&rate=0.0029784755437577645]6.412966095680069,12:[&rate=0.0027073034832415894]6.412966095680069):[&rate=0.0026761509545997 [...]
+tree STATE_6540000 [&lnP=-22223.952849623365] = [&R] (9:[&rate=0.0026321140924282236]154.44140467884907,(33:[&rate=0.0023887289756680885]104.06554201622988,(14:[&rate=0.002458688505033001]91.63948931934233,((((((18:[&rate=0.00255752700537096]30.249327932158327,(26:[&rate=0.0025221935836394725]17.497456059040537,25:[&rate=0.0024331958287014603]17.497456059040537):[&rate=0.0026499959652271535]12.75187187311779):[&rate=0.002437235546507512]5.52525340948517,(2:[&rate=0.002570964202261776]28. [...]
+tree STATE_6550000 [&lnP=-22212.077070855168] = [&R] (9:[&rate=0.003416533519387416]104.38184090026108,((14:[&rate=0.0036123811561781685]68.06557656513942,((((((18:[&rate=0.0037782081204308367]19.691288737603998,(26:[&rate=0.0035599787673940434]15.02531655077505,25:[&rate=0.003038392771597115]15.02531655077505):[&rate=0.00411377198219726]4.665972186828947):[&rate=0.0035432066841114456]5.999295441207984,(2:[&rate=0.003355389073464503]21.322236739887245,((1:[&rate=0.003494442094974159]5.98 [...]
+tree STATE_6560000 [&lnP=-22218.821973853548] = [&R] (9:[&rate=0.0019320177602560604]160.22682338187164,((((((((18:[&rate=0.0025429343234888266]30.48948798159144,(26:[&rate=0.00285958744733882]15.612754491370042,25:[&rate=0.0025832989547870475]15.612754491370042):[&rate=0.0020249827121916653]14.8767334902214):[&rate=0.0022803875806812752]7.2187407945925095,(2:[&rate=0.0020249827121916653]32.1039800309812,((1:[&rate=0.00285958744733882]7.462403674982175,6:[&rate=0.0022399835697188256]7.46 [...]
+tree STATE_6570000 [&lnP=-22220.795212489727] = [&R] (9:[&rate=0.00214252338950448]149.67327347913405,((((((((23:[&rate=0.003406858112679138]10.305826568642384,17:[&rate=0.002232916495864631]10.305826568642384):[&rate=0.0024083588343018505]13.199704243411421,((24:[&rate=0.0023057951465170314]11.23562239962026,(15:[&rate=0.002890341419274357]9.350440478316683,(19:[&rate=0.0031106437717535545]3.2548917851759027,(20:[&rate=0.002601765704871715]2.5217196495142318,21:[&rate=0.0027213983805811 [...]
+tree STATE_6580000 [&lnP=-22211.546087972685] = [&R] (9:[&rate=0.002936630897503412]132.33122365688183,(((((((((24:[&rate=0.0026604247725933337]14.527453444602692,(15:[&rate=0.0027562148422436944]12.467445721702415,(19:[&rate=0.002956158385194047]4.977472456740484,(20:[&rate=0.002783996710042556]2.292023354078479,21:[&rate=0.0027062797077973535]2.292023354078479):[&rate=0.0027062797077973535]2.6854491026620053):[&rate=0.0029271280799928684]7.489973264961931):[&rate=0.0028461062787879608] [...]
+tree STATE_6590000 [&lnP=-22231.588855228147] = [&R] (9:[&rate=0.002206444702253253]174.21352230124157,((((((((22:[&rate=0.002366788886941397]27.80537765884453,(23:[&rate=0.002258241634804534]16.629592835128513,17:[&rate=0.002380219949229707]16.629592835128513):[&rate=0.002360541765230118]11.175784823716018):[&rate=0.002201203257888776]0.8385955408797088,(24:[&rate=0.0021509127467993034]14.744082781362538,(15:[&rate=0.0023273906371211648]10.578378849122474,((19:[&rate=0.00225824163480453 [...]
+tree STATE_6600000 [&lnP=-22226.095006688047] = [&R] (9:[&rate=0.0024879011297125873]141.76669192941472,((14:[&rate=0.0026161403527189286]96.1594809441224,(((((22:[&rate=0.002475981343076142]23.739707866765546,((23:[&rate=0.0025230846501620996]11.396175108099122,17:[&rate=0.002276208275101946]11.396175108099122):[&rate=0.0023438156897381806]11.605659626800993,(24:[&rate=0.002628024327088897]13.4485027446292,(15:[&rate=0.0024517121077188148]11.789926277198157,(19:[&rate=0.0025578338552100 [...]
+tree STATE_6610000 [&lnP=-22225.38579237229] = [&R] (9:[&rate=0.0028848700014640774]128.5528158446393,(14:[&rate=0.0031486292360365063]81.25682407787767,(33:[&rate=0.003298468000281103]73.5974824411668,(((((22:[&rate=0.0031486292360365063]19.68612083041939,((23:[&rate=0.003131529646082427]8.596208417521122,17:[&rate=0.003813632784945955]8.596208417521122):[&rate=0.0031486292360365063]9.320645673300657,(24:[&rate=0.0037862663255266112]11.972859544912891,(15:[&rate=0.0036362848126808584]7. [...]
+tree STATE_6620000 [&lnP=-22223.72199897656] = [&R] (9:[&rate=0.002253488584753265]141.7937275700369,(33:[&rate=0.002620052119621975]93.9971827772839,(14:[&rate=0.0032111129424125143]88.5057095365399,(((((((3:[&rate=0.003651669548884632]14.221825744209639,5:[&rate=0.0022234190873867826]14.221825744209639):[&rate=0.0026809949897129925]15.173614199668876,((13:[&rate=0.0026403762763341702]18.28333584524622,7:[&rate=0.0035137213932266084]18.28333584524622):[&rate=0.0038592143525495405]2.1767 [...]
+tree STATE_6630000 [&lnP=-22208.846268556958] = [&R] (9:[&rate=0.003033688002675892]125.62229967393088,(((((((8:[&rate=0.003105348914882439]20.063203665951328,10:[&rate=0.002880295565022564]20.063203665951328):[&rate=0.004374729235814758]10.533787309951677,((3:[&rate=0.0033402777904474952]11.308384070652194,5:[&rate=0.0033942151509919662]11.308384070652194):[&rate=0.0027320439135469533]15.981637827322883,((13:[&rate=0.003007348510344695]18.719305983021368,7:[&rate=0.0042818139552339655]1 [...]
+tree STATE_6640000 [&lnP=-22212.46977275248] = [&R] (((14:[&rate=0.0030272323816075957]83.63463308630868,((((((23:[&rate=0.002468080613704454]9.999856045028109,17:[&rate=0.002418776468661543]9.999856045028109):[&rate=0.003343196507712845]11.717519650556968,((24:[&rate=0.0027949197367125554]12.73166875105061,(15:[&rate=0.0029958305257867302]10.112750677501015,((20:[&rate=0.002594703223094961]3.358917166637001,21:[&rate=0.0029958305257867302]3.358917166637001):[&rate=0.0033921519489330957] [...]
+tree STATE_6650000 [&lnP=-22231.484006351326] = [&R] ((33:[&rate=0.002491835101880645]109.9560442013778,(((((16:[&rate=0.0022367973140655067]40.96944402064537,(((3:[&rate=0.002412120273580695]15.024600280570848,5:[&rate=0.002272005936748336]15.024600280570848):[&rate=0.002509458062480443]19.65863291435436,((13:[&rate=0.00235125564535654]21.593708516944005,(11:[&rate=0.002429859476270468]5.838995013475698,12:[&rate=0.0025427510003672433]5.838995013475698):[&rate=0.0023317403065180103]15.7 [...]
+tree STATE_6660000 [&lnP=-22210.499520516663] = [&R] (((((((((22:[&rate=0.0027968944880215263]19.115890951624202,(24:[&rate=0.003265870602570633]11.275647683657136,(15:[&rate=0.003265870602570633]8.438017979795855,((21:[&rate=0.003467195588075032]1.9251067241562638,20:[&rate=0.003421146838056496]1.9251067241562638):[&rate=0.0028589069125875273]0.15816898249985112,19:[&rate=0.003727177441904219]2.083275706656115):[&rate=0.003206542414035269]6.35474227313974):[&rate=0.0027968944880215263]2 [...]
+tree STATE_6670000 [&lnP=-22224.609444925405] = [&R] ((33:[&rate=0.0026969551992027576]96.92662538620164,(((((((23:[&rate=0.0027617607496661922]10.559109145624344,17:[&rate=0.002683138040550732]10.559109145624344):[&rate=0.002565659246120491]11.356786238114374,((24:[&rate=0.0025355614175768533]11.006526310724059,(15:[&rate=0.002578287110885866]9.971612974164653,(19:[&rate=0.002683138040550732]4.11971802258462,(20:[&rate=0.0028346418431578527]1.896000300480172,21:[&rate=0.0026199311522738 [...]
+tree STATE_6680000 [&lnP=-22205.509751857153] = [&R] (((((((16:[&rate=0.003317129313687605]30.772451038581654,(((3:[&rate=0.0034486867934490433]11.348227377433139,5:[&rate=0.0029286573506563454]11.348227377433139):[&rate=0.0034284692805106273]15.050208020239966,((13:[&rate=0.0026478506613048843]20.103534140626763,7:[&rate=0.00395125521125781]20.103534140626763):[&rate=0.002717976282685265]2.8196568034464775,(11:[&rate=0.0030525265169153424]6.323147193429482,12:[&rate=0.003052526516915342 [...]
+tree STATE_6690000 [&lnP=-22217.136726822668] = [&R] (((((((((22:[&rate=0.0024892199780545514]23.85321069162467,(24:[&rate=0.0024731284692711135]15.204611562940775,(15:[&rate=0.0026108861303164276]10.645860469932408,((21:[&rate=0.0029998831357840646]2.309175439853359,20:[&rate=0.0030525951316037114]2.309175439853359):[&rate=0.002319743490804731]0.525883388813567,19:[&rate=0.0028465660559919854]2.835058828666926):[&rate=0.002179030695284038]7.810801641265483):[&rate=0.002574095734904557]4 [...]
+tree STATE_6700000 [&lnP=-22229.669832343327] = [&R] ((((((((((23:[&rate=0.002360160424001619]13.289687354595037,17:[&rate=0.0024408998275404306]13.289687354595037):[&rate=0.002910936769762032]8.003796853726998,(24:[&rate=0.0031018283741892356]13.273101938820117,(15:[&rate=0.0022669006766959945]11.573340825695851,((19:[&rate=0.0026639653451433157]3.584029984752035,20:[&rate=0.0032923000801926643]3.584029984752035):[&rate=0.0027939819798115926]1.843935685799417,21:[&rate=0.002023846797272 [...]
+tree STATE_6710000 [&lnP=-22233.84531943003] = [&R] (((((((16:[&rate=0.0031471499979297936]30.918118923684926,(((3:[&rate=0.0031968328626415174]11.387530946485928,5:[&rate=0.0033017313501084527]11.387530946485928):[&rate=0.0032285265193059713]16.146635209264037,((13:[&rate=0.0030948075655030595]19.41135482172118,(11:[&rate=0.0033080134435736707]3.958890584893935,12:[&rate=0.0032384134976274907]3.958890584893935):[&rate=0.0032253019513815346]15.452464236827245):[&rate=0.003331629205333901 [...]
+tree STATE_6720000 [&lnP=-22224.30140853838] = [&R] ((33:[&rate=0.002215265285120769]114.6085012576242,(14:[&rate=0.002396747105580578]101.00716931201879,((((((8:[&rate=0.002654818499774262]24.664633101828283,10:[&rate=0.002523013929342698]24.664633101828283):[&rate=0.003256058939982779]14.141343585936529,((3:[&rate=0.0030501791781601846]11.998826238585124,5:[&rate=0.0024352718820039665]11.998826238585124):[&rate=0.002328731940423889]23.76032863733466,((13:[&rate=0.001964757077467311]27. [...]
+tree STATE_6730000 [&lnP=-22218.918137061468] = [&R] ((14:[&rate=0.0018186399269309625]127.7277125712549,(((((16:[&rate=0.0022117300180522666]42.10354601158888,((((24:[&rate=0.002440484261377529]15.320829451270082,(15:[&rate=0.002298438528294232]13.883323485030232,((21:[&rate=0.0026232417510250006]2.9076264728563013,20:[&rate=0.0033007221062650916]2.9076264728563013):[&rate=0.0021035598736482966]2.2737249362930223,19:[&rate=0.0023682866401212675]5.181351409149324):[&rate=0.00306111908611 [...]
+tree STATE_6740000 [&lnP=-22227.478021478317] = [&R] (((14:[&rate=0.0023589371457890668]109.59965222702202,((((16:[&rate=0.0021783665908206187]44.71249066510735,(((3:[&rate=0.002408757195925091]16.006553942777874,5:[&rate=0.002309874550249158]16.006553942777874):[&rate=0.002316539947394264]22.962089297239576,((13:[&rate=0.0021107727307265785]25.83567322451999,7:[&rate=0.002245645471157765]25.83567322451999):[&rate=0.0022583859370450474]5.438252042762851,(11:[&rate=0.0023825298123497252]8 [...]
+tree STATE_6750000 [&lnP=-22211.85309287504] = [&R] (((14:[&rate=0.002698998722426012]80.0780016192849,(((((((3:[&rate=0.002755994134982658]14.488018474993561,5:[&rate=0.0027421040828463647]14.488018474993561):[&rate=0.0028100238013755007]16.75739642472226,((13:[&rate=0.0025201950679499775]21.23815047406928,(11:[&rate=0.002755994134982658]6.384420506497077,12:[&rate=0.003058124316532961]6.384420506497077):[&rate=0.003073935351645514]14.853729967572203):[&rate=0.002652805364717521]3.38655 [...]
+tree STATE_6760000 [&lnP=-22215.606970770805] = [&R] ((33:[&rate=0.0027864076192140328]103.75370234233046,(14:[&rate=0.00226578290003784]93.51186015730579,(((((((3:[&rate=0.0027864076192140328]16.66457455896148,5:[&rate=0.002242917319588977]16.66457455896148):[&rate=0.002763634043078513]19.25353721308414,((13:[&rate=0.0018933441435916801]21.69406979271143,7:[&rate=0.003300251242531507]21.69406979271143):[&rate=0.0025140564528125533]6.138657218572856,(11:[&rate=0.0034924960192979386]5.781 [...]
+tree STATE_6770000 [&lnP=-22219.704301475795] = [&R] ((33:[&rate=0.0020413308805839535]114.31096721369474,(14:[&rate=0.0025678011566730224]97.7139930661762,((((((8:[&rate=0.00249625268637711]23.970394109348167,10:[&rate=0.002509757114326803]23.970394109348167):[&rate=0.0029497410550512818]14.64571523894384,((3:[&rate=0.0030339959284254475]14.121271085172218,5:[&rate=0.0020788527652501106]14.121271085172218):[&rate=0.0019391012243601255]22.885549778488866,((13:[&rate=0.002327500635714954] [...]
+tree STATE_6780000 [&lnP=-22221.10155912409] = [&R] (((33:[&rate=0.002599361685332031]101.15438930216965,((((((8:[&rate=0.00253243381981472]21.076807770249086,10:[&rate=0.002647178426131385]21.076807770249086):[&rate=0.002728457431767948]14.855446884940594,((3:[&rate=0.002445706531261304]14.973693294146857,5:[&rate=0.002576588509683813]14.973693294146857):[&rate=0.002634875051702196]18.231699484117016,((13:[&rate=0.0023654079326920724]22.95973774895985,(11:[&rate=0.0026726754949941167]7. [...]
+tree STATE_6790000 [&lnP=-22214.12209010707] = [&R] (((14:[&rate=0.0030803019441145814]90.17532228174112,((((((((13:[&rate=0.0029913032898592363]18.205969942170324,7:[&rate=0.0037248668897296566]18.205969942170324):[&rate=0.0027817283554586237]5.965298893309761,(11:[&rate=0.0037248668897296566]6.3102046363202255,12:[&rate=0.0025851070057873685]6.3102046363202255):[&rate=0.002728475792743183]17.86106419915986):[&rate=0.00283589351815257]7.454737468983151,(3:[&rate=0.0024884637953754094]17 [...]
+tree STATE_6800000 [&lnP=-22227.499899070266] = [&R] ((14:[&rate=0.00205563891716927]123.04462419273634,(33:[&rate=0.0021338858452961624]107.13675209556249,((((((((13:[&rate=0.0020151043257290464]26.62619821578351,7:[&rate=0.002325639097523437]26.62619821578351):[&rate=0.00202879192429132]8.385709950078812,(11:[&rate=0.002615375274890988]6.681362198988436,12:[&rate=0.0017673996147842877]6.681362198988436):[&rate=0.001957738700449142]28.330545966873885):[&rate=0.0021338858452961624]6.3731 [...]
+tree STATE_6810000 [&lnP=-22210.994816378025] = [&R] ((33:[&rate=0.003182443844437961]83.76393352071344,(14:[&rate=0.0038044196341763506]72.21650568606478,((((((8:[&rate=0.0028960826217148726]21.49525062236965,10:[&rate=0.002559520116614856]21.49525062236965):[&rate=0.0029440016339338177]11.497061749881471,16:[&rate=0.0026908629144747533]32.99231237225112):[&rate=0.00330924161707]2.3583258686956583,(((13:[&rate=0.002532852556156831]18.46110948295273,7:[&rate=0.0030747449936829943]18.4611 [...]
+tree STATE_6820000 [&lnP=-22204.529950300468] = [&R] ((33:[&rate=0.0025365490602965984]100.72828863401782,(14:[&rate=0.0027523682321533433]81.80639939968626,((((16:[&rate=0.0028549504691216827]33.64911627961112,((8:[&rate=0.0034288313258551886]19.979765955707155,10:[&rate=0.0030635752205028736]19.979765955707155):[&rate=0.0028979702069441143]12.35212256347101,(((13:[&rate=0.0028549504691216827]18.350775615964448,7:[&rate=0.003360430036372427]18.350775615964448):[&rate=0.00282616214871719 [...]
+tree STATE_6830000 [&lnP=-22227.088402068748] = [&R] (((14:[&rate=0.0021878508905778114]119.64021293118971,((((((8:[&rate=0.002198005453973932]24.469439742117263,10:[&rate=0.0022484788120939935]24.469439742117263):[&rate=0.0024023007162153774]17.798659465181597,(((13:[&rate=0.002067363267398414]23.674566339659915,(11:[&rate=0.00228944850181543]7.612611707290711,12:[&rate=0.002102776173436364]7.612611707290711):[&rate=0.002669437160425751]16.061954632369204):[&rate=0.00205481748436467]7.7 [...]
+tree STATE_6840000 [&lnP=-22234.269386054777] = [&R] (((((((((2:[&rate=0.0018145951293633334]42.00124622833706,((1:[&rate=0.0018324850855818873]12.861437275573177,6:[&rate=0.0018643774047912183]12.861437275573177):[&rate=0.0017458831372442086]22.383636784284743,4:[&rate=0.0019818174115233704]35.24507405985792):[&rate=0.0018891060886291344]6.756172168479139):[&rate=0.0020217353294946267]4.594826719507083,(18:[&rate=0.001892135703455733]39.737291181107096,(26:[&rate=0.0018475835550508903]2 [...]
+tree STATE_6850000 [&lnP=-22215.249236920095] = [&R] (((((((((2:[&rate=0.0022384062072094294]29.447523245570228,((1:[&rate=0.0032119153383290576]6.989911953494713,6:[&rate=0.0027688420469130144]6.989911953494713):[&rate=0.002551144285790268]16.995459954057857,4:[&rate=0.002947062016504614]23.98537190755257):[&rate=0.002551144285790268]5.462151338017659):[&rate=0.002358445251757578]4.876461753959301,(18:[&rate=0.003035118867946686]22.893328489283896,(26:[&rate=0.002835482112548417]14.5140 [...]
+tree STATE_6860000 [&lnP=-22218.049992852273] = [&R] (((14:[&rate=0.0023491507448423427]96.82478314178262,((((((8:[&rate=0.0018574812930121142]30.779244480961513,10:[&rate=0.00178715890929448]30.779244480961513):[&rate=0.0021583191294975014]12.95637610654487,(((13:[&rate=0.002149833670799712]27.784288345021206,(11:[&rate=0.0024544302173653805]6.9788639158988754,12:[&rate=0.002192397953779178]6.9788639158988754):[&rate=0.002149833670799712]20.805424429122333):[&rate=0.002192397953779178]4 [...]
+tree STATE_6870000 [&lnP=-22212.165477866987] = [&R] (((((((((23:[&rate=0.003273671402941666]8.38728077809361,17:[&rate=0.003046968539775704]8.38728077809361):[&rate=0.0024196076078637843]12.167888732042234,(((15:[&rate=0.002707082593755465]9.040485216316823,(19:[&rate=0.002462936409207886]3.3690149471078823,(20:[&rate=0.00272467707843029]2.559219123874824,21:[&rate=0.0026096630827967086]2.559219123874824):[&rate=0.0021898518168016216]0.8097958232330584):[&rate=0.0024485422473998174]5.67 [...]
+tree STATE_6880000 [&lnP=-22220.906810081706] = [&R] ((((((16:[&rate=0.002077448923924779]51.241413452170015,(((8:[&rate=0.002132213841198855]31.211925912026256,10:[&rate=0.0019701972114013248]31.211925912026256):[&rate=0.0018094544055295395]16.853118995453453,(((13:[&rate=0.0017530241969066984]26.954315103563783,7:[&rate=0.002369101809975571]26.954315103563783):[&rate=0.0020308266383388313]7.680393430312588,(11:[&rate=0.0023531798081265266]8.942775572029069,12:[&rate=0.00163107403024461 [...]
+tree STATE_6890000 [&lnP=-22214.21828200616] = [&R] (((14:[&rate=0.0032752572224504415]76.7200970773131,((((((23:[&rate=0.003205482948636144]8.39558503807431,17:[&rate=0.0027839906237247295]8.39558503807431):[&rate=0.0022619383674947536]15.488623433821408,((((21:[&rate=0.002568554593752571]4.290543874586888,(20:[&rate=0.0027839906237247295]1.4384097374750275,19:[&rate=0.0031026289964966366]1.4384097374750275):[&rate=0.0029569452932013]2.85213413711186):[&rate=0.003679763404011603]6.27221 [...]
+tree STATE_6900000 [&lnP=-22216.30891167208] = [&R] (((14:[&rate=0.0026586357086289566]93.4109217335714,((((((22:[&rate=0.002809590012130192]20.632619027064983,((((21:[&rate=0.002809590012130192]2.7918970411416493,20:[&rate=0.002148481518480303]2.7918970411416493):[&rate=0.002209235037595581]0.9513942768640891,19:[&rate=0.002209235037595581]3.7432913180057383):[&rate=0.0022947863018978614]6.137615744910951,15:[&rate=0.0027267791533621476]9.88090706291669):[&rate=0.0020439600871988116]3.1 [...]
+tree STATE_6910000 [&lnP=-22218.21941231467] = [&R] (((((((((22:[&rate=0.0030632270401915254]22.048334724683944,(((19:[&rate=0.0022422444074550307]3.9007901235448275,(20:[&rate=0.0027834186929877386]2.884172170719688,21:[&rate=0.0021283589311895065]2.884172170719688):[&rate=0.002734040823740236]1.0166179528251393):[&rate=0.0024620569895252315]3.9960265793175136,15:[&rate=0.0028181545566466]7.896816702862341):[&rate=0.002217733237539108]3.422938727706298,24:[&rate=0.0031666681337148097]11 [...]
+tree STATE_6920000 [&lnP=-22210.255653046002] = [&R] (((14:[&rate=0.0030388507381922795]75.25805176360089,((((((22:[&rate=0.0034568461258155684]18.928555045798927,(((21:[&rate=0.0036688440561053327]3.5028137602537597,(20:[&rate=0.0031666143529865305]2.873957277750342,19:[&rate=0.0029942267853666615]2.873957277750342):[&rate=0.0030677208682994518]0.6288564825034175):[&rate=0.002596615526513462]7.204741786045414,15:[&rate=0.0034176467159672975]10.707555546299174):[&rate=0.00398005969801960 [...]
+tree STATE_6930000 [&lnP=-22217.271366139616] = [&R] ((14:[&rate=0.002409390547196465]107.33136225502743,((((((22:[&rate=0.0025732729202493114]26.71510345791263,((23:[&rate=0.001808237589966087]12.12351079299511,17:[&rate=0.002543726905579892]12.12351079299511):[&rate=0.002928157002838371]11.66540269191512,(((19:[&rate=0.002696410441424665]4.019353664483882,(21:[&rate=0.002904423443884576]1.8600218373658202,20:[&rate=0.002696410441424665]1.8600218373658202):[&rate=0.002255118673270527]2. [...]
+tree STATE_6940000 [&lnP=-22219.513660713714] = [&R] ((14:[&rate=0.003854245297574927]75.1914599843656,((((((22:[&rate=0.003444623828702662]19.93083304507258,((23:[&rate=0.0033590400157378496]7.144706959726171,17:[&rate=0.004182344772075537]7.144706959726171):[&rate=0.003600030260692217]9.782087389353826,((15:[&rate=0.0032806742213583612]6.979628513094216,((19:[&rate=0.003708609090028083]1.7574245942759787,20:[&rate=0.003690896792878838]1.7574245942759787):[&rate=0.003762879968277621]0.1 [...]
+tree STATE_6950000 [&lnP=-22211.82679129327] = [&R] ((33:[&rate=0.0016206283103974448]130.0714686470447,(14:[&rate=0.0024208600275995693]109.56223455067229,(((((8:[&rate=0.0025147153471475362]27.653360702842075,10:[&rate=0.0021518868681053216]27.653360702842075):[&rate=0.0027748972839302948]15.581459171452696,((3:[&rate=0.002931429246726483]15.906341241070253,5:[&rate=0.0022100251876963867]15.906341241070253):[&rate=0.0022935653904569975]21.95262889676713,((13:[&rate=0.002055029404704284 [...]
+tree STATE_6960000 [&lnP=-22214.222294933705] = [&R] ((33:[&rate=0.0027275604739001175]93.21596880422896,(14:[&rate=0.002784780779335247]84.66605815179398,((((16:[&rate=0.002883603376874002]38.49812779786875,(((3:[&rate=0.0030750861476604584]14.767545470160824,5:[&rate=0.002469092644073736]14.767545470160824):[&rate=0.0025852042797222356]18.569080081572224,((13:[&rate=0.002331225723136213]20.609421832641317,(11:[&rate=0.0031520019516080434]6.329904867435438,12:[&rate=0.002769999961483253 [...]
+tree STATE_6970000 [&lnP=-22218.28748456082] = [&R] ((33:[&rate=0.0018947322936302964]116.2414559454746,(14:[&rate=0.0021744525446268535]104.37577576128547,((((16:[&rate=0.0017677304504016163]47.55009670690651,((8:[&rate=0.002386193313252708]28.732587467690443,10:[&rate=0.0021016671638842462]28.732587467690443):[&rate=0.0017677304504016163]16.833015215999694,((3:[&rate=0.002211254795582055]18.126733028638736,5:[&rate=0.0018690426897286661]18.126733028638736):[&rate=0.0022758783041121012] [...]
+tree STATE_6980000 [&lnP=-22210.126566123963] = [&R] ((14:[&rate=0.0030530987731592018]87.83052256563171,(33:[&rate=0.0034330315823694678]79.64279788001272,((((((23:[&rate=0.003000106215844482]9.775188335641909,17:[&rate=0.003070794415832624]9.775188335641909):[&rate=0.003408286652814674]11.589905098537924,(22:[&rate=0.0029645750475029518]18.878945058202714,(24:[&rate=0.0036485424745659432]11.615157833236541,(15:[&rate=0.0036111484387085484]8.62522146479962,(21:[&rate=0.00281498285502018 [...]
+tree STATE_6990000 [&lnP=-22205.36970075579] = [&R] ((33:[&rate=0.0027993831784864425]85.61337785206756,(14:[&rate=0.003291165084339844]77.33957986585018,((((((23:[&rate=0.0025827888125862846]12.05400394731769,17:[&rate=0.0026403442275562093]12.05400394731769):[&rate=0.0025827888125862846]9.200592175426982,(22:[&rate=0.003126968393899947]19.541538161759096,(24:[&rate=0.004092235079172441]11.522479065387106,(15:[&rate=0.0029995222216210237]9.739607488117723,(19:[&rate=0.002552659167272867 [...]
+tree STATE_7000000 [&lnP=-22223.818686320785] = [&R] (((14:[&rate=0.0036054766186162414]61.862653643700526,((((((23:[&rate=0.003516386691865055]8.324521094623293,17:[&rate=0.00357537207421498]8.324521094623293):[&rate=0.0035418291540516675]8.216923587317002,(22:[&rate=0.00362706166743737]16.197507546958352,(((19:[&rate=0.003567051225087781]2.5549912212126933,(20:[&rate=0.0035691483258611925]1.4520921614451723,21:[&rate=0.003486884523713149]1.4520921614451723):[&rate=0.0036461216352274067 [...]
+tree STATE_7010000 [&lnP=-22205.19695074221] = [&R] ((14:[&rate=0.0026336644283843054]94.98523610757111,(33:[&rate=0.0028507638638304153]87.74449737578621,((((((((((19:[&rate=0.0022830516965160167]3.097714965347918,20:[&rate=0.0031160977266822733]3.097714965347918):[&rate=0.002579832127246394]0.8300169226639209,21:[&rate=0.0021886323167811202]3.927731888011839):[&rate=0.0026869007687359077]7.9088289775436005,15:[&rate=0.002404318552698232]11.83656086555544):[&rate=0.0026514361600237534]0 [...]
+tree STATE_7020000 [&lnP=-22219.81430862965] = [&R] ((33:[&rate=0.0024041343713581405]115.88427539408576,(((((((23:[&rate=0.002020541441348714]10.684942867231854,17:[&rate=0.0021695185242349324]10.684942867231854):[&rate=0.0023400175119088085]12.461782919114018,(22:[&rate=0.0025801255406489097]23.065600593963094,(((19:[&rate=0.0025104028396283422]3.875265792100284,(21:[&rate=0.00276972118021453]2.2812989707577986,20:[&rate=0.0026771060033338456]2.2812989707577986):[&rate=0.00242508641414 [...]
+tree STATE_7030000 [&lnP=-22225.47098089026] = [&R] (((14:[&rate=0.0027829342172590076]79.36977093570981,((((((8:[&rate=0.002922114875210498]19.070740853891508,10:[&rate=0.0029916034877587715]19.070740853891508):[&rate=0.0032737232062577185]12.148264202800416,16:[&rate=0.002761946910855851]31.219005056691923):[&rate=0.002717695753117596]1.3985161455148223,((3:[&rate=0.0028312976752481187]13.700136831895133,5:[&rate=0.00289741209861553]13.700136831895133):[&rate=0.0030038495475986924]17.6 [...]
+tree STATE_7040000 [&lnP=-22208.09533947364] = [&R] (((14:[&rate=0.0024848409746099287]106.4418972846746,(((((((3:[&rate=0.0024522221398757854]18.63670608301717,5:[&rate=0.0018364509042418959]18.63670608301717):[&rate=0.0026912302903919446]16.891443286904238,((13:[&rate=0.0029275903571086854]19.806419249429457,7:[&rate=0.0034780253565032007]19.806419249429457):[&rate=0.0026912302903919446]4.859576285569073,(11:[&rate=0.003406844153129286]6.073247838247763,12:[&rate=0.0029275903571086854] [...]
+tree STATE_7050000 [&lnP=-22217.79256444772] = [&R] ((33:[&rate=0.0022372536790036734]113.96854410798987,(14:[&rate=0.002524222433128187]91.67479260418466,((((16:[&rate=0.002439304460825502]33.10627762914235,((8:[&rate=0.002957223230362092]21.369021916196314,10:[&rate=0.002524222433128187]21.369021916196314):[&rate=0.003626276465024668]10.85233317983376,((3:[&rate=0.003492859203642432]11.902540917222993,5:[&rate=0.0024611053167491927]11.902540917222993):[&rate=0.0027240476712948386]19.47 [...]
+tree STATE_7060000 [&lnP=-22212.345764702695] = [&R] (((((((((8:[&rate=0.0031019369410832847]19.434362830465265,10:[&rate=0.0033919962337465026]19.434362830465265):[&rate=0.00320785483803277]10.736680756578181,(((13:[&rate=0.002680989911139536]16.614291774971022,7:[&rate=0.003538257927064598]16.614291774971022):[&rate=0.002448137983341035]6.139802739411053,(11:[&rate=0.003279764978402748]6.556665132596384,12:[&rate=0.002792495073442495]6.556665132596384):[&rate=0.002477109648143715]16.19 [...]
+tree STATE_7070000 [&lnP=-22208.401183418217] = [&R] ((((((((((23:[&rate=0.002955594118488493]9.190019066422291,17:[&rate=0.0030687046087517393]9.190019066422291):[&rate=0.003440248262589992]10.608565503393148,((((21:[&rate=0.004120281044217704]2.039935778837908,(19:[&rate=0.0032539760526083057]1.6551357165148763,20:[&rate=0.004542316314280205]1.6551357165148763):[&rate=0.004274980376629689]0.3848000623230319):[&rate=0.0031729743531790505]4.59719234508375,15:[&rate=0.002955594118488493]6 [...]
+tree STATE_7080000 [&lnP=-22221.699474288027] = [&R] ((((((((22:[&rate=0.0026780378832071392]24.80994763272519,((23:[&rate=0.0026332603097162332]9.5300467626085,17:[&rate=0.002582912538958302]9.5300467626085):[&rate=0.0026009063844483335]13.922995238499915,(((21:[&rate=0.0027133112071850525]4.622012191295159,(19:[&rate=0.0028845903577256714]4.1061602628517635,20:[&rate=0.002569807396273365]4.1061602628517635):[&rate=0.002562805621709993]0.5158519284433956):[&rate=0.0025392383475860013]8. [...]
+tree STATE_7090000 [&lnP=-22222.879466833154] = [&R] (9:[&rate=0.0038430322914959777]97.52390796931941,(14:[&rate=0.0034039555666404847]94.95512103401548,(33:[&rate=0.002961233812558487]90.02207541170183,((((((8:[&rate=0.0032714761038080235]23.665311314150394,10:[&rate=0.0032088839751741247]23.665311314150394):[&rate=0.003894137410063396]6.551347967792903,(((13:[&rate=0.003098955602515057]19.00590660137067,7:[&rate=0.0037990298287269627]19.00590660137067):[&rate=0.003098955602515057]4.65 [...]
+tree STATE_7100000 [&lnP=-22219.32627876679] = [&R] (9:[&rate=0.0028658892421702742]130.87958372422182,(14:[&rate=0.0024941525447422248]108.72038814060956,(33:[&rate=0.003131458160612651]93.53036378893299,(((((((23:[&rate=0.0028658892421702742]9.239071775204398,17:[&rate=0.003131458160612651]9.239071775204398):[&rate=0.0019191003612114904]15.152790072554073,(22:[&rate=0.0026936941185367437]21.9346356611005,((((19:[&rate=0.003131458160612651]2.673742987948202,20:[&rate=0.00387725890319409 [...]
+tree STATE_7110000 [&lnP=-22210.02831752852] = [&R] (9:[&rate=0.0029006607738201557]113.58634549062309,(33:[&rate=0.002536354505803596]102.77853956525033,(((((((22:[&rate=0.003049020453656319]20.215323261034147,((((21:[&rate=0.0022520981410974203]4.569057206015064,20:[&rate=0.002922913423496011]4.569057206015064):[&rate=0.002338300242503798]0.740699651036608,19:[&rate=0.0022750045040361667]5.3097568570516716):[&rate=0.0031433434668928567]4.408423285944962,15:[&rate=0.003179873004900906]9 [...]
+tree STATE_7120000 [&lnP=-22214.645793423697] = [&R] (9:[&rate=0.0026582159687590806]136.63908949989883,(33:[&rate=0.0025616236514542892]105.21584980653316,((((((((13:[&rate=0.003073755732979303]20.534562039112153,(11:[&rate=0.003549641660802306]3.98428386263976,12:[&rate=0.003464424881695038]3.98428386263976):[&rate=0.0031070568595498588]16.550278176472393):[&rate=0.0027755180139637523]4.393213090103782,7:[&rate=0.002787706496439541]24.927775129215934):[&rate=0.0027632340133404813]4.757 [...]
+tree STATE_7130000 [&lnP=-22227.311256272253] = [&R] (9:[&rate=0.0019857763469333066]165.79380748396554,((33:[&rate=0.0024767638196044094]102.65135525656238,(((((((((21:[&rate=0.002622461828136071]2.4659266690904422,20:[&rate=0.002184358328484293]2.4659266690904422):[&rate=0.0022350398805909017]1.3271281201076315,19:[&rate=0.0025691555759916633]3.7930547891980737):[&rate=0.0025496042560779215]5.68969897906006,(24:[&rate=0.00224927578433012]8.861005565823868,15:[&rate=0.002512401193828005 [...]
+tree STATE_7140000 [&lnP=-22230.47022573118] = [&R] (9:[&rate=0.0028013647224008923]134.2275588520166,(((((((22:[&rate=0.0026992892966952236]21.346168064237585,(24:[&rate=0.0028411715375813752]14.795470734991515,((19:[&rate=0.002891434045445182]2.9976812192646833,(20:[&rate=0.002765636793146754]2.0356700084654724,21:[&rate=0.0027118133055074093]2.0356700084654724):[&rate=0.0028386012234675904]0.9620112107992109):[&rate=0.0029149753935644456]6.367398677017285,15:[&rate=0.00282179217660738 [...]
+tree STATE_7150000 [&lnP=-22219.294790617183] = [&R] (9:[&rate=0.0028656238200533255]133.0942247400168,(14:[&rate=0.003099710505017088]85.95366720640165,((((((((24:[&rate=0.003372348362565885]12.971508958236429,(((19:[&rate=0.003248662319883961]2.324365396812511,20:[&rate=0.0031926224168847264]2.324365396812511):[&rate=0.0029233356818544934]1.5852141968876596,21:[&rate=0.0027649809157639232]3.9095795937001707):[&rate=0.0030382001801374505]6.651476710813821,15:[&rate=0.002735835728725907] [...]
+tree STATE_7160000 [&lnP=-22226.217000635595] = [&R] (9:[&rate=0.0017666088928056005]171.87815968980414,(33:[&rate=0.002541305823979291]106.3871431481683,(14:[&rate=0.002500027659538563]104.90273552144939,((((((3:[&rate=0.002428590429326839]20.064865419440046,5:[&rate=0.0018748810246359442]20.064865419440046):[&rate=0.0026754463935741795]17.877510526560272,((13:[&rate=0.002067827553030829]22.140514132382318,7:[&rate=0.0029663902227009082]22.140514132382318):[&rate=0.002428590429326839]8. [...]
+tree STATE_7170000 [&lnP=-22203.985055446046] = [&R] (9:[&rate=0.002772996333345848]126.22718096909712,((14:[&rate=0.002982793423780992]81.20555483224368,((((((2:[&rate=0.003192837842997058]24.663078886234903,((1:[&rate=0.002567957126323204]7.497599893367305,6:[&rate=0.0025300751951750694]7.497599893367305):[&rate=0.002982793423780992]12.626769637919498,4:[&rate=0.0034125463820978376]20.124369531286803):[&rate=0.003050664980114387]4.5387093549481):[&rate=0.0028751082750589725]2.800994263 [...]
+tree STATE_7180000 [&lnP=-22222.050846450933] = [&R] (9:[&rate=0.0024023500887330627]140.18948316484645,((33:[&rate=0.0027371406023904343]89.46517812229652,((((((8:[&rate=0.002359906428506928]23.993828178376784,10:[&rate=0.0021850256470620135]23.993828178376784):[&rate=0.0020367590281652904]17.576950033634628,((3:[&rate=0.0021738191260360184]17.68235156025111,5:[&rate=0.0022382494547685213]17.68235156025111):[&rate=0.002676816750996365]16.05106221833964,((13:[&rate=0.0020179607440839464] [...]
+tree STATE_7190000 [&lnP=-22210.646861598747] = [&R] (9:[&rate=0.0026088097749266403]135.94044050995714,((14:[&rate=0.0034080560892327133]69.67317890031558,((((16:[&rate=0.0027390300286705377]32.40381128655881,(((2:[&rate=0.0032777974793244445]22.250502670610622,((1:[&rate=0.00334172379029637]6.437143603892914,6:[&rate=0.002978259500282689]6.437143603892914):[&rate=0.0034080560892327133]10.200099277332253,4:[&rate=0.004861311262510347]16.637242881225166):[&rate=0.003185322924203]5.613259 [...]
+tree STATE_7200000 [&lnP=-22200.450901100932] = [&R] (9:[&rate=0.003610332089154842]99.21809729377125,(33:[&rate=0.0032543349164828998]66.88475502495844,(14:[&rate=0.004315274148160484]56.99424644782931,((((16:[&rate=0.0034486503817373576]24.79168690824128,((8:[&rate=0.00415546769145784]13.768575008306252,10:[&rate=0.004255001815811407]13.768575008306252):[&rate=0.003661597338262907]9.865814292757408,((3:[&rate=0.003423631751634195]11.289675348923652,5:[&rate=0.0034486503817373576]11.289 [...]
+tree STATE_7210000 [&lnP=-22220.37523917688] = [&R] (9:[&rate=0.002477380380936618]156.77208961013477,(33:[&rate=0.002036917332698141]121.06164473464318,(14:[&rate=0.0025932188318397806]90.80204230575227,((((((2:[&rate=0.002344003312820286]29.420439766848304,((1:[&rate=0.002501698323336892]8.086708411808127,6:[&rate=0.002313321505974398]8.086708411808127):[&rate=0.0024485736837831238]16.60429734937788,4:[&rate=0.002659496333455336]24.69100576118601):[&rate=0.0026323686350912755]4.7294340 [...]
+tree STATE_7220000 [&lnP=-22224.05671806263] = [&R] (9:[&rate=0.002363894523110553]145.71850487885715,(33:[&rate=0.002290771998050842]119.06806012466858,(((((((2:[&rate=0.002569098859842164]27.54855503186801,((1:[&rate=0.0031126825382614596]8.004678316517529,6:[&rate=0.002656276495402983]8.004678316517529):[&rate=0.0025380594605395124]15.165746213166775,4:[&rate=0.00301219219251734]23.170424529684304):[&rate=0.00233217959755479]4.378130502183705):[&rate=0.002569098859842164]5.52501707298 [...]
+tree STATE_7230000 [&lnP=-22216.92358718602] = [&R] (9:[&rate=0.0026849933211148228]131.45143679192165,(14:[&rate=0.002400655443964099]104.8637338455789,(((((((2:[&rate=0.00283784178454507]26.36179381811877,((1:[&rate=0.0029043986597047087]6.7528145453541555,6:[&rate=0.0026907722373130153]6.7528145453541555):[&rate=0.002537724482492411]16.19006700831236,4:[&rate=0.0027893059051657468]22.942881553666517):[&rate=0.002882088597114936]3.4189122644522527):[&rate=0.002783056411515691]5.0844658 [...]
+tree STATE_7240000 [&lnP=-22204.147767503946] = [&R] (9:[&rate=0.002795053100627699]152.1879732398292,(33:[&rate=0.0021695691909566503]112.94836488191538,((((((((13:[&rate=0.002266692916894983]23.708745367707056,(11:[&rate=0.0030603491009905835]7.233743935027799,12:[&rate=0.0021695691909566503]7.233743935027799):[&rate=0.002413195981743578]16.475001432679257):[&rate=0.0027214615192713482]4.0588157388360315,7:[&rate=0.002968801881524383]27.767561106543088):[&rate=0.002337168849737956]7.02 [...]
+tree STATE_7250000 [&lnP=-22210.588214237785] = [&R] (9:[&rate=0.0038204856913023823]104.42999621020945,((14:[&rate=0.0032681684857206552]77.01745278341156,((((((2:[&rate=0.0028344721959491967]25.698046872943795,((1:[&rate=0.004029705177844299]4.715165209755002,6:[&rate=0.0034835941714807467]4.715165209755002):[&rate=0.0028344721959491967]16.624806493287096,4:[&rate=0.0031337333564587858]21.3399717030421):[&rate=0.003538174279593411]4.358075169901696):[&rate=0.0034317863434407164]2.82469 [...]
+tree STATE_7260000 [&lnP=-22218.698884970043] = [&R] (9:[&rate=0.0026141700774097356]123.64974609327014,((14:[&rate=0.0038353530614487156]67.23373145785585,((((((2:[&rate=0.00269779617848742]24.23954499245803,((1:[&rate=0.0033130120925564744]6.5349150937224145,6:[&rate=0.0022248609424823417]6.5349150937224145):[&rate=0.002876625826861566]13.497418108169786,4:[&rate=0.002992605318694927]20.0323332018922):[&rate=0.0031954683888960594]4.207211790565829):[&rate=0.0025631268234269705]3.176767 [...]
+tree STATE_7270000 [&lnP=-22222.146231940806] = [&R] (9:[&rate=0.0018289965906926181]183.42139734531008,((14:[&rate=0.0026332191767265366]104.02218611233941,((((((2:[&rate=0.0023296813621710368]33.463219517861674,((1:[&rate=0.002479942638016759]6.233899293106413,6:[&rate=0.002751808230121086]6.233899293106413):[&rate=0.0021707307768014067]20.506270227202172,4:[&rate=0.0025261271055642964]26.740169520308584):[&rate=0.0021847046482286418]6.72304999755309):[&rate=0.0017712571189950352]4.623 [...]
+tree STATE_7280000 [&lnP=-22229.98745818699] = [&R] (9:[&rate=0.0021086156503419998]141.97218684482144,(33:[&rate=0.002121336269490722]108.71819690395948,(((((((2:[&rate=0.002315895161065758]30.085165235472445,((1:[&rate=0.002561375393565455]8.361147989253828,6:[&rate=0.0018175756094847256]8.361147989253828):[&rate=0.0020956802804221295]16.738672576774604,4:[&rate=0.0025828737137291724]25.099820566028434):[&rate=0.0018175756094847256]4.985344669444011):[&rate=0.0025221072880465562]6.9064 [...]
+tree STATE_7290000 [&lnP=-22224.153144133208] = [&R] (9:[&rate=0.002106292780433681]190.05922202640696,(((((16:[&rate=0.0021441173414193096]46.79102129388278,((((13:[&rate=0.0019420882656558467]32.629757336459434,(11:[&rate=0.0018041465718276215]10.65893951339255,12:[&rate=0.0020068422845712637]10.65893951339255):[&rate=0.0018220459549941022]21.970817823066884):[&rate=0.0021830763092520805]4.368982492156292,7:[&rate=0.002081432024193947]36.998739828615726):[&rate=0.0021699269853368967]7. [...]
+tree STATE_7300000 [&lnP=-22231.167182563604] = [&R] (9:[&rate=0.0027142066361006386]144.22596120314523,(14:[&rate=0.00261864501945534]106.04510677655064,(((((16:[&rate=0.00261864501945534]37.930899804756585,((8:[&rate=0.0028114692373449367]21.522227306512935,10:[&rate=0.0027443751403641776]21.522227306512935):[&rate=0.002724800951265005]13.05179597687447,(((13:[&rate=0.0027333160323783786]23.41484871183973,(11:[&rate=0.0027398686269168274]6.4306497131853115,12:[&rate=0.00261864501945534 [...]
+tree STATE_7310000 [&lnP=-22221.584561115622] = [&R] (9:[&rate=0.002533608645503188]138.6948222516638,(((((16:[&rate=0.0024252124511981656]39.10314759243739,((((13:[&rate=0.002901466556909466]21.13327689299237,7:[&rate=0.0030504118167449575]21.13327689299237):[&rate=0.002668552424319821]5.809311566936337,(11:[&rate=0.0023435825565073076]7.027917609016142,12:[&rate=0.0022709642262911285]7.027917609016142):[&rate=0.0020254612418479675]19.914670850912565):[&rate=0.0024520142232016037]4.6767 [...]
+tree STATE_7320000 [&lnP=-22220.941448933776] = [&R] (9:[&rate=0.002632826315018469]152.71265553983812,((((((((2:[&rate=0.0027581198374227595]27.834610639175715,((1:[&rate=0.0028883959889405546]9.80655091524029,6:[&rate=0.0025105255358261035]9.80655091524029):[&rate=0.0027494492097788186]11.496071256166541,4:[&rate=0.0030665624149296645]21.30262217140683):[&rate=0.0028178439894156774]6.531988467768883):[&rate=0.0026086215038848104]2.714290770811264,(18:[&rate=0.0027581198374227595]24.367 [...]
+tree STATE_7330000 [&lnP=-22218.2301431782] = [&R] (9:[&rate=0.0025777762840370404]134.2154204655952,((14:[&rate=0.0026973822946116492]95.43362890568325,33:[&rate=0.0026830094339600364]95.43362890568325):[&rate=0.0022223240803790094]7.869621265648561,((((((2:[&rate=0.0024121827431245932]30.641574050197974,((1:[&rate=0.0029154757660891486]6.723709317809819,6:[&rate=0.0026320628749844427]6.723709317809819):[&rate=0.002064963511535108]18.546327059790677,4:[&rate=0.002252377274476051]25.2700 [...]
+tree STATE_7340000 [&lnP=-22217.740057158608] = [&R] (9:[&rate=0.002348681827533269]147.86103812489034,((14:[&rate=0.0030333657057817487]84.88573746814008,((((((2:[&rate=0.0029445320971317242]25.96433778493538,((1:[&rate=0.0026086040412039136]7.123758376347785,6:[&rate=0.003275541635289564]7.123758376347785):[&rate=0.0029734653212244097]15.778730125822685,4:[&rate=0.003327451968785405]22.90248850217047):[&rate=0.003081006436737652]3.0618492827649106):[&rate=0.003168918958184751]4.3365890 [...]
+tree STATE_7350000 [&lnP=-22201.438862797582] = [&R] (9:[&rate=0.003245406058816974]119.04726165314163,((((((((2:[&rate=0.0030571079374304595]22.149221164957027,((1:[&rate=0.0037706982567453275]6.826314508063137,6:[&rate=0.0028819490423273543]6.826314508063137):[&rate=0.0025466883655208025]11.909433017019975,4:[&rate=0.0042269825547854144]18.735747525083113):[&rate=0.0033411915679329297]3.4134736398739136):[&rate=0.004090463937482625]2.8107611334245526,(18:[&rate=0.0034940540286987963]21 [...]
+tree STATE_7360000 [&lnP=-22216.425626563203] = [&R] (9:[&rate=0.002297317326402943]155.48197175053315,((33:[&rate=0.0021654905237970556]114.49826918109487,((((((2:[&rate=0.002135612529225361]32.83184512612342,((1:[&rate=0.0025020998565628283]9.70380841142026,6:[&rate=0.0022722872580075676]9.70380841142026):[&rate=0.0023344445187770324]15.1973612514411,4:[&rate=0.002895665510368446]24.90116966286136):[&rate=0.0018228108269674663]7.930675463262059):[&rate=0.0030094494063271403]4.798613565 [...]
+tree STATE_7370000 [&lnP=-22228.023588418546] = [&R] (9:[&rate=0.0021374036443530414]174.67575345496556,((14:[&rate=0.002364716091326192]111.90189170323801,(((((((3:[&rate=0.002063411881412475]19.465268573523343,5:[&rate=0.00256892024144871]19.465268573523343):[&rate=0.0020782105060071954]24.573554162486143,((13:[&rate=0.00222876324829806]29.05899497497662,(11:[&rate=0.0017490567861464773]8.59781428474198,12:[&rate=0.001772786817109179]8.59781428474198):[&rate=0.0023108661402486737]20.46 [...]
+tree STATE_7380000 [&lnP=-22204.28714765877] = [&R] (9:[&rate=0.003731387958086715]113.34865364937252,((14:[&rate=0.004225066116974973]67.06648997435427,((((((((13:[&rate=0.002820767749251744]17.85849999246326,7:[&rate=0.004109395688722601]17.85849999246326):[&rate=0.003118867241113258]3.2701926634268084,(11:[&rate=0.004288037271620322]4.0742324936728815,12:[&rate=0.004321210385363696]4.0742324936728815):[&rate=0.0030652336400959494]17.054460162217186):[&rate=0.004109395688722601]4.16789 [...]
+tree STATE_7390000 [&lnP=-22220.56719221829] = [&R] (((14:[&rate=0.002979423187189951]82.01503233828993,((((((((13:[&rate=0.002343925546211354]22.76272373080931,(11:[&rate=0.0034470232412774538]5.666557895588876,12:[&rate=0.002800689566980822]5.666557895588876):[&rate=0.0024986690800767506]17.096165835220432):[&rate=0.002697311356687754]3.2454885601466437,7:[&rate=0.003339522659144695]26.008212290955953):[&rate=0.0028623308511584408]6.206514979118801,(3:[&rate=0.0034470232412774538]11.00 [...]
+tree STATE_7400000 [&lnP=-22215.09057522188] = [&R] (((((((16:[&rate=0.002254630615627033]39.31104180210351,((8:[&rate=0.0031074445336732974]21.825437145478773,10:[&rate=0.0033489303775554313]21.825437145478773):[&rate=0.0026274648613361464]16.58687569809515,(((13:[&rate=0.00220831322332402]26.72061429265589,7:[&rate=0.0023530939729577527]26.72061429265589):[&rate=0.002939127018826279]3.500213841265161,(11:[&rate=0.0023347785959917664]8.335903593038251,12:[&rate=0.0019311281590140132]8.3 [...]
+tree STATE_7410000 [&lnP=-22215.0472863491] = [&R] ((14:[&rate=0.0026432575907594305]98.98380332628744,((((((8:[&rate=0.002416955713730596]24.459795526066472,10:[&rate=0.0025521830862393563]24.459795526066472):[&rate=0.002745888555800111]13.069654477943168,(((13:[&rate=0.0023384993501499324]22.42941247343295,7:[&rate=0.0029391116555007664]22.42941247343295):[&rate=0.0033424701859037296]3.1326209594928542,(11:[&rate=0.003186230607175132]7.319667138699184,12:[&rate=0.0027997892353433624]7. [...]
+tree STATE_7420000 [&lnP=-22212.924598579015] = [&R] ((33:[&rate=0.00353587475673726]82.99116283254982,(((((((2:[&rate=0.003395032294820954]19.662950644054387,((1:[&rate=0.004798992007206173]5.5260390126797,6:[&rate=0.0027942419857139664]5.5260390126797):[&rate=0.003691311346235943]10.807858007889774,4:[&rate=0.004798992007206173]16.333897020569474):[&rate=0.0037512543301571866]3.3290536234849135):[&rate=0.0027942419857139664]4.554977832824665,(18:[&rate=0.003373700269537811]20.243815538 [...]
+tree STATE_7430000 [&lnP=-22223.618432318577] = [&R] ((33:[&rate=0.0024754255479108346]100.91867435626762,(14:[&rate=0.0031052048551078303]95.35426740377335,((((((2:[&rate=0.0028844836151484003]28.15273456182069,((1:[&rate=0.002968491468961244]7.611075240238793,6:[&rate=0.0024754255479108346]7.611075240238793):[&rate=0.003291844104250871]15.614124431237641,4:[&rate=0.0034449569412904413]23.225199671476435):[&rate=0.002769254549641353]4.927534890344255):[&rate=0.0023404246507783945]3.5634 [...]
+tree STATE_7440000 [&lnP=-22207.7131272928] = [&R] ((33:[&rate=0.0020835459414838492]99.19027479456769,(14:[&rate=0.002593217605788746]86.60957214530065,((((((2:[&rate=0.002869050317637987]25.25966434678806,((1:[&rate=0.002677470041530861]7.697645269078762,6:[&rate=0.0025113026960397124]7.697645269078762):[&rate=0.002428442233694226]13.700818726945222,4:[&rate=0.0029145538538182187]21.398463996023985):[&rate=0.002806510420384516]3.8612003507640758):[&rate=0.0026602712492652687]3.60315252 [...]
+tree STATE_7450000 [&lnP=-22219.97297541087] = [&R] ((((((((((2:[&rate=0.0023093495663776926]31.966082041439694,((1:[&rate=0.002276753340434364]9.509886438974739,6:[&rate=0.0024348430469324513]9.509886438974739):[&rate=0.00239552195111621]15.487844548679746,4:[&rate=0.0034582512088486375]24.997730987654485):[&rate=0.0025352525970444586]6.968351053785209):[&rate=0.002510350777519435]3.7600300512205003,(18:[&rate=0.002324733443738025]29.415502440288684,(26:[&rate=0.0029365315824598903]15.4 [...]
+tree STATE_7460000 [&lnP=-22232.10532714327] = [&R] (((14:[&rate=0.003048665645184235]82.48642257652001,((((((8:[&rate=0.003107600468380778]18.739366255939707,10:[&rate=0.0031397103206055317]18.739366255939707):[&rate=0.0031172665140489653]14.08077704875604,(((13:[&rate=0.003012600066661328]18.35993491857896,7:[&rate=0.002912172862296933]18.35993491857896):[&rate=0.0030631288060536856]3.4725215581501274,(11:[&rate=0.0028311335588817777]5.76653086342763,12:[&rate=0.0029889954870277635]5.7 [...]
+tree STATE_7470000 [&lnP=-22224.242597312503] = [&R] (((((((16:[&rate=0.0032679307588766035]32.863307637579865,((8:[&rate=0.0031169304073103937]22.317804815757505,10:[&rate=0.0032795212977774083]22.317804815757505):[&rate=0.0029240389469950105]9.34438186448316,(((13:[&rate=0.0028581599121638695]20.342336798633937,7:[&rate=0.0032210547583818847]20.342336798633937):[&rate=0.003146122679042892]3.412959904011302,(11:[&rate=0.0032381201940263537]5.548244665368888,12:[&rate=0.00311693040731039 [...]
+tree STATE_7480000 [&lnP=-22215.150188412095] = [&R] (((((((16:[&rate=0.0018928489944026598]42.340755332343115,(((8:[&rate=0.002238127163034039]24.15592234992975,10:[&rate=0.002345762764840777]24.15592234992975):[&rate=0.002832168005011639]13.348632482810007,(3:[&rate=0.0026998440069809304]15.051322481281902,5:[&rate=0.0020206894571088885]15.051322481281902):[&rate=0.0019384559471376035]22.453232351457856):[&rate=0.002132024254846888]3.94075170971319,((13:[&rate=0.0017147649617613057]29. [...]
+tree STATE_7490000 [&lnP=-22204.91332498116] = [&R] ((14:[&rate=0.002172622230900575]111.04408851514985,(((((((8:[&rate=0.003301118327102116]20.952346110157762,10:[&rate=0.0027722848412210163]20.952346110157762):[&rate=0.0033832774695377573]12.381433368908091,((3:[&rate=0.003558879263531134]12.508286911652414,5:[&rate=0.002805972489241611]12.508286911652414):[&rate=0.0027389507108859686]17.16716244894652,((13:[&rate=0.0027223514566538896]22.219816846821466,7:[&rate=0.0034896045282530016] [...]
+tree STATE_7500000 [&lnP=-22219.77601498009] = [&R] ((33:[&rate=0.0021306666600436016]119.11941600908867,(14:[&rate=0.0021306666600436016]105.36274806448151,((((16:[&rate=0.003151746926740819]35.103966193631535,(((3:[&rate=0.0029372575499946544]13.107214289620215,5:[&rate=0.002807399487407641]13.107214289620215):[&rate=0.0027129388951729685]15.857815361246358,((13:[&rate=0.0023286269563443057]20.97505174183073,(11:[&rate=0.0022931778407254263]6.237671142100744,12:[&rate=0.003495205745442 [...]
+tree STATE_7510000 [&lnP=-22206.99275420649] = [&R] ((33:[&rate=0.0025276260804911344]95.91454482534886,(14:[&rate=0.0035717181384028324]85.14073602958673,((((16:[&rate=0.002576665302828897]39.64193195995724,(((3:[&rate=0.0030317104984391293]13.932778676224636,5:[&rate=0.0024490226638248006]13.932778676224636):[&rate=0.002781422673044964]19.117979735671618,((13:[&rate=0.0025276260804911344]21.060071958429862,(11:[&rate=0.0024490226638248006]7.759227205836431,12:[&rate=0.00229347849345086 [...]
+tree STATE_7520000 [&lnP=-22211.828196176706] = [&R] ((33:[&rate=0.0023140781468872543]108.35186681144025,(((((((8:[&rate=0.0026487757169204587]25.467205791579225,10:[&rate=0.0023581922150541783]25.467205791579225):[&rate=0.0038775851295821326]11.204800078314172,(((13:[&rate=0.002236189087269485]20.127247916250518,(11:[&rate=0.002727477340722061]6.389074356208362,12:[&rate=0.0021293192671961217]6.389074356208362):[&rate=0.0029578732775443254]13.738173560042156):[&rate=0.00302379090542703 [...]
+tree STATE_7530000 [&lnP=-22220.067069402492] = [&R] ((33:[&rate=0.002077162877654481]118.34137621789363,(((((16:[&rate=0.0022787469815329377]42.35714205299157,((3:[&rate=0.002193153193014812]16.01912146225028,5:[&rate=0.0024138507531606575]16.01912146225028):[&rate=0.0023197930691999276]23.686512385085532,(((13:[&rate=0.0021031286889756505]24.708151711944076,(11:[&rate=0.002376911036989302]8.314662149386383,12:[&rate=0.0021031286889756505]8.314662149386383):[&rate=0.0025628460888226325] [...]
+tree STATE_7540000 [&lnP=-22218.902097475206] = [&R] ((33:[&rate=0.002878035577676536]95.34142491093345,(14:[&rate=0.002977638483196033]86.76242274927888,(((((((3:[&rate=0.002749412616396586]12.73014076080352,5:[&rate=0.003003888065668302]12.73014076080352):[&rate=0.0029082034114165413]13.898058789061961,((13:[&rate=0.002749412616396586]21.861412162807486,7:[&rate=0.0035878494716612115]21.861412162807486):[&rate=0.003494532691111202]1.5208613683839367,(11:[&rate=0.0031831259364958485]6.4 [...]
+tree STATE_7550000 [&lnP=-22209.651702318148] = [&R] ((33:[&rate=0.001959423875507928]130.35874416648903,(14:[&rate=0.0024242961088483854]108.31017123356152,(((((((24:[&rate=0.0027144537547098817]14.590626893162286,(((20:[&rate=0.0023259694883078705]3.902312928844629,21:[&rate=0.002270078395405996]3.902312928844629):[&rate=0.0027769570190441938]0.3841454163393143,19:[&rate=0.0024850652124597914]4.286458345183943):[&rate=0.002443371010354213]6.715907640309062,15:[&rate=0.00206195948156094 [...]
+tree STATE_7560000 [&lnP=-22223.402718631016] = [&R] ((14:[&rate=0.0020154476298889223]111.22241814971903,(33:[&rate=0.0026201295342913496]89.5916565807528,((((((((24:[&rate=0.002483586041364818]11.362924910584919,(((19:[&rate=0.002214181776035824]2.926509064034566,20:[&rate=0.0027536419309035273]2.926509064034566):[&rate=0.0019012696282674512]0.6581455361994015,21:[&rate=0.002326475371756115]3.5846546002339674):[&rate=0.002701155623263193]4.515694475181036,15:[&rate=0.002214181776035824 [...]
+tree STATE_7570000 [&lnP=-22207.51323974887] = [&R] ((((((((((3:[&rate=0.0024702012984293793]12.527525021488612,5:[&rate=0.002815953218711356]12.527525021488612):[&rate=0.002974417471094384]15.938725605326612,((13:[&rate=0.002039098460622052]22.667466523464363,(11:[&rate=0.004249055331357674]5.908884891893608,12:[&rate=0.00302787304152276]5.908884891893608):[&rate=0.0035248605199371414]16.758581631570756):[&rate=0.004536256133117716]1.3127827387304372,7:[&rate=0.0030549077755835444]23.98 [...]
+tree STATE_7580000 [&lnP=-22214.273591781453] = [&R] ((((((((((24:[&rate=0.003117792024323722]14.306613462730843,((21:[&rate=0.002196482474232013]2.9362579177122794,(19:[&rate=0.0025078536917000835]2.7700587614579617,20:[&rate=0.0022203526678020503]2.7700587614579617):[&rate=0.002667804627068532]0.1661991562543177):[&rate=0.0026984294861364592]5.544677997738464,15:[&rate=0.002894427745501762]8.480935915450743):[&rate=0.0025509951514795445]5.825677547280099):[&rate=0.002989725221793799]5. [...]
+tree STATE_7590000 [&lnP=-22221.533558610787] = [&R] (((((((16:[&rate=0.0027106974752696524]40.560024889009554,((8:[&rate=0.0025208434597865505]27.310843118839205,10:[&rate=0.0021740912936008593]27.310843118839205):[&rate=0.0031605326025718633]12.458818921846355,((3:[&rate=0.002343035777801809]14.709422380452787,5:[&rate=0.0023092347188288544]14.709422380452787):[&rate=0.0021740912936008593]21.801851028871603,((13:[&rate=0.0024308504134766856]22.735181373886036,7:[&rate=0.002783167974364 [...]
+tree STATE_7600000 [&lnP=-22214.26164619992] = [&R] (((((((((((13:[&rate=0.002018348480018084]25.016136834482424,7:[&rate=0.002364769859929804]25.016136834482424):[&rate=0.002332873485374723]5.278427983684736,(11:[&rate=0.0026203412137000654]7.380289193183126,12:[&rate=0.002375211784840785]7.380289193183126):[&rate=0.002106611460796327]22.914275624984036):[&rate=0.0031727387182195707]4.423671052379355,(3:[&rate=0.002478930338810527]16.58051285736198,5:[&rate=0.0021280386312441963]16.5805 [...]
+tree STATE_7610000 [&lnP=-22216.271749013977] = [&R] ((((((((((24:[&rate=0.0033778363441069265]13.612353771897906,(((20:[&rate=0.0031196088316897724]2.8021738615625917,21:[&rate=0.003041551512231831]2.8021738615625917):[&rate=0.0029708924297880863]0.7390602779642466,19:[&rate=0.0029227870551336898]3.5412341395268383):[&rate=0.0035079014361633696]6.374611713739617,15:[&rate=0.003168325907844926]9.915845853266456):[&rate=0.0031773144388929256]3.6965079186314505):[&rate=0.002904614754636732 [...]
+tree STATE_7620000 [&lnP=-22214.175826237417] = [&R] ((33:[&rate=0.002613263601604683]108.45745890767527,(((((((22:[&rate=0.002592784971378393]24.760396295543757,((((20:[&rate=0.0030093484219793722]3.2751842800839155,21:[&rate=0.0022032334542802763]3.2751842800839155):[&rate=0.002844666442790746]0.8613685092122356,19:[&rate=0.0019281344113094455]4.136552789296151):[&rate=0.00287958674166951]8.080976391411262,15:[&rate=0.002299869704971777]12.217529180707414):[&rate=0.002613263601604683]2 [...]
+tree STATE_7630000 [&lnP=-22212.379567257332] = [&R] ((33:[&rate=0.002003622979518558]115.98749168449562,(((((((23:[&rate=0.00251174181108055]10.76276754902702,17:[&rate=0.002880098584071573]10.76276754902702):[&rate=0.0026842642585343844]14.677250545247174,((((19:[&rate=0.002291409062048994]4.066235033459872,(20:[&rate=0.002147354049520922]1.8696715195982172,21:[&rate=0.0030023103745918127]1.8696715195982172):[&rate=0.0030918632282857937]2.196563513861655):[&rate=0.002738944412628073]7. [...]
+tree STATE_7640000 [&lnP=-22217.716983378185] = [&R] (((14:[&rate=0.0029723576363966716]81.22244748333672,(((((((23:[&rate=0.002836515017711167]8.577468984392434,17:[&rate=0.0030190154410664302]8.577468984392434):[&rate=0.0024970631445403618]13.093130920517078,(((19:[&rate=0.0024970631445403618]4.213067991675417,(20:[&rate=0.0025903196780268723]2.908662195090465,21:[&rate=0.0027063378687204455]2.908662195090465):[&rate=0.0029131414546096756]1.3044057965849518):[&rate=0.003154234546306479 [...]
+tree STATE_7650000 [&lnP=-22221.860708694] = [&R] ((33:[&rate=0.002082196235803963]118.94041215100634,(((((16:[&rate=0.0024629343753135953]42.079405563149095,((22:[&rate=0.0026077891702965808]26.495937996404287,(((((19:[&rate=0.002797612386457384]1.8354156709209781,20:[&rate=0.0023881100303093645]1.8354156709209781):[&rate=0.002681766651249725]1.083540194770869,21:[&rate=0.0022695321524836614]2.918955865691847):[&rate=0.002867271144015253]6.298980841056869,15:[&rate=0.0025486077120496996 [...]
+tree STATE_7660000 [&lnP=-22213.18268324726] = [&R] ((33:[&rate=0.0021704749609051323]110.0470303826878,(((((((((11:[&rate=0.0033075721892781356]6.356045427624737,12:[&rate=0.0027856774865592975]6.356045427624737):[&rate=0.0027154166598202935]20.568808940511637,(7:[&rate=0.0031731727150523456]22.313923982411122,13:[&rate=0.0023529161041215708]22.313923982411122):[&rate=0.0025930896702587754]4.610930385725251):[&rate=0.002858440059910476]6.844957579236571,(3:[&rate=0.0029561920839313817]1 [...]
+tree STATE_7670000 [&lnP=-22227.664739251297] = [&R] ((33:[&rate=0.0020194074855737613]129.62824907364094,(((((((8:[&rate=0.0025953941964539013]28.249775322376014,10:[&rate=0.0023499687665492733]28.249775322376014):[&rate=0.0028339654685027023]14.70661346177764,((3:[&rate=0.0029572760490152936]17.243697015459233,5:[&rate=0.002319338149082365]17.243697015459233):[&rate=0.0021472492378844573]23.368535650044926,((11:[&rate=0.0027081965478382864]7.644382482452466,12:[&rate=0.0018198337422941 [...]
+tree STATE_7680000 [&lnP=-22206.46338686964] = [&R] ((33:[&rate=0.0035194208030559135]69.85676086013724,(((((((8:[&rate=0.004205560819293563]16.03524679766936,10:[&rate=0.00406678259677471]16.03524679766936):[&rate=0.0033100266021239776]7.706292682931533,((3:[&rate=0.0036242719571734023]10.042028479334578,5:[&rate=0.003853662427034745]10.042028479334578):[&rate=0.003763607967616321]12.61240521737741,(7:[&rate=0.003355739298251053]18.808494041595353,((11:[&rate=0.003750550760146646]5.0558 [...]
+tree STATE_7690000 [&lnP=-22214.314290844366] = [&R] ((33:[&rate=0.0017784940939424467]121.08250789927554,(14:[&rate=0.002878978212116233]89.39747991206443,((((((((15:[&rate=0.0017173141264839387]11.370284631736915,(19:[&rate=0.0020161388677173626]3.8105255395442463,(21:[&rate=0.002130438645828852]2.737293943260949,20:[&rate=0.002335199072670181]2.737293943260949):[&rate=0.001671389532640927]1.0732315962832972):[&rate=0.002695705069465948]7.559759092192669):[&rate=0.002048342941903774]3. [...]
+tree STATE_7700000 [&lnP=-22222.878477866732] = [&R] ((((((((((24:[&rate=0.0023951226315121004]13.027830677174947,(15:[&rate=0.0023795500736740445]10.16238469833132,(19:[&rate=0.0023701356609279987]5.110260632266421,(21:[&rate=0.0024056705464768884]4.439169931227748,20:[&rate=0.0023826872550049756]4.439169931227748):[&rate=0.0023615476974585456]0.6710907010386729):[&rate=0.002360656445140427]5.0521240660648985):[&rate=0.002390116343998722]2.8654459788436277):[&rate=0.002373717690616986]1 [...]
+tree STATE_7710000 [&lnP=-22224.790091283252] = [&R] ((33:[&rate=0.002137113816830849]120.31152390974157,((((((((24:[&rate=0.002209323899645909]13.506524691873432,(15:[&rate=0.002149448800686123]12.266377225361012,(19:[&rate=0.0020856218964450182]4.115435555214493,(21:[&rate=0.0021616310582507734]3.552510996859431,20:[&rate=0.002366107223397497]3.552510996859431):[&rate=0.0029625390721990043]0.5629245583550619):[&rate=0.00201296731750397]8.15094167014652):[&rate=0.0019966029376909653]1.2 [...]
+tree STATE_7720000 [&lnP=-22222.37890259316] = [&R] ((14:[&rate=0.0017546943901813805]145.16965032048637,(33:[&rate=0.0026752345614887343]104.67477279420285,(((((((24:[&rate=0.0028293405906553108]16.334909150938312,(15:[&rate=0.0023374954259850183]8.901592644971352,(19:[&rate=0.0019175407401054887]4.000742052103588,(21:[&rate=0.0026404105084294038]2.2751148910677674,20:[&rate=0.0027550524747348305]2.2751148910677674):[&rate=0.002427268450497456]1.7256271610358205):[&rate=0.00253388314603 [...]
+tree STATE_7730000 [&lnP=-22224.613178413292] = [&R] ((33:[&rate=0.0021187929531137468]123.17733472581371,((((((((24:[&rate=0.0023737742036009256]16.165975762769207,(15:[&rate=0.0024035721488092267]10.953552520531499,(19:[&rate=0.0021799629304502656]4.609342926061098,(20:[&rate=0.002297947893238299]3.3657656135104315,21:[&rate=0.0024226956741775214]3.3657656135104315):[&rate=0.0022286473950353975]1.2435773125506664):[&rate=0.002297947893238299]6.344209594470401):[&rate=0.0018873727223564 [...]
+tree STATE_7740000 [&lnP=-22218.334580829724] = [&R] ((33:[&rate=0.001990670669204841]137.95787351368537,(((((((23:[&rate=0.003138777185485356]9.173459296998205,17:[&rate=0.0029326536112558397]9.173459296998205):[&rate=0.001526277141808981]16.645156753318382,(22:[&rate=0.002327226678571047]24.523608786211188,(((19:[&rate=0.0025853560117428606]6.341256435413021,(20:[&rate=0.0024137065200899293]4.901511725635776,21:[&rate=0.0025853560117428606]4.901511725635776):[&rate=0.002023644540110859 [...]
+tree STATE_7750000 [&lnP=-22218.237036363174] = [&R] ((14:[&rate=0.002476250084458227]105.9063318266201,(((((((23:[&rate=0.0023214455275580356]14.328790970730989,17:[&rate=0.0022627826065612423]14.328790970730989):[&rate=0.002071514032997357]11.873424294817823,(22:[&rate=0.0024649209662212334]22.102327859478915,(((21:[&rate=0.002442582473842332]5.834803221231156,(20:[&rate=0.002398679027447433]4.092224696997418,19:[&rate=0.002737055128695327]4.092224696997418):[&rate=0.002310072799520260 [...]
+tree STATE_7760000 [&lnP=-22206.368026525444] = [&R] ((33:[&rate=0.002266533537160578]103.47134186775571,((((((((((19:[&rate=0.0024705664013498466]4.274951210105902,(20:[&rate=0.003252309772174893]3.21285682097527,21:[&rate=0.0029946148470735958]3.21285682097527):[&rate=0.002323356504953576]1.0620943891306323):[&rate=0.0034556166168527133]4.936784804903356,15:[&rate=0.0030716482537491694]9.211736015009258):[&rate=0.0030716482537491694]3.770529576781099,24:[&rate=0.0029946148470735958]12. [...]
+tree STATE_7770000 [&lnP=-22218.565990827617] = [&R] ((14:[&rate=0.002444358714110933]101.53629011654215,((((((8:[&rate=0.003011945068349508]21.71831354739117,10:[&rate=0.0023478127492421877]21.71831354739117):[&rate=0.0025241278229945095]18.676841103911343,((3:[&rate=0.002596276983479227]14.833756726462658,5:[&rate=0.0029167596455195784]14.833756726462658):[&rate=0.002706452519284263]19.399402810906608,((11:[&rate=0.002762288022164763]6.717321155290622,12:[&rate=0.0023478127492421877]6. [...]
+tree STATE_7780000 [&lnP=-22223.91444083209] = [&R] ((33:[&rate=0.002168851309442199]103.52246445614757,(14:[&rate=0.0031438145070703818]81.82839439330014,((((((22:[&rate=0.002562881056681564]21.327306842319146,((15:[&rate=0.002029813036090194]11.687758237858107,((20:[&rate=0.002136680936232953]2.2937276298616163,19:[&rate=0.0027660108858414984]2.2937276298616163):[&rate=0.002029813036090194]0.9347386507986819,21:[&rate=0.0019882663758947873]3.228466280660298):[&rate=0.001965649220122011 [...]
+tree STATE_7790000 [&lnP=-22219.086148507107] = [&R] ((33:[&rate=0.0026026085835181506]103.306179614688,((((16:[&rate=0.00259082585436764]39.946140970678755,((((((15:[&rate=0.002519965848506916]7.5833403679996145,((20:[&rate=0.00259082585436764]2.566931832999813,21:[&rate=0.0024308825883041668]2.566931832999813):[&rate=0.0026210690904583385]0.603088737112544,19:[&rate=0.0024669412878469254]3.170020570112357):[&rate=0.002573668799010872]4.413319797887258):[&rate=0.002767055134768652]2.332 [...]
+tree STATE_7800000 [&lnP=-22211.575925383237] = [&R] ((33:[&rate=0.002906101456062139]100.11565374635097,(14:[&rate=0.002699709778198528]78.85178287958648,((((((22:[&rate=0.002699709778198528]20.97300502358778,((15:[&rate=0.004103770273985885]8.358429063838626,(19:[&rate=0.00248609107587806]3.622762636978611,(20:[&rate=0.0031694064026866115]2.8759806876080685,21:[&rate=0.002994477250107811]2.8759806876080685):[&rate=0.0028428677303709345]0.7467819493705425):[&rate=0.0034409811845561894]4 [...]
+tree STATE_7810000 [&lnP=-22220.119450918344] = [&R] (((((((((23:[&rate=0.002692447323598491]11.721081713378398,17:[&rate=0.002395576684746921]11.721081713378398):[&rate=0.002107512131059549]17.385820318570005,((24:[&rate=0.0026785223812962815]13.751102065017058,(15:[&rate=0.002692447323598491]10.936486130027767,((21:[&rate=0.0031141743807460115]2.729051119797,20:[&rate=0.0027219366305905467]2.729051119797):[&rate=0.002651991866267674]1.6041989796992668,19:[&rate=0.0025139218867589177]4. [...]
+tree STATE_7820000 [&lnP=-22209.676938139826] = [&R] ((33:[&rate=0.0017760116313171204]120.8358970673566,(14:[&rate=0.0023639345494977848]95.88525852089302,((((16:[&rate=0.0022993056352246153]42.64245067122005,((8:[&rate=0.0029979880455445196]22.926045365225026,10:[&rate=0.002714460706244839]22.926045365225026):[&rate=0.002501133448969246]18.496630773721453,((3:[&rate=0.0024305892745990314]15.647581729175053,5:[&rate=0.002380341819949544]15.647581729175053):[&rate=0.0021695057835153554]2 [...]
+tree STATE_7830000 [&lnP=-22209.62348446677] = [&R] ((33:[&rate=0.003022478742068957]80.78792882901274,(14:[&rate=0.0032701303809637644]75.28246256671731,((((((((24:[&rate=0.004551718312459372]9.543094174297277,15:[&rate=0.002939494135602786]9.543094174297277):[&rate=0.002939494135602786]0.29651850624762943,((20:[&rate=0.004223226677255636]2.856872127765582,21:[&rate=0.0025424515357122712]2.856872127765582):[&rate=0.003899172522288164]1.2189095458236014,19:[&rate=0.0025424515357122712]4. [...]
+tree STATE_7840000 [&lnP=-22218.77558833176] = [&R] ((((((16:[&rate=0.0024626108505383776]43.783802217668935,((((3:[&rate=0.00290423249952314]15.764261439395773,5:[&rate=0.0024096378827775803]15.764261439395773):[&rate=0.0024278347895026154]19.037020737698192,((13:[&rate=0.002190765918664999]25.94797822779256,(11:[&rate=0.0027235329820231354]7.8193938375115675,12:[&rate=0.002281060970026348]7.8193938375115675):[&rate=0.002922924823300666]18.12858439028099):[&rate=0.0024957096850429174]4. [...]
+tree STATE_7850000 [&lnP=-22216.192503069964] = [&R] (((14:[&rate=0.003805687271846242]73.05600359822519,(((((8:[&rate=0.0037445762640289162]17.968827748377596,10:[&rate=0.0033941224136943386]17.968827748377596):[&rate=0.0033730020619754564]12.563760829957456,((3:[&rate=0.0026707365264296437]13.87958523208118,5:[&rate=0.0026707365264296437]13.87958523208118):[&rate=0.0032256207367173865]15.105103978644259,((13:[&rate=0.0025347144903250496]20.829019649900843,(11:[&rate=0.00322562073671738 [...]
+tree STATE_7860000 [&lnP=-22217.651379938634] = [&R] (((14:[&rate=0.00324564825778409]76.90120872410502,(((((22:[&rate=0.0022443024734913157]23.317253495757598,(((((20:[&rate=0.0032907751771620143]2.3996949896823745,21:[&rate=0.0030988155277752202]2.3996949896823745):[&rate=0.0024354617687824976]1.3363990481152763,19:[&rate=0.002473703646425091]3.736094037797651):[&rate=0.003038431244457017]3.5467654049271378,15:[&rate=0.0031668077021346952]7.282859442724789):[&rate=0.0033411199011386776 [...]
+tree STATE_7870000 [&lnP=-22225.326304103175] = [&R] (((((((((3:[&rate=0.0029405694962588926]12.367003565644705,5:[&rate=0.002964701782905044]12.367003565644705):[&rate=0.0030623690478464314]17.52727394223809,((13:[&rate=0.0026999567918447468]20.6754976762247,(11:[&rate=0.0024955120473164017]5.754052195056121,12:[&rate=0.0028108262692765956]5.754052195056121):[&rate=0.002586269810816116]14.921445481168579):[&rate=0.002648107606557988]5.192619034357797,7:[&rate=0.002898328944173872]25.868 [...]
+tree STATE_7880000 [&lnP=-22221.641334927815] = [&R] (((((((((8:[&rate=0.002035369649380874]29.638754879257988,10:[&rate=0.00218446546173404]29.638754879257988):[&rate=0.001882165422714761]17.68824003328084,((3:[&rate=0.002329565614509492]18.84804643432578,5:[&rate=0.0019523039656159041]18.84804643432578):[&rate=0.0019270765840061882]21.75246326085383,((13:[&rate=0.001913261112610651]26.622542103202882,7:[&rate=0.0022307144697070493]26.622542103202882):[&rate=0.0019859528171976555]6.1352 [...]
+tree STATE_7890000 [&lnP=-22229.55489573211] = [&R] (9:[&rate=0.0022806296859950027]176.48430248565435,(33:[&rate=0.001980306441514907]123.49077356726303,((((((((23:[&rate=0.002418259507204689]10.580844355896867,17:[&rate=0.002351600920056223]10.580844355896867):[&rate=0.0021431470579791295]15.321555320564189,(24:[&rate=0.002085553884798224]15.620135235841266,((19:[&rate=0.0020622359493738763]6.145125433641756,(20:[&rate=0.0023876927067913787]4.591387648417383,21:[&rate=0.002191596181613 [...]
+tree STATE_7900000 [&lnP=-22215.916361547464] = [&R] (9:[&rate=0.0018352465619441657]192.42484102583927,(33:[&rate=0.0015508569682140794]138.618385729979,((((16:[&rate=0.001899024514442008]51.24341891943328,((((3:[&rate=0.0021796046385531044]21.074550464552374,5:[&rate=0.0018352465619441657]21.074550464552374):[&rate=0.0021132101833872615]22.401847250308847,((13:[&rate=0.0017582468729679028]27.89791790645197,7:[&rate=0.0025251815433604253]27.89791790645197):[&rate=0.002627658641906123]5. [...]
+tree STATE_7910000 [&lnP=-22215.97580027009] = [&R] (9:[&rate=0.0020889849405267575]134.99693202736066,((((((((22:[&rate=0.0026959650589184272]20.955232038156062,((15:[&rate=0.0023668622032097086]10.139484191989975,((20:[&rate=0.002152298661275284]2.506268484447153,19:[&rate=0.00197346358446116]2.506268484447153):[&rate=0.002554328952552874]0.10696066926019965,21:[&rate=0.002345039667571225]2.6132291537073526):[&rate=0.002890044484039655]7.526255038282622):[&rate=0.0025089195461506877]3. [...]
+tree STATE_7920000 [&lnP=-22218.282946988635] = [&R] (9:[&rate=0.0025736719207209617]122.39114060293763,((((((((22:[&rate=0.003549199269975477]20.53890971872517,((15:[&rate=0.0034279687038195326]9.629520230626753,((21:[&rate=0.0032350450468446998]2.308407507724669,20:[&rate=0.003934643166652662]2.308407507724669):[&rate=0.003517814929412059]1.1556683775082668,19:[&rate=0.0036146608602410897]3.4640758852329356):[&rate=0.0030780899242700234]6.1654443453938175):[&rate=0.0024410326407691596] [...]
+tree STATE_7930000 [&lnP=-22237.883907418167] = [&R] (9:[&rate=0.0022610596023957307]173.9394374474497,((33:[&rate=0.0021796881889744]122.1051067957952,(((((((23:[&rate=0.001989294762046987]10.061660573067229,17:[&rate=0.002329584634589558]10.061660573067229):[&rate=0.002638600172296681]13.265298991247594,22:[&rate=0.002167547761751755]23.326959564314823):[&rate=0.00255967883857823]1.2765711823871904,((15:[&rate=0.0023411949669490603]10.371757497869702,((20:[&rate=0.001888281584694358]3. [...]
+tree STATE_7940000 [&lnP=-22214.665394625426] = [&R] (9:[&rate=0.002349577417254295]141.1165105910806,(((((((((2:[&rate=0.002349577417254295]29.075535181727208,((1:[&rate=0.002490670714882167]8.727433085014708,6:[&rate=0.0023072110866965657]8.727433085014708):[&rate=0.002523314246074039]16.693182556578073,4:[&rate=0.0028016558391424796]25.420615641592782):[&rate=0.002738515414394177]3.6549195401344257):[&rate=0.0025341823685264913]4.168888578241589,(18:[&rate=0.002738515414394177]27.8793 [...]
+tree STATE_7950000 [&lnP=-22216.393601535798] = [&R] (9:[&rate=0.002236480549336519]194.88314834532164,((((((16:[&rate=0.001971259885727299]45.72767246992698,((8:[&rate=0.0026025352888804044]25.928181467407292,10:[&rate=0.002057366981621993]25.928181467407292):[&rate=0.002628554002104046]16.03655412742495,((3:[&rate=0.0033088506956746545]11.159621104732418,5:[&rate=0.002483320300602846]11.159621104732418):[&rate=0.0019483979213752766]28.53525373269911,((13:[&rate=0.001900336902908945]26. [...]
+tree STATE_7960000 [&lnP=-22210.29906695325] = [&R] (9:[&rate=0.0026658905165636163]126.34178912641127,(33:[&rate=0.0032861171805895653]80.24125409015411,(14:[&rate=0.003672324654136989]71.30294059909619,((((16:[&rate=0.0027254205295947705]33.88552022273471,(((3:[&rate=0.0025810598493387596]13.481709064324363,5:[&rate=0.0023658731586146077]13.481709064324363):[&rate=0.0025810598493387596]15.977928715248584,((13:[&rate=0.002624472802341795]22.617504813607876,7:[&rate=0.0033141217893195916 [...]
+tree STATE_7970000 [&lnP=-22215.46490002776] = [&R] (9:[&rate=0.003506545300088592]109.97143290642113,((14:[&rate=0.0029172805311629764]80.62230463759866,((((((2:[&rate=0.00284691282979013]21.613220080608816,((1:[&rate=0.00364794730155905]5.219628170689283,6:[&rate=0.0026477329147300294]5.219628170689283):[&rate=0.002822109360093975]11.722653019619251,4:[&rate=0.0038734393701175592]16.942281190308535):[&rate=0.00284691282979013]4.670938890300281):[&rate=0.0032540915088983147]3.7434052735 [...]
+tree STATE_7980000 [&lnP=-22225.14606951373] = [&R] (9:[&rate=0.00216869214463453]169.28504447525532,(33:[&rate=0.0023209845001376647]127.64222289139349,(((((((2:[&rate=0.0021155937005650056]35.653942072375806,((1:[&rate=0.0021155937005650056]10.398334829847384,6:[&rate=0.002232191670081285]10.398334829847384):[&rate=0.002166049994318524]20.527115604969197,4:[&rate=0.0021965370492983754]30.925450434816582):[&rate=0.002253636446638111]4.728491637559223):[&rate=0.0022579304368922897]4.0985 [...]
+tree STATE_7990000 [&lnP=-22204.75986356166] = [&R] (9:[&rate=0.003527349069683758]124.95781190864659,((14:[&rate=0.0028789672098187835]84.08517365795107,((((((2:[&rate=0.0031601599857833274]23.86666967886856,((1:[&rate=0.003342909707257731]7.063405508808532,6:[&rate=0.0027077002390886434]7.063405508808532):[&rate=0.003608706026762034]11.422130854364752,4:[&rate=0.003946231156721792]18.485536363173285):[&rate=0.0033696064772598504]5.381133315695276):[&rate=0.002546485107328007]4.87834901 [...]
+tree STATE_8000000 [&lnP=-22223.688342809364] = [&R] (9:[&rate=0.002495010991766998]132.95721431214642,((14:[&rate=0.0029186735502755036]88.01648082864294,(((((((2:[&rate=0.0031697464108709546]26.542595234518785,((1:[&rate=0.003324552818459812]6.237694045972108,6:[&rate=0.0025367045463547313]6.237694045972108):[&rate=0.003117648110656674]14.188203165384682,4:[&rate=0.003402432472693037]20.42589721135679):[&rate=0.00328983350228143]6.1166980231619945):[&rate=0.0031697464108709546]3.550463 [...]
+tree STATE_8010000 [&lnP=-22213.084974339086] = [&R] (9:[&rate=0.0025130504053749217]161.25269557720193,(33:[&rate=0.0021542672995838105]107.87766249856625,(14:[&rate=0.002465319482669808]87.73786672419808,(((((((8:[&rate=0.0029955529235232803]18.178688389648055,10:[&rate=0.0030725589019327776]18.178688389648055):[&rate=0.002371200803796041]16.154821900480858,(3:[&rate=0.00344603363320884]11.421298210610242,5:[&rate=0.002662658311271623]11.421298210610242):[&rate=0.0022768554031208543]22 [...]
+tree STATE_8020000 [&lnP=-22215.619239462827] = [&R] (9:[&rate=0.002955243401081507]125.26226788625452,(33:[&rate=0.002921714775868613]92.85571479586844,(14:[&rate=0.0029904019684476105]86.39895873312238,((((((2:[&rate=0.002716920552114851]23.95038325104779,((1:[&rate=0.0030847176454668213]6.926654780059061,6:[&rate=0.002730114300055199]6.926654780059061):[&rate=0.003133872344626958]13.40950333179413,4:[&rate=0.0030460931605045564]20.33615811185319):[&rate=0.0028680398389895854]3.6142251 [...]
+tree STATE_8030000 [&lnP=-22232.745032628063] = [&R] (9:[&rate=0.0018722836633175146]181.0298320275167,((14:[&rate=0.002308236905490067]112.71791353147975,((((((8:[&rate=0.0025348540595162092]25.391499533164186,10:[&rate=0.002293817955677917]25.391499533164186):[&rate=0.0018722836633175146]24.5935210126449,((3:[&rate=0.001976954176874214]16.112161340444903,5:[&rate=0.0023382965293155416]16.112161340444903):[&rate=0.0018722836633175146]29.433372824289886,((13:[&rate=0.0017698546176667265] [...]
+tree STATE_8040000 [&lnP=-22219.134059134885] = [&R] (9:[&rate=0.0017566864143932811]189.19739019221132,((14:[&rate=0.0030923576704942877]78.41914421862008,((((((8:[&rate=0.003251917645483865]25.660899819720175,10:[&rate=0.0019424291989870634]25.660899819720175):[&rate=0.002697441663032059]14.248678108664574,((3:[&rate=0.002491361420898043]16.9243805899722,5:[&rate=0.0023845068653562396]16.9243805899722):[&rate=0.002320032811787616]18.551369962293354,((13:[&rate=0.002093569778478115]23.2 [...]
+tree STATE_8050000 [&lnP=-22201.65170868374] = [&R] (9:[&rate=0.0037618784476435366]100.43170825805426,((14:[&rate=0.003604094364821055]68.39573859924683,((((((2:[&rate=0.003457481518632798]21.07748838421256,((1:[&rate=0.003101912951310414]7.430594585334944,6:[&rate=0.0027935510957256336]7.430594585334944):[&rate=0.00366398357195011]10.493447632497436,4:[&rate=0.0037915799727869467]17.92404221783238):[&rate=0.003101912951310414]3.1534461663801814):[&rate=0.003487317728059543]4.7003540526 [...]
+tree STATE_8060000 [&lnP=-22213.411700795637] = [&R] (9:[&rate=0.0023319377293093925]148.12236228362693,((((((((2:[&rate=0.0023484879512822394]29.089588807105947,((1:[&rate=0.002830924192764411]8.667346013531287,6:[&rate=0.0021852202476739446]8.667346013531287):[&rate=0.0025709574891760404]15.87804951490811,4:[&rate=0.0026722726617259087]24.545395528439396):[&rate=0.003062069444938607]4.544193278666551):[&rate=0.0020640215244168755]3.962767907827377,(18:[&rate=0.0025709574891760404]28.29 [...]
+tree STATE_8070000 [&lnP=-22231.749054822747] = [&R] (9:[&rate=0.002077405897933251]154.6461988267842,((14:[&rate=0.0022987071140761124]98.46073765098453,33:[&rate=0.0025881077340692573]98.46073765098453):[&rate=0.0019563233910292605]18.71986362255346,(((((((2:[&rate=0.002077405897933251]33.10165421049064,((1:[&rate=0.0022064817355284955]11.468621362055062,6:[&rate=0.001969983146281598]11.468621362055062):[&rate=0.002032167310844511]14.482764068205084,4:[&rate=0.002665695120306369]25.951 [...]
+tree STATE_8080000 [&lnP=-22213.575505995985] = [&R] (9:[&rate=0.00208891868212035]164.85647110948403,(33:[&rate=0.0023400320680744603]129.06049066910586,(((((((2:[&rate=0.002282866667728384]28.27728173994551,((1:[&rate=0.0026024624391287124]7.900776938293709,6:[&rate=0.0022711385286765633]7.900776938293709):[&rate=0.0023173756393925103]17.061302952604546,4:[&rate=0.0032054263914590765]24.962079890898256):[&rate=0.002259257352130931]3.3152018490472557):[&rate=0.002635459232113754]4.87299 [...]
+tree STATE_8090000 [&lnP=-22214.71913579334] = [&R] (9:[&rate=0.003211972547356649]121.59958142561857,((14:[&rate=0.003586967977397657]79.81136506552285,((((((2:[&rate=0.0038221312799158473]20.662690250694133,((1:[&rate=0.003667546733129573]5.148623511630273,6:[&rate=0.0035223652718220856]5.148623511630273):[&rate=0.004041439473631622]10.526264253855183,4:[&rate=0.00456215938839565]15.674887765485456):[&rate=0.004069593616404847]4.987802485208677):[&rate=0.0035059758321704074]3.815381289 [...]
+tree STATE_8100000 [&lnP=-22221.542110141163] = [&R] (9:[&rate=0.002924880427677677]148.2176444501858,((((((16:[&rate=0.002699855396320984]38.31343429527811,((8:[&rate=0.002849998016651599]24.898753713469656,10:[&rate=0.002513445503447732]24.898753713469656):[&rate=0.002641851543354845]12.327101411146149,((3:[&rate=0.0030423070839999547]14.680859111919144,5:[&rate=0.0022905467341554657]14.680859111919144):[&rate=0.0027823748897579763]18.94631778789088,((13:[&rate=0.002133113179032052]21. [...]
+tree STATE_8110000 [&lnP=-22206.147084796037] = [&R] (9:[&rate=0.0030235396813965733]140.42398268840535,(((((((((13:[&rate=0.0023926670328844528]21.52214508627108,(11:[&rate=0.003464801393940821]5.414346031570057,12:[&rate=0.003464801393940821]5.414346031570057):[&rate=0.0033959503912016553]16.107799054701026):[&rate=0.002231787847660174]5.401835750948965,7:[&rate=0.00254035946659746]26.923980837220046):[&rate=0.0026879011144216427]7.696911763730416,((3:[&rate=0.0030700998487685196]12.82 [...]
+tree STATE_8120000 [&lnP=-22215.5155352653] = [&R] (9:[&rate=0.0027218594048931243]125.25198652911943,(((((((((3:[&rate=0.003491331644349763]11.267548226738814,5:[&rate=0.002986945391042753]11.267548226738814):[&rate=0.003387229705300092]15.372378755772377,((13:[&rate=0.0029733523178737354]19.028505471318763,7:[&rate=0.0033359023514110413]19.028505471318763):[&rate=0.003425338238390316]5.073071155961141,(11:[&rate=0.003425338238390316]6.176743721366849,12:[&rate=0.003166491226266016]6.17 [...]
+tree STATE_8130000 [&lnP=-22213.267378659002] = [&R] (9:[&rate=0.002354375170100013]130.1822807322263,(((((((((3:[&rate=0.0032796434186518233]15.043963495633673,5:[&rate=0.002296717094412838]15.043963495633673):[&rate=0.0025030854679334615]18.00180745417516,((13:[&rate=0.0028413105959128036]21.24673333678637,7:[&rate=0.003227623484120038]21.24673333678637):[&rate=0.002073954583952072]7.8839852068121665,(11:[&rate=0.0031829496846757874]6.619701127899061,12:[&rate=0.002735921376642117]6.61 [...]
+tree STATE_8140000 [&lnP=-22221.394682633036] = [&R] (9:[&rate=0.0023761903125290177]167.50088984211786,((14:[&rate=0.002370439068437722]106.50238290164202,(((((((3:[&rate=0.00240664626791528]15.687279318582851,5:[&rate=0.0024463850709798966]15.687279318582851):[&rate=0.002415842175033563]19.88243255015376,((13:[&rate=0.002457534361077981]26.235848612302608,7:[&rate=0.00248948223203574]26.235848612302608):[&rate=0.002417969436884634]4.314612216346944,(11:[&rate=0.002363214113434736]8.197 [...]
+tree STATE_8150000 [&lnP=-22217.33609444146] = [&R] (9:[&rate=0.00245321132313613]160.72175280573413,((14:[&rate=0.0022168347945932254]116.97418447306516,(((((((3:[&rate=0.0021499436413271156]17.525775332298036,5:[&rate=0.0021720018804515973]17.525775332298036):[&rate=0.0023271615365644026]20.063797340046452,((13:[&rate=0.0018230598844819988]28.502113177513507,7:[&rate=0.00245321132313613]28.502113177513507):[&rate=0.0022881885411670097]3.657548395879836,(11:[&rate=0.0022757849535000505] [...]
+tree STATE_8160000 [&lnP=-22213.650318863554] = [&R] (9:[&rate=0.0026468986828638227]133.0943199872658,(((((((((2:[&rate=0.00305725927816715]23.222188320981868,((1:[&rate=0.002684035430199699]5.72000190140067,6:[&rate=0.002837775944558473]5.72000190140067):[&rate=0.00305725927816715]13.145883891947875,4:[&rate=0.00393362247004683]18.865885793348546):[&rate=0.0030866205550184617]4.356302527633321):[&rate=0.0028653920560699946]3.186789331287443,(18:[&rate=0.0028790344586253178]22.127884857 [...]
+tree STATE_8170000 [&lnP=-22224.996521321067] = [&R] (9:[&rate=0.0033369262373696322]81.6145714844724,((14:[&rate=0.004331297077750594]53.6841404787203,((((16:[&rate=0.003631508309102336]26.350800164197214,((8:[&rate=0.003809360180072289]17.182635483626,10:[&rate=0.004004961423176715]17.182635483626):[&rate=0.003760678824158189]8.521968726172531,((3:[&rate=0.0037445986059164073]8.522238704495637,5:[&rate=0.004331297077750594]8.522238704495637):[&rate=0.0034719958481408885]12.908453827673 [...]
+tree STATE_8180000 [&lnP=-22234.700481856707] = [&R] (9:[&rate=0.0023425461028944525]140.23565591825962,((14:[&rate=0.002343378240245701]91.3906575036753,33:[&rate=0.002342447701860443]91.3906575036753):[&rate=0.0023448594030176604]8.486156327525393,((((((2:[&rate=0.002342398711102682]30.690204801281823,((1:[&rate=0.002341844917113968]11.183627537676117,6:[&rate=0.002343520010626149]11.183627537676117):[&rate=0.0023414368171928344]15.027124675291827,4:[&rate=0.0023398917006405098]26.2107 [...]
+tree STATE_8190000 [&lnP=-22214.022332492525] = [&R] (9:[&rate=0.003838597026153098]101.57487417653743,(33:[&rate=0.0028059728587225634]95.94325168235846,(14:[&rate=0.0032888977281236784]79.20791403351339,(((((16:[&rate=0.003041672894247161]29.987123832464434,(8:[&rate=0.0030264037274873225]17.5910123613767,10:[&rate=0.0029424304625691603]17.5910123613767):[&rate=0.0030855516392418614]12.396111471087735):[&rate=0.002833818384306694]2.030679136909008,((3:[&rate=0.0029778751649413918]11.36 [...]
+tree STATE_8200000 [&lnP=-22212.083751289127] = [&R] (9:[&rate=0.00255508287327138]132.73429631820179,((33:[&rate=0.0029797491820760112]79.85984299049989,((((((8:[&rate=0.0028225543775964044]19.224102767191027,10:[&rate=0.0027623417188355486]19.224102767191027):[&rate=0.00377900057830296]10.631402669106532,((3:[&rate=0.0034604571768916187]12.348189875184952,5:[&rate=0.0031504671141171607]12.348189875184952):[&rate=0.00331189958473556]16.501902476545155,((13:[&rate=0.00238106657779688]20. [...]
+tree STATE_8210000 [&lnP=-22210.841104215193] = [&R] (9:[&rate=0.003304979537043869]106.83995069851966,((14:[&rate=0.0033711874046782734]76.93258205627698,(((((((2:[&rate=0.003282243683587655]21.187436322410733,((1:[&rate=0.003241954511028687]6.6035853026177636,6:[&rate=0.0036660979957782044]6.6035853026177636):[&rate=0.003506467634953255]11.65710607384781,4:[&rate=0.0036401000681595404]18.260691376465573):[&rate=0.003334491882637041]2.9267449459451598):[&rate=0.003327161417592729]4.6307 [...]
+tree STATE_8220000 [&lnP=-22192.670334705024] = [&R] (9:[&rate=0.0036493671306705314]89.77541959039546,((14:[&rate=0.004506333968891598]56.25364004726752,((((16:[&rate=0.003202229231359581]23.691073198522627,((8:[&rate=0.004671576732237116]13.61258384282375,10:[&rate=0.003380117494430575]13.61258384282375):[&rate=0.004342332479195098]9.548541872435173,((3:[&rate=0.004313005022511907]8.439031096047133,5:[&rate=0.0038898554581735085]8.439031096047133):[&rate=0.0041022879199592525]12.085887 [...]
+tree STATE_8230000 [&lnP=-22225.93960558097] = [&R] (9:[&rate=0.0016790017841293464]213.2785244787272,((14:[&rate=0.0019656156750132167]130.25804305966446,33:[&rate=0.001788906854758246]130.25804305966446):[&rate=0.0018275005734681622]4.992713761323557,(((((((2:[&rate=0.0016114467993762933]38.26280413667089,((1:[&rate=0.002221847245573183]11.457736027479774,6:[&rate=0.0018275005734681622]11.457736027479774):[&rate=0.0023672862086282307]17.194889583123974,4:[&rate=0.00251238958457114]28.6 [...]
+tree STATE_8240000 [&lnP=-22203.357284742306] = [&R] (9:[&rate=0.0026747330734974923]137.3214995133148,(33:[&rate=0.0022290298496828297]104.8091683760917,(14:[&rate=0.0031172504864964154]85.76319442157867,((((((2:[&rate=0.0025409159876856264]28.811056807221966,((1:[&rate=0.0029627824336160512]7.840580348188299,6:[&rate=0.0023157193435029257]7.840580348188299):[&rate=0.0023157193435029257]17.057944794006175,4:[&rate=0.002918810909235239]24.898525142194476):[&rate=0.0029627824336160512]3.9 [...]
+tree STATE_8250000 [&lnP=-22210.91874559474] = [&R] (9:[&rate=0.001985494291980641]158.40822519732805,(33:[&rate=0.0024143301622709967]100.78649407066744,(14:[&rate=0.0026883419948458605]82.64022756479176,((((16:[&rate=0.00234474738979822]39.590005422859505,((8:[&rate=0.0026749792016671717]25.387779897456042,10:[&rate=0.0026154319602897987]25.387779897456042):[&rate=0.0028633620430259027]12.90006847135681,((3:[&rate=0.0028979324447911633]14.771599555808523,5:[&rate=0.002228780245313513]1 [...]
+tree STATE_8260000 [&lnP=-22217.04600477854] = [&R] (9:[&rate=0.003250195021431892]108.53017151252811,((((((((2:[&rate=0.0033721746591855502]22.201845287628746,((1:[&rate=0.004819037265757751]5.265189024100586,6:[&rate=0.0031720829532588023]5.265189024100586):[&rate=0.0032752327230533068]10.946276541973326,4:[&rate=0.004133387560487265]16.21146556607391):[&rate=0.003926380355813892]5.9903797215548344):[&rate=0.0031720829532588023]5.058275687710768,(18:[&rate=0.0031987029463003726]21.9006 [...]
+tree STATE_8270000 [&lnP=-22215.2574033269] = [&R] (9:[&rate=0.0021947367710358438]170.97297639124952,(((((((16:[&rate=0.002526550263135119]36.70185330190349,(8:[&rate=0.002745507018531993]20.4167114132032,10:[&rate=0.002700973746385254]20.4167114132032):[&rate=0.002581277273711488]16.28514188870029):[&rate=0.0022319184267158426]2.1640361151938876,((3:[&rate=0.002020370999797193]20.485998587318385,5:[&rate=0.0017992340031888015]20.485998587318385):[&rate=0.002845806686370392]15.173948551 [...]
+tree STATE_8280000 [&lnP=-22203.989271289087] = [&R] (9:[&rate=0.0020918348808264533]150.74508558442585,(33:[&rate=0.0021975514601516154]111.75500513129116,(((((16:[&rate=0.002022764776058728]37.0633788515614,(((3:[&rate=0.0030478255113644344]12.642450548690894,5:[&rate=0.002339958548327534]12.642450548690894):[&rate=0.0021736975334527746]20.91559706900636,((13:[&rate=0.0023936391144633988]21.47913977858371,7:[&rate=0.003157436221696855]21.47913977858371):[&rate=0.0026836954973812072]4.8 [...]
+tree STATE_8290000 [&lnP=-22228.148865240237] = [&R] (9:[&rate=0.0030947495741328887]127.6923975623635,(33:[&rate=0.0025922835475296092]103.60754302215052,(14:[&rate=0.003129851660068659]75.42410003911026,(((((((3:[&rate=0.0028293804349281662]14.612328344131042,5:[&rate=0.002931214623945086]14.612328344131042):[&rate=0.0027848935037191385]19.84015780141049,((13:[&rate=0.002762292693953559]18.694538704212977,7:[&rate=0.003215885119719146]18.694538704212977):[&rate=0.002750832724070786]6.5 [...]
+tree STATE_8300000 [&lnP=-22229.61993495774] = [&R] (9:[&rate=0.001981458351915538]186.83420209918603,(14:[&rate=0.0019626361830344143]148.70425414735735,(33:[&rate=0.002347547179922043]132.11912317250986,((((((2:[&rate=0.002507361396377777]34.087758789192904,((1:[&rate=0.0018858328572383665]11.104616626508252,6:[&rate=0.0019626361830344143]11.104616626508252):[&rate=0.0019215366897925538]14.910283347782888,4:[&rate=0.002507361396377777]26.01489997429114):[&rate=0.002075814311139244]8.07 [...]
+tree STATE_8310000 [&lnP=-22218.371621660917] = [&R] (9:[&rate=0.0027160586290827736]150.95637128325973,(33:[&rate=0.002921210467928122]101.44212926284152,(14:[&rate=0.003238920153640654]77.44845876882621,((((16:[&rate=0.0031985954616719917]34.98485456813042,(((2:[&rate=0.0032810680551920352]22.636793412613308,((1:[&rate=0.003658248241229258]8.34030225035832,6:[&rate=0.0021785043100535978]8.34030225035832):[&rate=0.0026331444545212275]10.587098717935508,4:[&rate=0.0040216759459363784]18. [...]
+tree STATE_8320000 [&lnP=-22196.61739060277] = [&R] (9:[&rate=0.004145142035582647]93.60895321740323,((((((((8:[&rate=0.0035371852053366825]18.840417214034925,10:[&rate=0.003483261718132085]18.840417214034925):[&rate=0.003142265678784632]10.757464488118394,((3:[&rate=0.0035943414084269342]11.864826740430424,5:[&rate=0.003213722614421737]11.864826740430424):[&rate=0.0030844356068810835]15.539491818115007,((13:[&rate=0.002854606018888658]18.254713092401282,7:[&rate=0.003347950962180528]18. [...]
+tree STATE_8330000 [&lnP=-22223.345459730153] = [&R] (9:[&rate=0.0027028663619895416]131.2835042310361,(33:[&rate=0.002653922926486565]108.14072677525866,(((((16:[&rate=0.002327705272414676]40.59462200961018,(((2:[&rate=0.0023434624292953533]28.094027206241407,((1:[&rate=0.0030586653270440432]7.639680499748504,6:[&rate=0.002988431021324251]7.639680499748504):[&rate=0.002563015539137101]17.026980070114806,4:[&rate=0.0027374866687057417]24.66666056986331):[&rate=0.0030216793580797683]3.427 [...]
+tree STATE_8340000 [&lnP=-22218.742406535308] = [&R] (9:[&rate=0.0023144036658533105]159.0250990131981,(33:[&rate=0.0021957939770327023]109.02304237152643,(14:[&rate=0.00290516756889224]91.51075148740041,(((((((3:[&rate=0.002084763144976663]18.073006439210186,5:[&rate=0.0021138450461586906]18.073006439210186):[&rate=0.0026502088635962278]20.135474003161274,((13:[&rate=0.001986422572809569]27.42182475884177,7:[&rate=0.0028510583586504245]27.42182475884177):[&rate=0.0020382497465642625]3.6 [...]
+tree STATE_8350000 [&lnP=-22222.88385735029] = [&R] (9:[&rate=0.00220991444307968]132.34284181847565,((((((((2:[&rate=0.0023329566730750694]28.627981088368173,((1:[&rate=0.0025472388606005746]6.356540999259641,6:[&rate=0.0029230367314421303]6.356540999259641):[&rate=0.00220991444307968]17.11310891867241,4:[&rate=0.0027229752112419925]23.469649917932053):[&rate=0.002489917872375195]5.158331170436121):[&rate=0.002430639041748558]3.549714439362738,(18:[&rate=0.0021841525097006285]27.9499495 [...]
+tree STATE_8360000 [&lnP=-22217.275684013974] = [&R] (9:[&rate=0.0033726405196663604]129.0539821596791,(33:[&rate=0.002386137672678582]111.2817661790784,(14:[&rate=0.00281308291718166]95.64515954483693,((((((2:[&rate=0.0024937100315654695]28.67575737485301,((1:[&rate=0.0030217642733761435]8.02342724511389,6:[&rate=0.002360186459066785]8.02342724511389):[&rate=0.002398709405933412]15.473362639489753,4:[&rate=0.0029234345469498265]23.496789884603643):[&rate=0.0026481722964010372]5.17896749 [...]
+tree STATE_8370000 [&lnP=-22215.96592353205] = [&R] (9:[&rate=0.002661457170675724]145.3227182445557,(14:[&rate=0.002792756254845935]93.21009489845949,(((((((8:[&rate=0.0030970395230314365]21.623425543088825,10:[&rate=0.003022439790145829]21.623425543088825):[&rate=0.003022439790145829]13.099304407703027,((3:[&rate=0.0026078841723223348]10.89327230384787,5:[&rate=0.0029370343685262102]10.89327230384787):[&rate=0.0028722213690400046]21.92354491947287,((13:[&rate=0.002336307664106086]23.46 [...]
+tree STATE_8380000 [&lnP=-22234.755117273435] = [&R] (9:[&rate=0.0024885515052896967]155.83682521074599,(33:[&rate=0.0024876806162662524]119.1079604576764,((((((((3:[&rate=0.00248650266691475]14.97374791491427,5:[&rate=0.002489540767979491]14.97374791491427):[&rate=0.002481366557399659]17.18271972623039,((13:[&rate=0.0024829695807708635]23.747624544384166,(11:[&rate=0.0024874971029424722]6.627434609103328,12:[&rate=0.0024829695807708635]6.627434609103328):[&rate=0.002484994711357608]17.1 [...]
+tree STATE_8390000 [&lnP=-22228.687005399403] = [&R] (9:[&rate=0.0025690421443542817]149.5801155148328,(33:[&rate=0.0024861110251016773]110.84471568985263,(14:[&rate=0.0023554680394379324]102.6183015719434,((((((2:[&rate=0.002440750870001226]31.593257248729063,((1:[&rate=0.002443878762632825]8.379462947419256,6:[&rate=0.002400950173557538]8.379462947419256):[&rate=0.002450129709660542]18.75824677647546,4:[&rate=0.002518054427027549]27.137709723894716):[&rate=0.0024933584450142175]4.45554 [...]
+tree STATE_8400000 [&lnP=-22215.724045070314] = [&R] (9:[&rate=0.002305028038513752]138.64563954118972,((14:[&rate=0.0019252049276251323]100.80905474113541,((((((2:[&rate=0.002376479847853388]30.29513975978285,((1:[&rate=0.0022521145778887967]8.664215212386905,6:[&rate=0.0021257438178588707]8.664215212386905):[&rate=0.0022012069748688764]17.33037905681858,4:[&rate=0.0025847497727390645]25.994594269205482):[&rate=0.00275206661045194]4.300545490577367):[&rate=0.00275206661045194]6.15695877 [...]
+tree STATE_8410000 [&lnP=-22207.58159737682] = [&R] (9:[&rate=0.002589365153155367]126.86613341107079,(33:[&rate=0.002084633255146005]91.53895255808244,(((((((2:[&rate=0.0028954723460839697]22.68194785561561,((1:[&rate=0.0034163602928520727]6.199721279459041,6:[&rate=0.0026877454546154456]6.199721279459041):[&rate=0.0030084270268359416]11.622463123650789,4:[&rate=0.0039986141391882175]17.82218440310983):[&rate=0.003366673468098166]4.859763452505781):[&rate=0.0031392933422442386]3.9523362 [...]
+tree STATE_8420000 [&lnP=-22221.448196419955] = [&R] ((((((((2:[&rate=0.0034117956380538914]21.204548941003285,((1:[&rate=0.0030049507999839365]6.567329985412869,6:[&rate=0.003452648953419407]6.567329985412869):[&rate=0.004060966460456042]11.133716967071287,4:[&rate=0.0036390261539334242]17.701046952484155):[&rate=0.0031952787105763766]3.5035019885191296):[&rate=0.004026126135573903]2.530777521226703,(18:[&rate=0.004228589776498669]20.7113800900754,(26:[&rate=0.004228589776498669]13.1884 [...]
+tree STATE_8430000 [&lnP=-22212.059402652303] = [&R] ((33:[&rate=0.002343526886957453]120.35500979882909,(14:[&rate=0.002964368565415128]97.22015128780772,((((((2:[&rate=0.0025668372021212275]28.115837173527208,((1:[&rate=0.0023910725856430733]8.710145886517987,6:[&rate=0.0025523539132200797]8.710145886517987):[&rate=0.002142093393308952]14.632028202673874,4:[&rate=0.003429425859894925]23.34217408919186):[&rate=0.0028475108494617073]4.773663084335347):[&rate=0.0022923124353165514]6.52599 [...]
+tree STATE_8440000 [&lnP=-22217.593325497695] = [&R] (((((((((3:[&rate=0.0029245718084413303]13.622673842790615,5:[&rate=0.00269719886738945]13.622673842790615):[&rate=0.003123108002857773]18.909067474602995,((13:[&rate=0.002526153500460928]19.244043672743775,7:[&rate=0.003467106609720445]19.244043672743775):[&rate=0.002770124188919718]5.641930684056266,(11:[&rate=0.00269719886738945]7.0294596272414385,12:[&rate=0.002600401731636211]7.0294596272414385):[&rate=0.002600401731636211]17.8565 [...]
+tree STATE_8450000 [&lnP=-22220.186819395985] = [&R] ((33:[&rate=0.0023088638907739515]108.42531358244418,((((((((2:[&rate=0.0026388450008420676]29.391139707177445,((1:[&rate=0.0030407338954945226]7.733465398706641,6:[&rate=0.0019937726614080095]7.733465398706641):[&rate=0.0027152156378981588]15.925535153886699,4:[&rate=0.0030972028536196123]23.65900055259334):[&rate=0.0031636302522924306]5.732139154584104):[&rate=0.0031636302522924306]3.0920161784934663,(18:[&rate=0.0026388450008420676] [...]
+tree STATE_8460000 [&lnP=-22207.920594676023] = [&R] ((33:[&rate=0.0036906850825812808]71.54612452708089,(((((((2:[&rate=0.004200123575070739]17.82707059636078,((1:[&rate=0.0034695009268889796]5.776408794822428,6:[&rate=0.004008898318348573]5.776408794822428):[&rate=0.003671037778996709]10.62854793331824,4:[&rate=0.004832155031572956]16.404956728140668):[&rate=0.004832155031572956]1.4221138682201122):[&rate=0.0029151461028373337]5.375131302262748,(18:[&rate=0.0038773600968798018]20.13896 [...]
+tree STATE_8470000 [&lnP=-22212.017407489504] = [&R] ((33:[&rate=0.002818830011943036]91.54061753391402,(14:[&rate=0.003645550600628711]68.80956152541245,((((16:[&rate=0.003015374930411587]31.94479229439436,(((3:[&rate=0.0030926967282550685]11.473101628085013,5:[&rate=0.003457385200155343]11.473101628085013):[&rate=0.003322727168566283]17.013652752862022,((13:[&rate=0.002715833644379727]20.042348068774082,(11:[&rate=0.003457385200155343]5.602267576341953,12:[&rate=0.003067844348749081]5. [...]
+tree STATE_8480000 [&lnP=-22224.181359984825] = [&R] ((33:[&rate=0.0017156705303897123]165.55688299333536,(((((((2:[&rate=0.0021424189654082253]35.88198266224372,((1:[&rate=0.002552593200603183]8.947955731766065,6:[&rate=0.0021424189654082253]8.947955731766065):[&rate=0.0014948888858015124]22.17313396991225,4:[&rate=0.002666509460038232]31.121089701678315):[&rate=0.0027163260152308273]4.760892960565407):[&rate=0.0023565454826414605]3.063491352362661,(18:[&rate=0.001972390803978291]34.285 [...]
+tree STATE_8490000 [&lnP=-22223.867240922384] = [&R] ((33:[&rate=0.0020544242125353843]119.30685766808377,(14:[&rate=0.0022278726257813617]102.32590803693087,((((((2:[&rate=0.0022587883225445473]34.246244714633775,((1:[&rate=0.002166241177977337]7.232936709206543,6:[&rate=0.002555963869710815]7.232936709206543):[&rate=0.0019513879069178936]19.667623564056925,4:[&rate=0.0027063809237443965]26.900560273263466):[&rate=0.0026042036374690604]7.3456844413703095):[&rate=0.0025157464571004327]4. [...]
+tree STATE_8500000 [&lnP=-22213.18492762478] = [&R] ((((((((3:[&rate=0.0035818020979245914]11.424315118186017,5:[&rate=0.003395858906632112]11.424315118186017):[&rate=0.0034532620295046434]13.497623870203038,((13:[&rate=0.0032018780664650544]15.652160384438318,7:[&rate=0.0038290888625721097]15.652160384438318):[&rate=0.0034532620295046434]3.2357584404381825,(11:[&rate=0.004039467561851394]4.741765729061154,12:[&rate=0.0038719835101433744]4.741765729061154):[&rate=0.003814816160411599]14. [...]
+tree STATE_8510000 [&lnP=-22215.881447732718] = [&R] (((((((16:[&rate=0.002460113231672473]39.16590892258912,(((3:[&rate=0.002198972866333873]16.331479913231178,5:[&rate=0.0020734283607537107]16.331479913231178):[&rate=0.002770867544133948]16.3823484071481,((13:[&rate=0.0024844617305931926]22.31827045490332,7:[&rate=0.003019991385376601]22.31827045490332):[&rate=0.0026430845017964386]2.160224425445474,(11:[&rate=0.0025565195565653348]6.454636602046138,12:[&rate=0.0024226863584211012]6.45 [...]
+tree STATE_8520000 [&lnP=-22217.289827943765] = [&R] (((14:[&rate=0.002441691253493302]103.1270812920996,((((((3:[&rate=0.002420808394213301]13.573110666334847,5:[&rate=0.0024764163074024507]13.573110666334847):[&rate=0.0022017537375779877]22.038076847686824,((13:[&rate=0.0018519131025104636]24.88376099108603,7:[&rate=0.0024764163074024507]24.88376099108603):[&rate=0.00238270171087797]5.011593964365133,(11:[&rate=0.0023737423614688027]8.10786312019908,12:[&rate=0.002234953279426148]8.107 [...]
+tree STATE_8530000 [&lnP=-22204.544340587174] = [&R] ((14:[&rate=0.004460059868546967]68.48446810904997,(33:[&rate=0.0053594857242979296]58.19358954981087,(((((((3:[&rate=0.004927231448688768]8.702760951824372,5:[&rate=0.003936420656035485]8.702760951824372):[&rate=0.004149497116684736]12.150502084629784,((7:[&rate=0.00482268877620021]12.705309458674813,13:[&rate=0.00407840326057268]12.705309458674813):[&rate=0.0038633799042159813]5.337579723015276,(11:[&rate=0.005057039068322997]3.47498 [...]
+tree STATE_8540000 [&lnP=-22229.337904009706] = [&R] ((33:[&rate=0.0024667512260486773]104.98007049234413,(14:[&rate=0.0024878451865088563]89.14089372643498,(((((((3:[&rate=0.0031132355555947675]12.99994328601585,5:[&rate=0.002571717038457385]12.99994328601585):[&rate=0.0022489483043071864]25.069104885429113,((7:[&rate=0.0027225024055770406]23.47592056937897,13:[&rate=0.002442646124908282]23.47592056937897):[&rate=0.0027385514375364082]3.944553944162113,(11:[&rate=0.0024878451865088563]6 [...]
+tree STATE_8550000 [&lnP=-22224.357507645826] = [&R] ((14:[&rate=0.0024920399547220307]94.56307010878346,(((((((3:[&rate=0.002532163410246068]15.433383477020145,5:[&rate=0.002532163410246068]15.433383477020145):[&rate=0.002513882582825841]18.059545019545595,((11:[&rate=0.0025624368402195536]6.031594017520336,12:[&rate=0.002461730227449823]6.031594017520336):[&rate=0.0024437220741153317]23.27745111098734,(13:[&rate=0.0023440401034618952]23.516184011281137,7:[&rate=0.0024294530102009033]23 [...]
+tree STATE_8560000 [&lnP=-22227.67761915931] = [&R] (((33:[&rate=0.0028252278714593296]96.39044511948812,((((((2:[&rate=0.0030082325959569626]26.04528379950829,((1:[&rate=0.0029980022661436857]8.125534547377972,6:[&rate=0.002733136005863166]8.125534547377972):[&rate=0.0027857025393686926]14.455814953908819,4:[&rate=0.0029278159574065016]22.58134950128679):[&rate=0.0028415435225172202]3.463934298221499):[&rate=0.0026664661185645316]4.89001982034851,(18:[&rate=0.0028086580993523336]23.7341 [...]
+tree STATE_8570000 [&lnP=-22215.703681056963] = [&R] (((14:[&rate=0.0032470437133011237]75.1638674694507,((((((2:[&rate=0.003020028111856951]25.381800769212475,((1:[&rate=0.002685146455611383]7.564290868196091,6:[&rate=0.002813125914688928]7.564290868196091):[&rate=0.002718674655598244]12.43756789897491,4:[&rate=0.0030998381177026976]20.001858767171):[&rate=0.0028867317704841778]5.379942002041474):[&rate=0.003547100145307263]5.316274925863937,(18:[&rate=0.0028027975691888585]23.069494424 [...]
+tree STATE_8580000 [&lnP=-22210.420242438155] = [&R] (((14:[&rate=0.0035686091124262744]65.56848745034286,((((((2:[&rate=0.0032038501658736016]21.426927099595474,((1:[&rate=0.003941382066221081]6.839843866471715,6:[&rate=0.00344135008150074]6.839843866471715):[&rate=0.0032727701958346865]12.79637042760724,4:[&rate=0.0036695456956397296]19.636214294078954):[&rate=0.003356510347897467]1.7907128055165202):[&rate=0.0032569275325303043]2.9996649102764117,(18:[&rate=0.003580537959045618]21.151 [...]
+tree STATE_8590000 [&lnP=-22212.291406233864] = [&R] ((14:[&rate=0.0024402534815652875]84.47882357573818,(33:[&rate=0.003362821096272649]71.5041784758574,(((16:[&rate=0.0031036183552902406]35.52222796541298,(((8:[&rate=0.002927649076934431]19.467243722436407,10:[&rate=0.003383277601416751]19.467243722436407):[&rate=0.0036082147912195727]12.516574865622289,((7:[&rate=0.0031640795332504583]21.959615550026104,(13:[&rate=0.0030194178864603244]16.976838881952848,(11:[&rate=0.00328268376737484 [...]
+tree STATE_8600000 [&lnP=-22208.478410446332] = [&R] ((33:[&rate=0.0029142693249905385]97.75139827868239,(14:[&rate=0.0032936528913630842]81.07413348784864,(((((8:[&rate=0.0030677418192751943]17.84524809436085,10:[&rate=0.0033379639084711076]17.84524809436085):[&rate=0.0038638725226766446]12.267129821036697,((((13:[&rate=0.0027551979727572545]18.08020037834343,7:[&rate=0.003938956415015783]18.08020037834343):[&rate=0.003249454011112038]3.593321777896012,(11:[&rate=0.004125504989545731]4. [...]
+tree STATE_8610000 [&lnP=-22209.375953416] = [&R] ((33:[&rate=0.0027710426003450597]88.45646027275147,(14:[&rate=0.004019505623866492]65.46329691813203,((((16:[&rate=0.002716982568466557]31.37671116374777,((((13:[&rate=0.003460366506396444]17.467441006909088,7:[&rate=0.0034847707849318147]17.467441006909088):[&rate=0.00410008147291357]3.721944396560307,(11:[&rate=0.003460366506396444]4.954596695074035,12:[&rate=0.003536109770326572]4.954596695074035):[&rate=0.0033056867023028677]16.23478 [...]
+tree STATE_8620000 [&lnP=-22228.00044138131] = [&R] ((33:[&rate=0.0023901952037776474]124.9315701122762,((((((((13:[&rate=0.002530973704885827]25.534440859140204,7:[&rate=0.0025224861910120135]25.534440859140204):[&rate=0.0022992215856284595]2.753537288531575,(11:[&rate=0.002583281654124005]5.444134593590799,12:[&rate=0.0024724111291554197]5.444134593590799):[&rate=0.0024249394192190233]22.84384355408098):[&rate=0.00250642455953581]8.238496356780658,(3:[&rate=0.0025595615118642343]14.331 [...]
+tree STATE_8630000 [&lnP=-22212.504949827737] = [&R] (((((((16:[&rate=0.002611183206014326]39.28482731235596,((8:[&rate=0.002689845132220788]23.276938257080033,10:[&rate=0.0025964036978456412]23.276938257080033):[&rate=0.002611183206014326]15.198882412181334,(((13:[&rate=0.002470853596974313]22.575946103722046,(11:[&rate=0.0032023119122634664]6.191633236647051,12:[&rate=0.0026415789141779654]6.191633236647051):[&rate=0.002416569636764547]16.384312867074996):[&rate=0.002706835880657342]3. [...]
+tree STATE_8640000 [&lnP=-22218.30007914279] = [&R] (((((((((((13:[&rate=0.0022073496404088427]23.435390358312112,(11:[&rate=0.002231785302975941]7.203494798229839,12:[&rate=0.002728525303270601]7.203494798229839):[&rate=0.0029736923753844076]16.231895560082272):[&rate=0.0028601669461484273]6.311484400327419,7:[&rate=0.0022561439785062903]29.74687475863953):[&rate=0.001975085869454806]8.320710654619734,(3:[&rate=0.00242789404628577]15.982518337360583,5:[&rate=0.0022561439785062903]15.982 [...]
+tree STATE_8650000 [&lnP=-22231.059222253676] = [&R] (((((((((3:[&rate=0.0022434202863868967]18.07206924898045,5:[&rate=0.002210438430590734]18.07206924898045):[&rate=0.002077196878580617]19.59759146484853,((13:[&rate=0.0025219188869795386]22.581866672457856,(11:[&rate=0.002480544111403527]6.895103948706141,12:[&rate=0.0024143155344204656]6.895103948706141):[&rate=0.0023332899603035312]15.686762723751716):[&rate=0.00226190022978827]4.751474129531129,7:[&rate=0.0024143155344204656]27.3333 [...]
+tree STATE_8660000 [&lnP=-22237.32092306502] = [&R] ((((((((((23:[&rate=0.0023398101969143285]12.010417260990952,17:[&rate=0.002521110305018325]12.010417260990952):[&rate=0.0024986277965937595]10.945808470125666,(((21:[&rate=0.0023494082466904657]4.101276649797101,(19:[&rate=0.0018673438862388263]2.7110100937037838,20:[&rate=0.002477139195418006]2.7110100937037838):[&rate=0.002521110305018325]1.3902665560933172):[&rate=0.0021642774258963326]5.44977007547394,15:[&rate=0.002570248262353132 [...]
+tree STATE_8670000 [&lnP=-22212.52835381833] = [&R] ((33:[&rate=0.0028917890231266227]87.11833446989061,(((((((23:[&rate=0.003058725731995282]8.897736280848,17:[&rate=0.0030516329729804443]8.897736280848):[&rate=0.0028461663875036236]12.624080181325349,((((21:[&rate=0.0028986656264277846]3.4150270510112053,(20:[&rate=0.0028917890231266227]2.1490154916210784,19:[&rate=0.003145549991137005]2.1490154916210784):[&rate=0.0029806889833552397]1.2660115593901269):[&rate=0.0030974574346144938]5.6 [...]
+tree STATE_8680000 [&lnP=-22209.426845517653] = [&R] ((33:[&rate=0.0024568292675859844]105.66163440371132,(14:[&rate=0.0025144077173279086]96.70045988440113,((((((3:[&rate=0.0024452484867285382]17.048841713532738,5:[&rate=0.00234826841617857]17.048841713532738):[&rate=0.0021921167961177638]20.62557277204816,((13:[&rate=0.0020497616377765375]23.945326938705257,(11:[&rate=0.0028481705630843744]5.597225080879841,12:[&rate=0.0023735993437002424]5.597225080879841):[&rate=0.0027046974902375266 [...]
+tree STATE_8690000 [&lnP=-22218.47327645192] = [&R] ((((((((((3:[&rate=0.0030713478494630376]13.787055698263629,5:[&rate=0.002426426725612524]13.787055698263629):[&rate=0.002566929140860391]14.452527542406727,((13:[&rate=0.002698417034552136]18.439754349549766,7:[&rate=0.00297926575617168]18.439754349549766):[&rate=0.002883132093773044]5.062079496771698,(11:[&rate=0.0022489047900185392]6.930373620513806,12:[&rate=0.0026220874386447416]6.930373620513806):[&rate=0.0025940241259773834]16.57 [...]
+tree STATE_8700000 [&lnP=-22211.266956837695] = [&R] (((14:[&rate=0.0027632470640767334]74.45937098894647,((((((((15:[&rate=0.0028741069381761387]8.55618315682739,((19:[&rate=0.0029232246243074004]2.4875222574169293,20:[&rate=0.0030299024044870186]2.4875222574169293):[&rate=0.0026506520691997318]1.9843697068170587,21:[&rate=0.0025236259900635475]4.471891964233988):[&rate=0.003537501983614963]4.084291192593401):[&rate=0.0027934579986346347]2.516797353791956,24:[&rate=0.0027632470640767334 [...]
+tree STATE_8710000 [&lnP=-22226.69281822305] = [&R] (((14:[&rate=0.0027253537579269745]98.07397982889677,(((16:[&rate=0.0025067791767412434]40.93370844806356,((((3:[&rate=0.0027253537579269745]15.87352517662437,5:[&rate=0.0023249960438894595]15.87352517662437):[&rate=0.002755356394032177]19.377202734569664,((13:[&rate=0.0024410255480197826]25.036315488480113,7:[&rate=0.003078154641342593]25.036315488480113):[&rate=0.002526534591132374]3.4791828975744608,(11:[&rate=0.0022852558317803407]7 [...]
+tree STATE_8720000 [&lnP=-22229.50253315307] = [&R] (((14:[&rate=0.0030634527832161386]84.45969204503126,(((((((24:[&rate=0.0024172470390087263]14.134364546229257,(((21:[&rate=0.001869823626302903]3.499299732360622,20:[&rate=0.0026426842123909887]3.499299732360622):[&rate=0.0024172470390087263]0.8235412374048434,19:[&rate=0.0027786791135299173]4.3228409697654655):[&rate=0.003724526164961593]3.7225456717709626,15:[&rate=0.0031253940705025787]8.045386641536428):[&rate=0.002209553443661889] [...]
+tree STATE_8730000 [&lnP=-22215.763720320316] = [&R] ((33:[&rate=0.0024332343617505603]110.45922422944568,((((((((24:[&rate=0.0028068722170985078]13.101841183030581,(((21:[&rate=0.002835103846241994]2.8887358388158644,20:[&rate=0.0025899763525861997]2.8887358388158644):[&rate=0.002646332615955895]0.5353517402465031,19:[&rate=0.0027609644012473204]3.4240875790623675):[&rate=0.002952396778167274]6.170649891414255,15:[&rate=0.002854475808835495]9.594737470476623):[&rate=0.002940214647837644 [...]
+tree STATE_8740000 [&lnP=-22218.24900530356] = [&R] ((33:[&rate=0.002896680195233685]90.84733385876604,((((((((24:[&rate=0.003928896978144654]10.294692199229715,((21:[&rate=0.0032443961538602185]4.164245310254224,(19:[&rate=0.0036925598894267837]2.0725337906859873,20:[&rate=0.0037351767098646924]2.0725337906859873):[&rate=0.0037131474806642545]2.091711519568237):[&rate=0.003344056309596609]2.550735233279,15:[&rate=0.0033076011431864235]6.714980543533224):[&rate=0.0035592760996881988]3.57 [...]
+tree STATE_8750000 [&lnP=-22212.914578510383] = [&R] ((((((((((24:[&rate=0.0027667909930139596]17.320864539611666,((21:[&rate=0.001897275970304375]3.5250437222242637,(20:[&rate=0.0028580297617805126]3.4765848610120034,19:[&rate=0.002423355045111651]3.4765848610120034):[&rate=0.003247192972579251]0.048458861212260285):[&rate=0.002343957204223609]7.148742526613792,15:[&rate=0.0020635012212292204]10.673786248838056):[&rate=0.002273261694257258]6.64707829077361):[&rate=0.0028852191412027]5.5 [...]
+tree STATE_8760000 [&lnP=-22217.351821654833] = [&R] (((((((16:[&rate=0.001673745321344123]45.00916055163053,((8:[&rate=0.002427426452776058]26.314401167875562,10:[&rate=0.0022199741966947885]26.314401167875562):[&rate=0.0023327712730471747]16.64694106981155,(((13:[&rate=0.001841082802104929]30.756555652216885,(11:[&rate=0.002368689086798093]8.891185465372242,12:[&rate=0.0018771495423768327]8.891185465372242):[&rate=0.0023875384591240984]21.865370186844643):[&rate=0.0022351961836277563]4 [...]
+tree STATE_8770000 [&lnP=-22226.581746700856] = [&R] (((((((((2:[&rate=0.0027797206349966293]27.266257571716285,((1:[&rate=0.0026172962005226773]7.967442533194987,6:[&rate=0.0025106951051151382]7.967442533194987):[&rate=0.0020720808366756502]17.24505092880969,4:[&rate=0.0030946079421615225]25.212493462004677):[&rate=0.0021319982899623076]2.053764109711608):[&rate=0.002656561699506243]6.652359840311991,(18:[&rate=0.0030380779234625063]28.425554161603294,(26:[&rate=0.0026042230801975952]17 [...]
+tree STATE_8780000 [&lnP=-22222.451631806824] = [&R] ((((((((((3:[&rate=0.002318243210276883]17.04963046280053,5:[&rate=0.0018970008293929103]17.04963046280053):[&rate=0.002370372156426456]20.727422797655706,(8:[&rate=0.0028931664810939485]19.505323020325932,10:[&rate=0.0026219791310262246]19.505323020325932):[&rate=0.0021727543216814443]18.271730240130303):[&rate=0.002644500544444754]1.5620687784113017,((13:[&rate=0.0017619717580265625]29.81105172164476,(11:[&rate=0.002669487740289126]6 [...]
+tree STATE_8790000 [&lnP=-22228.664567291824] = [&R] (((((((((2:[&rate=0.0018021172070584904]34.95366005359829,((1:[&rate=0.0020939124074294407]9.572105437987714,6:[&rate=0.0018976096491103702]9.572105437987714):[&rate=0.002116417804941857]18.709772566523583,4:[&rate=0.0024183206897768236]28.2818780045113):[&rate=0.001887776057129838]6.671782049086993):[&rate=0.0021050202230812375]6.608922864524544,(18:[&rate=0.00207243851033529]32.17709856420206,(26:[&rate=0.002128142397730464]22.259609 [...]
+tree STATE_8800000 [&lnP=-22228.311445482363] = [&R] (((((((((8:[&rate=0.0027382200216867934]21.021640643275777,10:[&rate=0.0024172852200714866]21.021640643275777):[&rate=0.0019005569212773277]15.716810627981086,((3:[&rate=0.003619060843584347]12.382134436389729,5:[&rate=0.0020582987412504613]12.382134436389729):[&rate=0.0029874047489347605]17.246168330448356,((13:[&rate=0.0023087085595312087]21.972677463168296,(11:[&rate=0.0023087085595312087]9.210426847013562,12:[&rate=0.00208747191569 [...]
+tree STATE_8810000 [&lnP=-22216.527247605027] = [&R] ((14:[&rate=0.001959856961094174]108.67886150005704,((((((((3:[&rate=0.002438826397165791]14.346445670452233,5:[&rate=0.002284441704350546]14.346445670452233):[&rate=0.00191337666075806]20.42892037781089,((13:[&rate=0.002122118098783779]23.588498037756466,7:[&rate=0.002586948884575351]23.588498037756466):[&rate=0.0022966142527308655]1.7400700951852812,(11:[&rate=0.002771289173107952]7.080836563077022,12:[&rate=0.002607396504529404]7.08 [...]
+tree STATE_8820000 [&lnP=-22211.013639256736] = [&R] ((33:[&rate=0.002081787413316681]107.32582044155089,(14:[&rate=0.0025369305999127416]84.55177772934813,((((16:[&rate=0.0022704139662547574]39.941392896007294,(((3:[&rate=0.002725558594730731]13.044529312879815,5:[&rate=0.002550279766128181]13.044529312879815):[&rate=0.0023662122478272513]20.38829382487897,((11:[&rate=0.002680844929556454]6.331550177529807,12:[&rate=0.0026610970808833598]6.331550177529807):[&rate=0.0021748436773637884]2 [...]
+tree STATE_8830000 [&lnP=-22215.51392320016] = [&R] ((33:[&rate=0.0026489220260698656]96.28862256529608,(((((((2:[&rate=0.0030298465345721965]28.142889143346743,((1:[&rate=0.0024366973867455234]7.308309468379475,6:[&rate=0.003256481722114184]7.308309468379475):[&rate=0.003212204620817928]15.966454710353213,4:[&rate=0.0027383752171235547]23.27476417873269):[&rate=0.002605731675138555]4.868124964614054):[&rate=0.0025631806726317567]3.939651775260387,(18:[&rate=0.0025420612256059102]25.5119 [...]
+tree STATE_8840000 [&lnP=-22232.332472146943] = [&R] ((33:[&rate=0.0023727785219990146]111.96545304006038,(14:[&rate=0.0023456099354129275]106.93067378549264,((((16:[&rate=0.0023128230885967717]38.55755235337178,(((3:[&rate=0.0024892667318883554]17.118595510694217,5:[&rate=0.002478341118752806]17.118595510694217):[&rate=0.0024892667318883554]17.164284162965625,(7:[&rate=0.0023596869818127936]28.381259381973425,((11:[&rate=0.0026524750505700903]6.085869095559429,12:[&rate=0.00255649128703 [...]
+tree STATE_8850000 [&lnP=-22214.92687568537] = [&R] (((14:[&rate=0.0026534657657384195]91.44400370208218,(((((((3:[&rate=0.0026799527629567848]13.324364505013174,5:[&rate=0.002685284644890144]13.324364505013174):[&rate=0.0026722761730789742]19.06929171118304,(7:[&rate=0.0026909865606871233]26.553859860358173,((11:[&rate=0.0026346648401479583]6.436263230991707,12:[&rate=0.0026519337091307324]6.436263230991707):[&rate=0.0026164476467541815]16.063134001669944,13:[&rate=0.0026658867695047543 [...]
+tree STATE_8860000 [&lnP=-22212.832791803314] = [&R] ((14:[&rate=0.002669579823893756]97.37758116845811,(((((16:[&rate=0.002669579823893756]35.573883077348455,(((3:[&rate=0.002431341559074707]14.693471579237592,5:[&rate=0.002382053422205377]14.693471579237592):[&rate=0.0031733402558591266]15.020142858331507,(7:[&rate=0.003081200797630383]24.186239829246375,((11:[&rate=0.0031733402558591266]6.51875688113305,12:[&rate=0.002682517954849771]6.51875688113305):[&rate=0.0033083907612922675]14.1 [...]
+tree STATE_8870000 [&lnP=-22221.706621129106] = [&R] ((14:[&rate=0.0015452924712110626]138.30579739891286,(33:[&rate=0.0020834231385390587]123.18094288990211,((((16:[&rate=0.001706208366321649]49.92775527245462,((8:[&rate=0.002481984692011616]26.44907074403614,10:[&rate=0.0020594722111793883]26.44907074403614):[&rate=0.0018200565657876641]19.678765160889736,((3:[&rate=0.001868876526031559]14.679521006128457,5:[&rate=0.0024121663047139727]14.679521006128457):[&rate=0.002036850271294488]29 [...]
+tree STATE_8880000 [&lnP=-22220.270393108305] = [&R] ((33:[&rate=0.002298946655286582]110.01280148797896,(14:[&rate=0.002853945357357675]88.20224357535058,((((16:[&rate=0.0021923626738366005]39.39660649233457,((8:[&rate=0.002407208256051249]24.22963912533121,10:[&rate=0.0022770738336612933]24.22963912533121):[&rate=0.0027194571480430993]13.915628463340894,((3:[&rate=0.002309234436807566]15.591329452541695,5:[&rate=0.0022528970526026816]15.591329452541695):[&rate=0.002298946655286582]18.2 [...]
+tree STATE_8890000 [&lnP=-22223.756954784058] = [&R] ((14:[&rate=0.0021579137570512667]118.76882612524331,(33:[&rate=0.0027049183433550967]106.26862159379742,((((16:[&rate=0.002366667137235678]43.27452734738673,((8:[&rate=0.0022607084828070822]23.003036761512035,10:[&rate=0.0024663293003865396]23.003036761512035):[&rate=0.002750794078936596]16.760249777577584,((3:[&rate=0.0020436891232037398]16.657928216257996,5:[&rate=0.002411493824485461]16.657928216257996):[&rate=0.0023207005803835895 [...]
+tree STATE_8900000 [&lnP=-22224.99018287714] = [&R] ((33:[&rate=0.0032757910685248196]76.22427246932601,(14:[&rate=0.003244639968736252]63.70140942918059,(((((((3:[&rate=0.0033111607243495795]11.216468530679233,5:[&rate=0.0032156369395015946]11.216468530679233):[&rate=0.0029580950455926035]15.62273791300683,(7:[&rate=0.0032387163316709424]21.646168790479308,((11:[&rate=0.0031531192788829742]7.725638282652923,12:[&rate=0.0029282697341852775]7.725638282652923):[&rate=0.0032156369395015946] [...]
+tree STATE_8910000 [&lnP=-22216.845972555642] = [&R] ((33:[&rate=0.002081494896182765]140.25767677252244,(14:[&rate=0.002460353705260889]104.64122062048177,((((((8:[&rate=0.0020327029035514177]30.25962947782697,10:[&rate=0.002164848481572346]30.25962947782697):[&rate=0.002042770492918068]13.8226663274598,((3:[&rate=0.0021095102428224017]20.312661713277873,5:[&rate=0.0018966904379695928]20.312661713277873):[&rate=0.002164848481572346]18.84543283067629,((11:[&rate=0.0020909002537421004]7.3 [...]
+tree STATE_8920000 [&lnP=-22223.976776796568] = [&R] ((14:[&rate=0.00276560696239197]101.4653175136493,(33:[&rate=0.0027329071320447394]88.63285721642008,(((((((3:[&rate=0.003533324547834348]9.302473169733842,5:[&rate=0.003184079071194122]9.302473169733842):[&rate=0.0027808857676396618]21.722475169370277,(7:[&rate=0.0031290521454492475]24.774365516559257,((11:[&rate=0.0027151999525832212]6.805542807595848,12:[&rate=0.0025276697158498973]6.805542807595848):[&rate=0.0033423503020921425]15. [...]
+tree STATE_8930000 [&lnP=-22212.62302659353] = [&R] ((((((((((3:[&rate=0.0024515155826325048]18.869820557209255,5:[&rate=0.002169181778207871]18.869820557209255):[&rate=0.002469353138676555]14.763554532730563,((11:[&rate=0.003374571439426252]7.433171799209749,12:[&rate=0.0019898222723894595]7.433171799209749):[&rate=0.00256000696016978]21.512573354478338,(7:[&rate=0.0026760793199069365]24.789950858532116,13:[&rate=0.0023075234706742935]24.789950858532116):[&rate=0.0023259407197364344]4.1 [...]
+tree STATE_8940000 [&lnP=-22219.24692843883] = [&R] ((33:[&rate=0.002688442096248109]90.22041632976526,((((((8:[&rate=0.0030421228920106203]21.460322171565693,10:[&rate=0.0026322448947567684]21.460322171565693):[&rate=0.0033642467151736976]13.308775003044978,((3:[&rate=0.002688442096248109]14.283072675075697,5:[&rate=0.0028989812907737264]14.283072675075697):[&rate=0.002824595641269166]17.800073214438267,(7:[&rate=0.0028082045326289365]24.52745655922392,((11:[&rate=0.00305472504227142]4. [...]
+tree STATE_8950000 [&lnP=-22221.246601969997] = [&R] ((((((((((23:[&rate=0.0024899254062336657]12.55508336035489,17:[&rate=0.002095917426666023]12.55508336035489):[&rate=0.0023582243650400025]13.598491982553385,(24:[&rate=0.0022693088891083306]16.32998733280815,((19:[&rate=0.0029582457928337604]5.753621584482522,(21:[&rate=0.0026103890596422966]2.629519746628197,20:[&rate=0.0026208813485798354]2.629519746628197):[&rate=0.00289969533649678]3.1241018378543246):[&rate=0.0031135764097984893] [...]
+tree STATE_8960000 [&lnP=-22210.71690456531] = [&R] ((33:[&rate=0.0024498749192567062]107.14833355553725,(((((((23:[&rate=0.003083182355982171]8.629434067134449,17:[&rate=0.003083182355982171]8.629434067134449):[&rate=0.0024970504585807352]14.825438001713094,(22:[&rate=0.0033214819243417657]20.43827096729102,(24:[&rate=0.002806057298990524]15.21260004049949,((21:[&rate=0.00277947690371623]3.320104279064032,(19:[&rate=0.0034021876104526655]2.093182266944376,20:[&rate=0.002391759712547635] [...]
+tree STATE_8970000 [&lnP=-22209.8969545736] = [&R] ((33:[&rate=0.0024715316629880545]101.50102962636768,(((((16:[&rate=0.002578573062646454]36.912283770472236,((((11:[&rate=0.0033551773765120594]7.6440965648833465,12:[&rate=0.002423903189367269]7.6440965648833465):[&rate=0.0031985224531474138]16.14278928793282,(7:[&rate=0.002904549295699416]21.785736000539053,13:[&rate=0.0023131131493222]21.785736000539053):[&rate=0.003392966655717737]2.0011498522771163):[&rate=0.0028463551550843586]8.79 [...]
+tree STATE_8980000 [&lnP=-22205.68219720338] = [&R] (((33:[&rate=0.002960766721078247]87.86515213605061,(((((((11:[&rate=0.0030101964161844902]4.485076498014912,12:[&rate=0.0037028157563295558]4.485076498014912):[&rate=0.0035791024820272456]18.35969247452372,(7:[&rate=0.0038094078212964873]18.28155456549997,13:[&rate=0.0023677258945956426]18.28155456549997):[&rate=0.003318316429542181]4.563214407038661):[&rate=0.004034790671343277]3.982983118330715,(3:[&rate=0.0035791024820272456]11.4746 [...]
+tree STATE_8990000 [&lnP=-22215.712503261417] = [&R] ((14:[&rate=0.002954861921529413]90.45679121783667,(33:[&rate=0.003411093337389961]86.73002662137783,(((((((7:[&rate=0.0036606612336181087]22.1246377932847,((11:[&rate=0.003969645479899166]4.648050598056819,12:[&rate=0.002954861921529413]4.648050598056819):[&rate=0.0033802626085170976]11.466514620777595,13:[&rate=0.0031569895132287228]16.114565218834414):[&rate=0.003350257975747348]6.010072574450284):[&rate=0.0024094139245513175]6.4105 [...]
+tree STATE_9000000 [&lnP=-22226.51850580425] = [&R] ((14:[&rate=0.0015694029822063405]146.19605219285577,(33:[&rate=0.002221079433266061]124.82412470674844,((((16:[&rate=0.0021254227314996357]54.066837914487266,((((11:[&rate=0.0015362706658472638]11.750855158104834,12:[&rate=0.0013401078556685376]11.750855158104834):[&rate=0.0020685432527027133]25.282368638579207,(7:[&rate=0.0022388578766233984]31.126098149144962,13:[&rate=0.0018297243294776303]31.126098149144962):[&rate=0.00248463324771 [...]
+tree STATE_9010000 [&lnP=-22225.165796293833] = [&R] ((14:[&rate=0.0018482188732938014]144.60425169059795,(33:[&rate=0.0017931687577950528]125.7860752744706,(((((((7:[&rate=0.0021600075569695822]38.905988969912386,((11:[&rate=0.001860762981203899]10.50070649518284,12:[&rate=0.0017612358909643105]10.50070649518284):[&rate=0.0021128574780295136]19.511334954780573,13:[&rate=0.0018848475680751842]30.012041449963412):[&rate=0.002025973672336175]8.893947519948973):[&rate=0.0018848475680751842] [...]
+tree STATE_9020000 [&lnP=-22221.35862929271] = [&R] (((33:[&rate=0.00242358498772892]95.86441804386222,((((((7:[&rate=0.002412650161615611]29.965290159313962,((11:[&rate=0.0023258260238402267]7.8102828748432795,12:[&rate=0.0022598474720112336]7.8102828748432795):[&rate=0.0026780967846163823]16.359048708709707,13:[&rate=0.0023258260238402267]24.16933158355299):[&rate=0.002701544731431671]5.795958575760974):[&rate=0.002767487848954524]8.432582985205688,((3:[&rate=0.0028972854742930223]14.4 [...]
+tree STATE_9030000 [&lnP=-22233.25861212204] = [&R] ((33:[&rate=0.0020457156036683753]136.95882064764643,(14:[&rate=0.0018888834335142415]127.26223106383162,(((((((3:[&rate=0.0021585110171161532]16.275217437413225,5:[&rate=0.0024881698801104274]16.275217437413225):[&rate=0.0017388909439007803]28.919570296522302,((11:[&rate=0.0020236683491248906]9.066332652724679,12:[&rate=0.0019401134122441785]9.066332652724679):[&rate=0.0019605925177026804]28.2447771298429,(7:[&rate=0.002488169880110427 [...]
+tree STATE_9040000 [&lnP=-22222.484697383774] = [&R] ((33:[&rate=0.0017408449395108355]123.87485880876683,(14:[&rate=0.002079655508699305]98.50197856457297,(((16:[&rate=0.0020147311130033034]46.97812228913591,((((3:[&rate=0.0030573470548572006]11.587107457899625,5:[&rate=0.00225949236049927]11.587107457899625):[&rate=0.00225949236049927]25.851109564191717,((11:[&rate=0.0020957072639274056]8.455679859019861,12:[&rate=0.0016489032012356507]8.455679859019861):[&rate=0.002573254434063972]20. [...]
+tree STATE_9050000 [&lnP=-22228.397803719843] = [&R] ((33:[&rate=0.0028513763304865523]95.59455295503606,(((((((23:[&rate=0.002754606367414238]9.211407979371128,17:[&rate=0.003008973029590293]9.211407979371128):[&rate=0.0025743648359206465]15.509877520450818,(22:[&rate=0.002946850577266934]20.55076915204093,(24:[&rate=0.002955833450000094]14.908009497977723,((21:[&rate=0.002761338451634633]4.448221669024431,(19:[&rate=0.0029141608092639287]3.258092331171241,20:[&rate=0.003053047572022465 [...]
+tree STATE_9060000 [&lnP=-22223.082440304508] = [&R] ((33:[&rate=0.0020751497836745377]123.61638229346009,((((((((23:[&rate=0.0020876534759759857]11.285770522429543,17:[&rate=0.0025307843120989373]11.285770522429543):[&rate=0.0020751497836745377]17.47866394179438,((24:[&rate=0.0025485445574735272]12.183634266152454,((21:[&rate=0.002404279176296626]4.438872930675594,(19:[&rate=0.0023050112054991364]4.19388537966118,20:[&rate=0.0020751497836745377]4.19388537966118):[&rate=0.001971278374910 [...]
+tree STATE_9070000 [&lnP=-22207.681281178426] = [&R] (((14:[&rate=0.0030447645782347714]81.63653931309025,((((16:[&rate=0.002798704450744208]33.86757396230304,(((3:[&rate=0.003274295225698096]12.826013954416842,5:[&rate=0.0030239023141826987]12.826013954416842):[&rate=0.003459228947158997]16.040520818995837,((11:[&rate=0.0037287754214791508]5.4292011711453725,12:[&rate=0.002772995817955172]5.4292011711453725):[&rate=0.003002890064724221]16.460590157259695,(7:[&rate=0.0038531357979712908] [...]
+tree STATE_9080000 [&lnP=-22212.375381846017] = [&R] (((14:[&rate=0.0027962848187971893]102.13913324564716,((((((((11:[&rate=0.002731036472221841]8.556228530589367,12:[&rate=0.0022169718799693186]8.556228530589367):[&rate=0.0021701937298830576]21.732407553028814,(7:[&rate=0.0024792278438164096]23.497623976109892,13:[&rate=0.0021701937298830576]23.497623976109892):[&rate=0.0023914896289054864]6.791012107508287):[&rate=0.0016277609715512793]9.19687468677217,(3:[&rate=0.002557837453359356]1 [...]
+tree STATE_9090000 [&lnP=-22226.344080159048] = [&R] (((((((((8:[&rate=0.002120588116202138]26.872701657649976,10:[&rate=0.002042493891162206]26.872701657649976):[&rate=0.001923563833012602]19.61602801024275,((3:[&rate=0.0018582420401065294]15.935027726792086,5:[&rate=0.002120588116202138]15.935027726792086):[&rate=0.002042493891162206]22.326492728579957,((11:[&rate=0.002051044891089194]8.090490417003618,12:[&rate=0.0019305953109426503]8.090490417003618):[&rate=0.0019731333840240254]25.1 [...]
+tree STATE_9100000 [&lnP=-22223.954885284897] = [&R] ((((((((8:[&rate=0.002603973308274798]26.06566152964494,10:[&rate=0.002666876596692464]26.06566152964494):[&rate=0.003086021925433853]11.266614769264045,(((3:[&rate=0.0022801420997382266]15.19357655358907,5:[&rate=0.00225897607721103]15.19357655358907):[&rate=0.0022349374944786024]20.863564184733846,(7:[&rate=0.0024262206462346697]30.264579494508823,((11:[&rate=0.0028726045746229633]6.5056441274961525,12:[&rate=0.002762169121226111]6.5 [...]
+tree STATE_9110000 [&lnP=-22219.361220345138] = [&R] ((((((16:[&rate=0.0021965683647196224]42.07336181248046,(((3:[&rate=0.0032166940273069735]15.347619401941483,5:[&rate=0.002355443794430764]15.347619401941483):[&rate=0.002355443794430764]20.566617983374453,((11:[&rate=0.0023211818837409674]8.729613397357411,12:[&rate=0.0022289120249987283]8.729613397357411):[&rate=0.002355443794430764]22.570102455473407,(7:[&rate=0.002366475518271319]24.4456036278682,13:[&rate=0.0021965683647196224]24. [...]
+tree STATE_9120000 [&lnP=-22220.22883483428] = [&R] ((33:[&rate=0.003019483146439906]96.14104531269608,(((((16:[&rate=0.0028890242672770963]33.404385355073,(((3:[&rate=0.003620138955145866]13.300682090164678,5:[&rate=0.0030022026150956184]13.300682090164678):[&rate=0.003019483146439906]15.044933896632472,(7:[&rate=0.0029665578549848107]23.852799575421436,((11:[&rate=0.003438258336291841]5.626766879951161,12:[&rate=0.0028890242672770963]5.626766879951161):[&rate=0.0034190239529359808]15.4 [...]
+tree STATE_9130000 [&lnP=-22224.853039259517] = [&R] (((14:[&rate=0.0028972142979122742]74.26875321818585,(((((22:[&rate=0.0025135546068510463]24.013091136386574,((((19:[&rate=0.0027065227053169254]4.6488032923224525,(20:[&rate=0.0022492319173684416]4.162292379607368,21:[&rate=0.0027065227053169254]4.162292379607368):[&rate=0.002267938325794951]0.48651091271508484):[&rate=0.002137837722181526]9.910317670178205,15:[&rate=0.0020400421941465336]14.559120962500657):[&rate=0.00213783772218152 [...]
+tree STATE_9140000 [&lnP=-22215.437468976954] = [&R] (((14:[&rate=0.0025503252034531467]87.68442549697518,((((((23:[&rate=0.002760653328227225]8.113436130525201,17:[&rate=0.0032672921190895145]8.113436130525201):[&rate=0.0028935385706467687]12.3781214078968,(((15:[&rate=0.0028030959511009556]9.946735452467468,(19:[&rate=0.002395170536272532]5.50820453323117,(20:[&rate=0.0028622858878024995]4.611320748156018,21:[&rate=0.002120732307794738]4.611320748156018):[&rate=0.0024528590707217227]0. [...]
+tree STATE_9150000 [&lnP=-22209.081352416604] = [&R] ((14:[&rate=0.0021066964238140766]108.0772451143649,((((((8:[&rate=0.0024289039085656806]25.305173891088717,10:[&rate=0.002533864965170093]25.305173891088717):[&rate=0.0024289039085656806]16.46095367922997,((3:[&rate=0.0031125085039530364]14.362339416374592,5:[&rate=0.0018648748587051249]14.362339416374592):[&rate=0.002398702311616662]22.193371433371897,((11:[&rate=0.0030683715328128284]6.223515933341962,12:[&rate=0.0021542057048980845 [...]
+tree STATE_9160000 [&lnP=-22214.148512509382] = [&R] (9:[&rate=0.00307411064883259]121.75182564627985,(33:[&rate=0.002955571738744992]99.03941933184547,(14:[&rate=0.002902340446054532]93.08328404931991,(((((((3:[&rate=0.00305244639503402]12.326923668259592,5:[&rate=0.002642702867140422]12.326923668259592):[&rate=0.002642702867140422]14.853647394065916,((7:[&rate=0.003339062329308591]18.005497553384533,13:[&rate=0.00293024773474708]18.005497553384533):[&rate=0.0030091232630469118]3.758060 [...]
+tree STATE_9170000 [&lnP=-22212.48337403931] = [&R] (9:[&rate=0.002525259019240815]129.9693398647205,((((((((8:[&rate=0.002883620531390552]20.99415096908967,10:[&rate=0.002868894381945892]20.99415096908967):[&rate=0.0028540361161421704]12.756849103841223,((3:[&rate=0.002525259019240815]17.51798174122866,5:[&rate=0.0025547666908935004]17.51798174122866):[&rate=0.0029272737702598565]13.490927566218701,((7:[&rate=0.003830538389200263]17.40752403145226,13:[&rate=0.0028540361161421704]17.4075 [...]
+tree STATE_9180000 [&lnP=-22203.246606814027] = [&R] (9:[&rate=0.002425273203464258]143.9111397033771,((((((((22:[&rate=0.002915645149875]20.60837206492612,(24:[&rate=0.0027836862425035446]13.05371053356859,(((21:[&rate=0.0027836862425035446]2.864944032214449,20:[&rate=0.0028924625734348306]2.864944032214449):[&rate=0.003275757602518083]1.148355945746839,19:[&rate=0.0023608835542230488]4.013299977961288):[&rate=0.003480825726044949]4.637366035064324,15:[&rate=0.0028476861894614148]8.6506 [...]
+tree STATE_9190000 [&lnP=-22221.97286770041] = [&R] (((14:[&rate=0.00243636687989364]90.0094228873705,((((((22:[&rate=0.0026678952990563335]21.555420935032853,(24:[&rate=0.0025377903484714906]13.371788747643713,((19:[&rate=0.0024818637839484066]4.335469000055272,(20:[&rate=0.0022032221069673706]2.8027160054246028,21:[&rate=0.00238873195169639]2.8027160054246028):[&rate=0.0025603268652106565]1.5327529946306693):[&rate=0.002153879615412495]5.039673666576928,15:[&rate=0.002583169373002387]9 [...]
+tree STATE_9200000 [&lnP=-22227.650481904042] = [&R] ((33:[&rate=0.0019819648159827593]122.80980237986802,((((((((3:[&rate=0.002042555419221254]22.549546030696476,5:[&rate=0.001862214839205936]22.549546030696476):[&rate=0.002204421172164659]19.6841688637965,((7:[&rate=0.0020064055868179455]35.42601016126505,13:[&rate=0.0016164549158759053]35.42601016126505):[&rate=0.0027196126833645123]1.4745132586386376,(11:[&rate=0.002291759164851696]8.488491490526362,12:[&rate=0.002042555419221254]8.4 [...]
+tree STATE_9210000 [&lnP=-22222.39101727983] = [&R] ((33:[&rate=0.0023035093580510003]97.657028249744,(14:[&rate=0.0029385029712682077]76.45074092503641,(((((((23:[&rate=0.00266353300729002]13.246557325321675,17:[&rate=0.0025807900272300874]13.246557325321675):[&rate=0.0025410890227930572]12.242540900973353,((24:[&rate=0.002394000034958736]14.210939289029287,(((20:[&rate=0.0031100631049921696]1.3930187962434275,21:[&rate=0.0033281494073077516]1.3930187962434275):[&rate=0.0023940000349587 [...]
+tree STATE_9220000 [&lnP=-22220.514541486406] = [&R] ((33:[&rate=0.002064161961235176]130.3225130130846,(14:[&rate=0.0022455922850166234]109.37166086721967,((((((2:[&rate=0.002315092527601553]30.666813123411092,((1:[&rate=0.002197756399751397]10.09629292157472,6:[&rate=0.0020133647631712646]10.09629292157472):[&rate=0.002315092527601553]16.55034528435437,4:[&rate=0.002602155307945859]26.646638205929094):[&rate=0.0027852156811831283]4.020174917481999):[&rate=0.0025756632851618192]3.471179 [...]
+tree STATE_9230000 [&lnP=-22208.967390729667] = [&R] ((33:[&rate=0.002419435143105413]94.36521989718628,(14:[&rate=0.00246285839754384]85.0616214282127,((((16:[&rate=0.0027982152288712767]37.177952694242975,(((3:[&rate=0.003051439908575526]14.855776290721735,5:[&rate=0.0024760151948719457]14.855776290721735):[&rate=0.0027752058684565803]15.868075154648707,(7:[&rate=0.002922097705486991]23.83198772188421,(13:[&rate=0.002786582330485017]20.88519428713434,(11:[&rate=0.0030831310626628413]6. [...]
+tree STATE_9240000 [&lnP=-22227.125221288505] = [&R] ((33:[&rate=0.0022394594700485236]108.20534801305247,(14:[&rate=0.002268577918061127]97.40283417013418,(((16:[&rate=0.002472268500906004]46.308694901865266,(((8:[&rate=0.0024439516716540377]22.7500253982125,10:[&rate=0.002472268500906004]22.7500253982125):[&rate=0.0026001843176541267]21.63028918322763,(((2:[&rate=0.002310141484535696]32.24362199359054,((1:[&rate=0.0026001843176541267]8.959019843056195,6:[&rate=0.0021146869078544793]8.9 [...]
+tree STATE_9250000 [&lnP=-22217.440595962573] = [&R] ((33:[&rate=0.002417388013170421]104.10331727453772,(((((16:[&rate=0.0023541538019560162]37.74210590747517,(((3:[&rate=0.0025255092539516804]15.484591810816806,5:[&rate=0.002805692920208527]15.484591810816806):[&rate=0.0027076011868598223]19.20265260111349,(((11:[&rate=0.0025567941522946346]5.567425116847587,12:[&rate=0.0030536473305798604]5.567425116847587):[&rate=0.002925630894932252]15.866825548474715,13:[&rate=0.002417388013170421] [...]
+tree STATE_9260000 [&lnP=-22216.175880227715] = [&R] ((33:[&rate=0.002759641743111268]95.01223661803465,(((((((8:[&rate=0.00320411924897658]19.494750077307003,10:[&rate=0.002759641743111268]19.494750077307003):[&rate=0.003301818332545281]11.383050030692925,((3:[&rate=0.003030120510912426]13.975168384708233,5:[&rate=0.0027707964387999984]13.975168384708233):[&rate=0.0029848349720212226]15.264032173572051,(7:[&rate=0.002708851417718647]26.32932424761899,(13:[&rate=0.0028019012020287616]22. [...]
+tree STATE_9270000 [&lnP=-22218.4200069033] = [&R] (((33:[&rate=0.0027144879498802905]83.97814091648362,(((((((3:[&rate=0.003422577502086014]10.738153042705186,5:[&rate=0.0032655764774729727]10.738153042705186):[&rate=0.002204844403151197]20.69150214328429,(8:[&rate=0.003030435803356093]19.441391072670765,10:[&rate=0.002445140156241448]19.441391072670765):[&rate=0.003103398938787921]11.98826411331871):[&rate=0.0029100597907891447]0.9917946298599531,(((11:[&rate=0.0026237114353842893]8.09 [...]
+tree STATE_9280000 [&lnP=-22231.571746209916] = [&R] (((((((((8:[&rate=0.0022592473544587564]26.032562755786934,10:[&rate=0.002066339638446824]26.032562755786934):[&rate=0.0018689319184220653]19.972685397139244,((((11:[&rate=0.002276245132534046]8.339704618391128,12:[&rate=0.0017677451769855823]8.339704618391128):[&rate=0.0018841170131998583]20.9316281346735,13:[&rate=0.0014892572181136059]29.271332753064627):[&rate=0.001913614817821316]5.757055762425281,7:[&rate=0.0022108538233017406]35 [...]
+tree STATE_9290000 [&lnP=-22213.34871580049] = [&R] (((((((((2:[&rate=0.0024716961717633932]26.39456887203672,((1:[&rate=0.003035462476097234]6.716167909312126,6:[&rate=0.0023178338741743514]6.716167909312126):[&rate=0.0029295911715665974]12.339375370207094,4:[&rate=0.0031371361314362547]19.05554327951922):[&rate=0.002302817021071654]7.339025592517501):[&rate=0.0032086603032771147]4.7708373379562055,(18:[&rate=0.002184642831850947]27.459916656841763,(26:[&rate=0.0026593971008232417]17.31 [...]
+tree STATE_9300000 [&lnP=-22204.725474817173] = [&R] (((((((((8:[&rate=0.0034291685003034577]18.934089630630726,10:[&rate=0.003545785156863382]18.934089630630726):[&rate=0.003320034565715494]11.651164621676465,16:[&rate=0.0035056096892372005]30.58525425230719):[&rate=0.0038485285170714863]0.07701894561239087,(((11:[&rate=0.00360956634505655]5.633927337579735,12:[&rate=0.0038831639078692423]5.633927337579735):[&rate=0.00360956634505655]18.235054899055005,(13:[&rate=0.0029909203608456587]1 [...]
+tree STATE_9310000 [&lnP=-22216.642112576435] = [&R] (((((((((2:[&rate=0.0023322109480280375]28.4639341391771,((1:[&rate=0.0024385049056214808]8.912710094001767,6:[&rate=0.0023476941157956443]8.912710094001767):[&rate=0.0025770226655287727]11.747449837547592,4:[&rate=0.0032781417420789886]20.66015993154936):[&rate=0.001827903390253396]7.80377420762774):[&rate=0.0031342029621393356]4.5103662720619795,(18:[&rate=0.0028478403351665325]26.057331581218122,(26:[&rate=0.0023782512301064236]18.7 [...]
+tree STATE_9320000 [&lnP=-22210.661792361516] = [&R] (((14:[&rate=0.0027232369501394493]83.03163373577634,((((((2:[&rate=0.002506883678744501]26.24592568083402,((1:[&rate=0.0031506606778145675]7.148040651055096,6:[&rate=0.0018795922884676484]7.148040651055096):[&rate=0.002754676864634646]14.39117218136362,4:[&rate=0.0031506606778145675]21.539212832418716):[&rate=0.0025971657268258844]4.706712848415304):[&rate=0.002650903001090311]3.8275536032485817,(18:[&rate=0.002908146162036619]24.2421 [...]
+tree STATE_9330000 [&lnP=-22224.72649182173] = [&R] (((((((((22:[&rate=0.002464286759642889]23.20960779724139,(24:[&rate=0.002346550246055414]14.855703239273664,(15:[&rate=0.002821620950917458]8.31546928806745,((19:[&rate=0.0022167466578901277]2.9877530358642943,20:[&rate=0.0022271507168475776]2.9877530358642943):[&rate=0.002528952213071887]1.4289273475782212,21:[&rate=0.0026303145463280877]4.4166803834425155):[&rate=0.0024410812118613005]3.8987889046249338):[&rate=0.0023168716629951343] [...]
+tree STATE_9340000 [&lnP=-22208.654200424397] = [&R] ((((((16:[&rate=0.0027750946218761457]36.08874002925386,(((((24:[&rate=0.0034413031872547557]11.014918771093397,(15:[&rate=0.0023644963848010797]9.119117774006645,(19:[&rate=0.002523427489156717]3.86076532756905,(20:[&rate=0.003134148514494442]2.9638476845121824,21:[&rate=0.0027897090310977477]2.9638476845121824):[&rate=0.0029057513353003966]0.8969176430568675):[&rate=0.0031756241523144242]5.258352446437595):[&rate=0.003108063681677304 [...]
+tree STATE_9350000 [&lnP=-22212.108985028888] = [&R] ((14:[&rate=0.0023978418928700404]88.67879189498706,(33:[&rate=0.0033875108293099417]77.05831822949433,(((((22:[&rate=0.0026197067527262375]24.08089992432603,((23:[&rate=0.00259774924343353]11.566132164595375,17:[&rate=0.002929379269163436]11.566132164595375):[&rate=0.0026625602915147624]11.598475489268074,(24:[&rate=0.002453639625787807]12.871149854307095,(15:[&rate=0.0023978418928700404]7.716055846415683,((21:[&rate=0.002217504395839 [...]
+tree STATE_9360000 [&lnP=-22229.282916980348] = [&R] ((33:[&rate=0.0019238678520451039]132.294136943417,(14:[&rate=0.001864335490265245]130.51674039119487,((((((23:[&rate=0.002312035707087625]10.974814347192034,17:[&rate=0.002417115169518483]10.974814347192034):[&rate=0.0021415645436732283]17.999955896238074,(22:[&rate=0.0022846033996949786]27.38875385525488,(24:[&rate=0.002823665725401858]17.755055929922097,(15:[&rate=0.0022982188785699727]12.770902612411971,(19:[&rate=0.001787119486470 [...]
+tree STATE_9370000 [&lnP=-22216.56140342813] = [&R] (((14:[&rate=0.0031081351929487877]104.27050933155216,(((((((22:[&rate=0.0024876019217832095]25.049671870795336,(((19:[&rate=0.0020976917041413655]2.413809936401218,(20:[&rate=0.002324724769237117]1.5966715133930516,21:[&rate=0.003041109836857773]1.5966715133930516):[&rate=0.0025231465318105257]0.8171384230081664):[&rate=0.0020212160812143106]8.596672186012293,15:[&rate=0.002599769193290953]11.010482122413512):[&rate=0.00168313305691689 [...]
+tree STATE_9380000 [&lnP=-22210.18708276561] = [&R] (((14:[&rate=0.003251538237366739]83.90752237302422,((((((23:[&rate=0.003279152741467701]8.595123791560153,17:[&rate=0.0027145065434423608]8.595123791560153):[&rate=0.002017879422640114]16.460641747149488,(22:[&rate=0.003374411241331525]19.882091634554648,(((21:[&rate=0.00284496029815507]4.4583508537932355,(20:[&rate=0.0031770216494129916]2.615662774246804,19:[&rate=0.0026636543255692613]2.615662774246804):[&rate=0.0030331716615817758]1 [...]
+tree STATE_9390000 [&lnP=-22226.219193832967] = [&R] (9:[&rate=0.0027446764406189915]152.15002580016875,((14:[&rate=0.002146040400404699]101.438744993521,((((((23:[&rate=0.002243178837968299]11.486634686016247,17:[&rate=0.0023244365128932145]11.486634686016247):[&rate=0.0021731074757743176]16.430531075917713,(22:[&rate=0.002284767423610082]26.84485041477724,((((21:[&rate=0.002602239894039203]2.56130083226074,20:[&rate=0.002209724924400194]2.56130083226074):[&rate=0.0018450686120916435]3. [...]
+tree STATE_9400000 [&lnP=-22210.925099374548] = [&R] (9:[&rate=0.0030183088798382577]117.02320302653987,(33:[&rate=0.0025170078766221706]90.09520808599845,(14:[&rate=0.002867543812898706]77.78984610374151,(((((((7:[&rate=0.0032556882533313182]19.377309716011602,13:[&rate=0.003119136308065952]19.377309716011602):[&rate=0.003119136308065952]3.6672413181074504,(11:[&rate=0.0032312227941581695]6.298868449211352,12:[&rate=0.0023627866723016457]6.298868449211352):[&rate=0.0034265391484947323]1 [...]
+tree STATE_9410000 [&lnP=-22213.389226749026] = [&R] ((9:[&rate=0.0029458777211516093]113.54252739035968,14:[&rate=0.0017487433872962928]113.54252739035968):[&rate=0.002084867202438837]3.0715679401158695,(((((16:[&rate=0.002211957913350113]41.98996005415899,((((7:[&rate=0.0029458777211516093]23.7035741850454,13:[&rate=0.002104839091049858]23.7035741850454):[&rate=0.002084867202438837]5.512188673621409,(11:[&rate=0.002462751911783879]8.599178091990654,12:[&rate=0.002276437142373687]8.5991 [...]
+tree STATE_9420000 [&lnP=-22211.822806291206] = [&R] ((14:[&rate=0.0023690250645405173]107.71567107417414,(33:[&rate=0.0021271103752242924]98.630669222419,((((((((7:[&rate=0.003065741636582416]24.586451892028293,13:[&rate=0.001964319881708123]24.586451892028293):[&rate=0.002791765851368705]3.360165768256415,(11:[&rate=0.002537241273015212]7.873666879814538,12:[&rate=0.0020529284269524706]7.873666879814538):[&rate=0.002214038790557458]20.072950780470173):[&rate=0.00255647503675902]8.63032 [...]
+tree STATE_9430000 [&lnP=-22205.6820728498] = [&R] ((33:[&rate=0.0027568837215247843]84.25699758033082,(((((((8:[&rate=0.0038508728917407854]15.274455136298366,10:[&rate=0.003428555448590832]15.274455136298366):[&rate=0.0029395488213942345]12.041105855559751,((3:[&rate=0.00289130947620504]13.146054938920116,5:[&rate=0.0028404352592392447]13.146054938920116):[&rate=0.0035028589917629797]12.468015088394715,((7:[&rate=0.0038107200086468424]17.71094611835346,13:[&rate=0.002693878451849637]17 [...]
+tree STATE_9440000 [&lnP=-22208.222565968463] = [&R] ((33:[&rate=0.002379953478093872]90.26144486225674,((((16:[&rate=0.002211733581588789]39.52953751080994,(((((24:[&rate=0.002236481799083538]13.66002297400601,((19:[&rate=0.0023944223114972414]3.248514112230648,(20:[&rate=0.0024992223116471935]2.4872946042141764,21:[&rate=0.0025840638316617153]2.4872946042141764):[&rate=0.0023944223114972414]0.7612195080164716):[&rate=0.0023944223114972414]7.119327950407335,15:[&rate=0.00240847666399225 [...]
+tree STATE_9450000 [&lnP=-22218.30865015119] = [&R] (9:[&rate=0.002317643915400798]134.34114155637522,((((16:[&rate=0.002916764747444004]38.52173995864789,(((((24:[&rate=0.0031021695548180735]12.64818007250894,(((21:[&rate=0.0027911238337883226]2.14124164728164,20:[&rate=0.0032060348483920567]2.14124164728164):[&rate=0.0032644972808878042]1.089871201791515,19:[&rate=0.0024776306610102498]3.231112849073155):[&rate=0.0036936177143599798]7.5045540310542425,15:[&rate=0.0027572634483020004]10 [...]
+tree STATE_9460000 [&lnP=-22227.84612368105] = [&R] (9:[&rate=0.002178121385063884]196.88903652952746,(33:[&rate=0.0018985679454184253]149.41499956313493,(((((((22:[&rate=0.0019492141614577765]29.46593127724998,(23:[&rate=0.0021887796887606554]12.116543006516887,17:[&rate=0.002178121385063884]12.116543006516887):[&rate=0.001983320216519214]17.349388270733094):[&rate=0.0023707660524138687]0.09610739544489633,(24:[&rate=0.0022694572443267102]18.75599889893219,((19:[&rate=0.0019608640295519 [...]
+tree STATE_9470000 [&lnP=-22215.610734588834] = [&R] (9:[&rate=0.0020445883356274615]149.42898429811288,((((((((22:[&rate=0.002706801589721789]20.19139581786503,(23:[&rate=0.002684845475435731]10.574059519736364,17:[&rate=0.002616795124679017]10.574059519736364):[&rate=0.003326284346698596]9.617336298128665):[&rate=0.0021315548611581464]1.1682497797082725,((15:[&rate=0.002982462572747026]8.816252470255995,(21:[&rate=0.0024912543083235638]3.3237508331995778,(19:[&rate=0.00321567886423166] [...]
+tree STATE_9480000 [&lnP=-22232.612746070274] = [&R] (9:[&rate=0.002420685942780829]146.29294288171272,(((((((((23:[&rate=0.002263736473178004]11.044536991393912,17:[&rate=0.0025214124177113037]11.044536991393912):[&rate=0.002301175357978278]16.25670004949763,((15:[&rate=0.0025703501627827724]11.549393593676982,((19:[&rate=0.0026325034325117534]5.279860650492188,20:[&rate=0.0017928598104783725]5.279860650492188):[&rate=0.0023487901919367156]1.058530494396373,21:[&rate=0.00195190865418344 [...]
+tree STATE_9490000 [&lnP=-22222.872587871425] = [&R] (9:[&rate=0.00308018464090329]119.9536900618501,((((((((23:[&rate=0.002636313448261763]11.171595735342416,17:[&rate=0.0029520429861988086]11.171595735342416):[&rate=0.0027513657919177635]10.749143951190197,(22:[&rate=0.002995876546037183]20.81670100551639,((((21:[&rate=0.0034365220665126717]3.3182410440838104,20:[&rate=0.0027289828376172217]3.3182410440838104):[&rate=0.002917273681650105]0.7256283957980627,19:[&rate=0.00296321742614363 [...]
+tree STATE_9500000 [&lnP=-22213.45436055848] = [&R] (9:[&rate=0.0023644844734347016]155.15914804971072,((((((((23:[&rate=0.002862976022934423]11.625306692553115,17:[&rate=0.0024590011129566034]11.625306692553115):[&rate=0.0022898820462188365]12.185129959209283,(22:[&rate=0.0033337559859007455]22.706490197007295,((((20:[&rate=0.0029166914779635853]2.0050633091156493,19:[&rate=0.003132103508259952]2.0050633091156493):[&rate=0.0025051371485508857]0.240172294503866,21:[&rate=0.00303328174536 [...]
+tree STATE_9510000 [&lnP=-22222.36471378678] = [&R] (((33:[&rate=0.0026767905599524976]81.15841963048751,((((((22:[&rate=0.002588994345288023]25.7421247239632,((23:[&rate=0.002376343577817936]9.939059378668157,17:[&rate=0.0030804154684807997]9.939059378668157):[&rate=0.002425426843782287]13.451422197189205,((((21:[&rate=0.0025063556119900117]2.022883706578434,20:[&rate=0.002588994345288023]2.022883706578434):[&rate=0.002864873333157292]1.020932802100822,19:[&rate=0.002555527308983981]3.0 [...]
+tree STATE_9520000 [&lnP=-22207.58723610796] = [&R] ((33:[&rate=0.0025945276363224176]104.58104449981367,((((((((3:[&rate=0.002951002931165766]13.704924299562032,5:[&rate=0.0026422058249057413]13.704924299562032):[&rate=0.002627018530700964]16.51663220023206,((13:[&rate=0.0025769410971167722]19.127417258993557,7:[&rate=0.0030900841832924636]19.127417258993557):[&rate=0.0025769410971167722]4.692736395335217,(11:[&rate=0.0030443231782693167]5.500215272026283,12:[&rate=0.0025582002937541426 [...]
+tree STATE_9530000 [&lnP=-22214.68157666038] = [&R] ((33:[&rate=0.0025056057640846464]106.87109170353546,(((((((8:[&rate=0.0031245052247868745]20.74383685643456,10:[&rate=0.002960003407644572]20.74383685643456):[&rate=0.002397053917674575]13.367923533044301,((3:[&rate=0.0030265859702116515]13.358888102764954,5:[&rate=0.002523766497841928]13.358888102764954):[&rate=0.0024220852319782084]19.15529635857514,((13:[&rate=0.002523766497841928]21.255205539807314,7:[&rate=0.002944410834404334]21. [...]
+tree STATE_9540000 [&lnP=-22203.169867839686] = [&R] (((14:[&rate=0.0030236515786498056]78.17934525889362,33:[&rate=0.0035704156736513157]78.17934525889362):[&rate=0.0022499941245488074]6.834089610645421,((((16:[&rate=0.002525599411328504]35.142793581164746,((8:[&rate=0.002568513032883189]24.230221608786728,10:[&rate=0.0024790997542056365]24.230221608786728):[&rate=0.0034325689631021956]10.523194737082022,((3:[&rate=0.0033953607690295666]11.63760217862395,5:[&rate=0.002967280362247403]11 [...]
+tree STATE_9550000 [&lnP=-22231.05067217407] = [&R] (((14:[&rate=0.002374716741758546]105.31094934468528,((((16:[&rate=0.0021103665173471233]44.33549974876093,(((23:[&rate=0.0021958227120445336]12.534157613761602,17:[&rate=0.002284973300651487]12.534157613761602):[&rate=0.002260474714351359]14.524028238879795,(22:[&rate=0.0024797189622943255]24.747685548338502,(24:[&rate=0.0025292552787270508]17.00075506234311,(15:[&rate=0.0024185488651692214]12.451029086387916,((21:[&rate=0.002338719430 [...]
+tree STATE_9560000 [&lnP=-22215.458161635] = [&R] ((((((16:[&rate=0.002895049679463293]37.07183977777085,(((((13:[&rate=0.002638028746055711]17.614243941430793,7:[&rate=0.003544184719860038]17.614243941430793):[&rate=0.002540185955759645]4.774559395040363,(11:[&rate=0.002356340669500665]6.916807681147415,12:[&rate=0.002485761208588473]6.916807681147415):[&rate=0.003156568907773215]15.471995655323742):[&rate=0.0031805875446148236]5.720527256833741,(3:[&rate=0.0029787257629303255]11.681391 [...]
+tree STATE_9570000 [&lnP=-22218.55480429211] = [&R] (((14:[&rate=0.0028360425411090833]88.37930662973103,((((((22:[&rate=0.0029652949682304535]20.355870827548085,((((20:[&rate=0.003285863000917868]3.0074839662666037,21:[&rate=0.0026401157007935106]3.0074839662666037):[&rate=0.0031252690523559065]1.7174588709270675,19:[&rate=0.0027241228833553106]4.724942837193671):[&rate=0.002913721936534945]6.7316021944156255,15:[&rate=0.0027241228833553106]11.456545031609297):[&rate=0.00297548100353932 [...]
+tree STATE_9580000 [&lnP=-22208.24997679324] = [&R] (((14:[&rate=0.00314998563529749]71.59423637713768,((((((2:[&rate=0.0028768901008593556]21.719590473904567,((1:[&rate=0.003228362206525344]5.826118210675664,6:[&rate=0.0033145037858449515]5.826118210675664):[&rate=0.002791438624476055]10.497659132497684,4:[&rate=0.004269115378895567]16.323777343173347):[&rate=0.003100889278244344]5.39581313073122):[&rate=0.0023740332413360807]6.026499520126617,(18:[&rate=0.0030307239487040363]23.8047464 [...]
+tree STATE_9590000 [&lnP=-22219.167060545653] = [&R] (((((((16:[&rate=0.0020604900542906435]41.48158529388981,((8:[&rate=0.0027059789134349166]22.87629663575679,10:[&rate=0.0028330674349112597]22.87629663575679):[&rate=0.0025910588166757845]16.42634154503052,((3:[&rate=0.002264184226026673]13.242705008512766,5:[&rate=0.002673710390893983]13.242705008512766):[&rate=0.002401959080143837]22.908914450163515,((13:[&rate=0.0019741290816360684]24.666592032311296,(11:[&rate=0.0027834367838114804 [...]
+tree STATE_9600000 [&lnP=-22218.53017912463] = [&R] (((14:[&rate=0.0026792269015605548]84.47205160558678,((((16:[&rate=0.002264394157458215]40.43831913703351,((8:[&rate=0.0033943621153445136]19.300252526761177,10:[&rate=0.0029976532834249674]19.300252526761177):[&rate=0.0021975953553854486]19.2682053641986,((3:[&rate=0.0027195604129772745]18.100716037112033,5:[&rate=0.002447360475724779]18.100716037112033):[&rate=0.0034886866389787375]15.54757319719214,((11:[&rate=0.0023226442114006435]7 [...]
+tree STATE_9610000 [&lnP=-22204.116632751276] = [&R] ((33:[&rate=0.002459074987799836]103.31446464289279,(14:[&rate=0.0029225175408927736]88.43419322574428,((((16:[&rate=0.00267837734714656]34.0382172346239,(((3:[&rate=0.003229669359723626]13.552188628577,5:[&rate=0.0028424753266778563]13.552188628577):[&rate=0.0031338788709869775]14.735628238642327,((11:[&rate=0.0031569367212082606]4.965457128043492,12:[&rate=0.0032047608134970974]4.965457128043492):[&rate=0.002882436763670249]18.713912 [...]
+tree STATE_9620000 [&lnP=-22215.703351477892] = [&R] ((33:[&rate=0.002479861678716168]99.5080706254101,(((((((8:[&rate=0.0030437699974343157]19.895994279603038,10:[&rate=0.002791481410884098]19.895994279603038):[&rate=0.002925438938879872]14.998479804025848,((3:[&rate=0.00280039753225744]14.947096577862512,5:[&rate=0.0026412014829814492]14.947096577862512):[&rate=0.0027197540259222336]17.325569081196754,((11:[&rate=0.0028274145890648346]8.5097485702706,12:[&rate=0.002378233753375702]8.50 [...]
+tree STATE_9630000 [&lnP=-22224.58636796916] = [&R] ((33:[&rate=0.002728606082204256]94.4859871666213,(14:[&rate=0.0027943385583423736]87.90003615293176,((((((2:[&rate=0.0026643643618337438]27.099584476436647,((1:[&rate=0.0028147180056691573]7.313029914799576,6:[&rate=0.0028147180056691573]7.313029914799576):[&rate=0.0028829087213524734]15.314870289392381,4:[&rate=0.0029275628761177373]22.627900204191956):[&rate=0.0027943385583423736]4.471684272244691):[&rate=0.002728606082204256]6.52453 [...]
+tree STATE_9640000 [&lnP=-22204.807068155755] = [&R] ((33:[&rate=0.0032735280090875718]75.14015903014081,(14:[&rate=0.0030862016934371447]66.25152649941717,((((((2:[&rate=0.00395620321504004]21.157026638840765,((1:[&rate=0.0030229793986193665]5.522880033518946,6:[&rate=0.003414574017264502]5.522880033518946):[&rate=0.0036430588807290607]10.87421993030954,4:[&rate=0.004125322397051395]16.397099963828484):[&rate=0.0030554058590460073]4.759926675012281):[&rate=0.004300739169555857]3.6907337 [...]
+tree STATE_9650000 [&lnP=-22216.467166091858] = [&R] ((33:[&rate=0.002110031878909994]104.24062673898497,(14:[&rate=0.0028703525489748426]92.89182511565156,((((((3:[&rate=0.0023977174609908425]14.76284339695465,5:[&rate=0.0025964280378635058]14.76284339695465):[&rate=0.0023527936252925063]19.58003451707708,((11:[&rate=0.0031789684563618094]6.424818850812573,12:[&rate=0.0026906602161170957]6.424818850812573):[&rate=0.0024397045568246216]20.623011492874888,(7:[&rate=0.002781007299855251]22 [...]
+tree STATE_9660000 [&lnP=-22218.416810707986] = [&R] ((33:[&rate=0.002221586573634766]108.48009053710597,((((((((3:[&rate=0.0018041111278358111]17.296921307157938,5:[&rate=0.0018041111278358111]17.296921307157938):[&rate=0.002710933000444466]17.209762207896148,((13:[&rate=0.0018647585776765674]22.787887793767297,(11:[&rate=0.0024176210948003334]7.3610456598935645,12:[&rate=0.0021726339798926273]7.3610456598935645):[&rate=0.002963591903403454]15.426842133873732):[&rate=0.00277997549257562 [...]
+tree STATE_9670000 [&lnP=-22211.680403188235] = [&R] (((14:[&rate=0.002733823064956763]84.2598000260353,((((16:[&rate=0.0025340277426188383]37.63029464867548,(((13:[&rate=0.002094540134401595]24.087613714646103,7:[&rate=0.0025192008520237112]24.087613714646103):[&rate=0.0024127388551419523]5.195802359674246,(11:[&rate=0.002785528607320204]6.12219693110861,12:[&rate=0.002380522713011795]6.12219693110861):[&rate=0.0025192008520237112]23.16121914321174):[&rate=0.0023967816235675263]5.821189 [...]
+tree STATE_9680000 [&lnP=-22203.65360825608] = [&R] ((((((((((3:[&rate=0.004306016843197694]11.115033199534594,5:[&rate=0.002781954441128845]11.115033199534594):[&rate=0.0029282043360902045]15.43780549909306,((13:[&rate=0.002781954441128845]19.37681287450903,7:[&rate=0.0038298017534664115]19.37681287450903):[&rate=0.003537324336150969]2.9788694005672056,(11:[&rate=0.0032409048019733542]5.448684783301644,12:[&rate=0.0031997403490996853]5.448684783301644):[&rate=0.0031385438887445325]16.90 [...]
+tree STATE_9690000 [&lnP=-22220.35908056464] = [&R] (((((((((2:[&rate=0.0027446694092932785]26.49419572062766,((1:[&rate=0.0022333386324528467]8.238937554801678,6:[&rate=0.002459365929205855]8.238937554801678):[&rate=0.002654412140797742]13.672992722034953,4:[&rate=0.0027828120006577074]21.91193027683663):[&rate=0.0026961837869865857]4.582265443791027):[&rate=0.0025293954752687617]8.319666901713774,(18:[&rate=0.0027446694092932785]29.94689866010838,(26:[&rate=0.002459365929205855]17.9255 [...]
+tree STATE_9700000 [&lnP=-22215.910994605194] = [&R] (((((((16:[&rate=0.003464483177872294]31.30111812486868,((8:[&rate=0.0032543899573846532]18.7361276909693,10:[&rate=0.0036423258703598216]18.7361276909693):[&rate=0.0032962041818536225]11.669427141232703,((3:[&rate=0.0028247850496494727]11.470385613470475,5:[&rate=0.0031155391346673984]11.470385613470475):[&rate=0.0032543899573846532]17.24463211657084,((13:[&rate=0.0029593143911706985]19.15313236673725,(11:[&rate=0.0030105610278849558] [...]
+tree STATE_9710000 [&lnP=-22232.2555784541] = [&R] (((33:[&rate=0.002225144648307028]105.658036118483,14:[&rate=0.002095313917349709]105.658036118483):[&rate=0.0019845071843181622]12.82735969760951,((((16:[&rate=0.0020746636418775694]45.88409620205288,((3:[&rate=0.0021303093826551046]17.120321785380224,5:[&rate=0.0020887546134051482]17.120321785380224):[&rate=0.002151139054316355]27.167584691583368,(((13:[&rate=0.0023569651332265634]30.04196864881434,(11:[&rate=0.0023569651332265634]10.5 [...]
+tree STATE_9720000 [&lnP=-22220.45802699337] = [&R] (((14:[&rate=0.002415416343210429]112.33997870974054,((((((8:[&rate=0.0022377088864146415]28.130946931860407,10:[&rate=0.0020685748683500886]28.130946931860407):[&rate=0.002715746637651252]16.905478825006043,16:[&rate=0.0019070681214750497]45.03642575686645):[&rate=0.0018195687555424241]3.46266054094761,(((13:[&rate=0.0018653935453177275]27.10924522609286,7:[&rate=0.002415416343210429]27.10924522609286):[&rate=0.0018653935453177275]6.27 [...]
+tree STATE_9730000 [&lnP=-22204.97458253403] = [&R] (((14:[&rate=0.0025044586522480605]98.91218328112429,((((((8:[&rate=0.0031069357668017505]23.001763346413224,10:[&rate=0.0022139982402923698]23.001763346413224):[&rate=0.0024412536640160912]17.826808456131037,16:[&rate=0.002320878102177794]40.82857180254426):[&rate=0.002032806133179442]0.9478001745844935,((3:[&rate=0.0024259138450363925]16.132709295600353,5:[&rate=0.0023059256618611417]16.132709295600353):[&rate=0.001897201727669204]21. [...]
+tree STATE_9740000 [&lnP=-22234.145647708545] = [&R] ((((((((((3:[&rate=0.0024236218912708496]14.806675070277471,5:[&rate=0.0024876849479474102]14.806675070277471):[&rate=0.002788050050618402]19.329418423617483,((13:[&rate=0.002708900465492461]21.453494585860216,(11:[&rate=0.0024578183243417714]5.94638531781963,12:[&rate=0.0023817798830906623]5.94638531781963):[&rate=0.0026587290504998303]15.507109268040587):[&rate=0.0022560651777209792]5.633801586958384,7:[&rate=0.0023969227479734333]27 [...]
+tree STATE_9750000 [&lnP=-22213.734014321257] = [&R] ((((((((((3:[&rate=0.003064581845696724]14.13176003218846,5:[&rate=0.002811523387359737]14.13176003218846):[&rate=0.003501444597550382]13.47342091576252,((13:[&rate=0.002837123556256475]16.62195213378361,7:[&rate=0.0037340471699273355]16.62195213378361):[&rate=0.0037340471699273355]3.0809743647286716,(11:[&rate=0.0027177526659615163]5.910548893722829,12:[&rate=0.0030153905208997423]5.910548893722829):[&rate=0.0035659793523448993]13.792 [...]
+tree STATE_9760000 [&lnP=-22219.36197956354] = [&R] (33:[&rate=0.001994004692466886]107.46143224063854,((14:[&rate=0.003257046215601961]82.78194341230318,(((((((3:[&rate=0.0032971972535926646]13.975348758001836,5:[&rate=0.0024422963213159062]13.975348758001836):[&rate=0.0026859850556596376]20.419641069076228,((13:[&rate=0.002097116117048628]22.855096243433326,7:[&rate=0.002838737259604784]22.855096243433326):[&rate=0.002422020312048195]3.5409871604040895,(11:[&rate=0.0034464137164678596] [...]
+tree STATE_9770000 [&lnP=-22230.75430996806] = [&R] (9:[&rate=0.0027074731415443276]135.3246191617482,((14:[&rate=0.0027693480395794097]89.25900804933161,((((((3:[&rate=0.003112738255547875]14.974144702305443,5:[&rate=0.0025976537414805203]14.974144702305443):[&rate=0.0024183896655408377]19.80634973501879,((13:[&rate=0.002439779759000586]23.24490258596446,(11:[&rate=0.002787647193964902]7.039783585155312,12:[&rate=0.0025770852185424374]7.039783585155312):[&rate=0.002505349851740427]16.20 [...]
+tree STATE_9780000 [&lnP=-22218.3913246327] = [&R] (9:[&rate=0.002161150657311112]149.56145137368287,((14:[&rate=0.0033334873510742556]79.22211316214464,33:[&rate=0.0029423373256605213]79.22211316214464):[&rate=0.002613812654533438]9.196190134604919,(((((((3:[&rate=0.0031754624475509034]12.893718221699073,5:[&rate=0.0027752410187759377]12.893718221699073):[&rate=0.0025072573453253097]20.050701663026558,((13:[&rate=0.0025072573453253097]18.79725184375986,7:[&rate=0.002978976805263941]18.7 [...]
+tree STATE_9790000 [&lnP=-22210.204110779072] = [&R] (9:[&rate=0.002721242342357765]126.29610179698084,((14:[&rate=0.00321934628902694]82.66425935891391,(((((((23:[&rate=0.003076450774073442]9.962252345987366,17:[&rate=0.00321934628902694]9.962252345987366):[&rate=0.0025739317794708045]12.782029400033895,((((19:[&rate=0.0027429880524884784]5.463753164073032,(21:[&rate=0.002400775595747396]3.727037440316079,20:[&rate=0.0026520232821239786]3.727037440316079):[&rate=0.002865043470809993]1.7 [...]
+tree STATE_9800000 [&lnP=-22217.516622515617] = [&R] (9:[&rate=0.002190192528373154]190.2368105425783,((14:[&rate=0.0021255219476598456]125.80095741434289,(((((((23:[&rate=0.0017146261297263546]15.06653747972754,17:[&rate=0.0019476234896455768]15.06653747972754):[&rate=0.0019091620120791392]16.349730085042815,(22:[&rate=0.002110449603301129]30.067556246317736,(((19:[&rate=0.0018037329361723935]7.299096841633808,(20:[&rate=0.001642986838446713]4.229188452448629,21:[&rate=0.001599657017540 [...]
+tree STATE_9810000 [&lnP=-22209.089586590108] = [&R] (9:[&rate=0.003217227665802157]133.8034337158831,(33:[&rate=0.002465793382362821]109.14387709746099,(14:[&rate=0.0026154929835003602]92.44910988481584,((((((23:[&rate=0.0028164704519346435]10.49343803834917,17:[&rate=0.0024460504558413555]10.49343803834917):[&rate=0.0025694157278825075]13.593492178337614,(22:[&rate=0.0026154929835003602]23.274976432341695,(((19:[&rate=0.0034296215051152837]2.9655267189912875,(21:[&rate=0.00332569228125 [...]
+tree STATE_9820000 [&lnP=-22218.988977454672] = [&R] (9:[&rate=0.0029220943171095345]136.238025025405,(((((((((13:[&rate=0.0030308299113460052]20.372917010573797,7:[&rate=0.003276219749107553]20.372917010573797):[&rate=0.002831891983461568]6.247440719682594,(11:[&rate=0.0030308299113460052]5.862767631796472,12:[&rate=0.003252658751611299]5.862767631796472):[&rate=0.0028999451056594475]20.757590098459918):[&rate=0.003229469370755452]4.885644743811813,(3:[&rate=0.002449334314893972]15.0170 [...]
+tree STATE_9830000 [&lnP=-22213.95590961945] = [&R] (9:[&rate=0.0030121449975628106]127.42586375094702,((((((16:[&rate=0.002941208606197141]33.1303854075125,((8:[&rate=0.003345203967305672]17.71079855357358,10:[&rate=0.0032656680466360347]17.71079855357358):[&rate=0.0029160283739554666]13.183084830462885,((3:[&rate=0.0029890434481762043]9.869906891438623,5:[&rate=0.0032112267507447023]9.869906891438623):[&rate=0.0028758113449161554]17.947756446703963,((13:[&rate=0.003641563554244809]19.5 [...]
+tree STATE_9840000 [&lnP=-22222.99181162054] = [&R] (9:[&rate=0.0023755719378007775]154.47676805947478,(33:[&rate=0.0022583941839042507]103.64316090162772,(14:[&rate=0.0024412657931782485]98.02065474818005,((((16:[&rate=0.0021639097562695967]42.03724184533014,(((2:[&rate=0.0024948700919973366]31.183863332629286,((1:[&rate=0.002287407451017796]7.8499166773070685,6:[&rate=0.0021428337811026017]7.8499166773070685):[&rate=0.0023606849909194143]18.945816447563615,4:[&rate=0.002546906850158189 [...]
+tree STATE_9850000 [&lnP=-22208.40336336499] = [&R] (9:[&rate=0.0034027229269203148]105.36430085011904,(33:[&rate=0.0038335347621747924]69.70454104506346,(((((((8:[&rate=0.005031863998879503]14.336757239821493,10:[&rate=0.00453904445384169]14.336757239821493):[&rate=0.0032970134150438262]8.605467334560418,((3:[&rate=0.004815616535045979]10.366758072061446,5:[&rate=0.003176600146010413]10.366758072061446):[&rate=0.004718447375352977]10.03992011333324,((13:[&rate=0.0033513171193831496]14.6 [...]
+tree STATE_9860000 [&lnP=-22201.267183981618] = [&R] (9:[&rate=0.004358119937361779]93.71593737533276,((14:[&rate=0.0046479489427474156]65.7010629618403,33:[&rate=0.003395381687202707]65.7010629618403):[&rate=0.004184837588247777]3.8132044975642714,((((((2:[&rate=0.004407676798482376]18.705543517122702,((1:[&rate=0.004407676798482376]5.8768761798010285,6:[&rate=0.0034515484646719056]5.8768761798010285):[&rate=0.003395381687202707]9.200902090161016,4:[&rate=0.004072987972723483]15.0777782 [...]
+tree STATE_9870000 [&lnP=-22223.859895082405] = [&R] (9:[&rate=0.002627189774830023]133.28442944091358,((14:[&rate=0.0026438184730732317]82.13800996543924,((((((3:[&rate=0.0026415973460976417]14.72542892922352,5:[&rate=0.0026678915640177695]14.72542892922352):[&rate=0.0025981024410013536]18.351381204340363,((13:[&rate=0.002648446674462744]25.272323623808845,7:[&rate=0.0026415973460976417]25.272323623808845):[&rate=0.002613734731463797]2.8128056711043854,(11:[&rate=0.002619459926729262]8. [...]
+tree STATE_9880000 [&lnP=-22217.943426928272] = [&R] (9:[&rate=0.0037650287397870716]109.546015455785,((14:[&rate=0.0035937402658194452]74.0929977654459,33:[&rate=0.002897679439781653]74.0929977654459):[&rate=0.002280249292611101]11.119348493227477,(((((((3:[&rate=0.003040449218582246]11.020741751415587,5:[&rate=0.0034368451495991473]11.020741751415587):[&rate=0.0032530633577360544]17.88688350790298,((13:[&rate=0.0027098029591930764]17.200235930750768,7:[&rate=0.003810930008555758]17.200 [...]
+tree STATE_9890000 [&lnP=-22218.179268144704] = [&R] (9:[&rate=0.0028469025120512344]149.5094779710239,(((((((((3:[&rate=0.002502308627343771]16.016894859407863,5:[&rate=0.0025286637623716758]16.016894859407863):[&rate=0.002475585208675889]17.162963946232452,((13:[&rate=0.0020111152154284402]22.026052042496556,7:[&rate=0.003033146738500942]22.026052042496556):[&rate=0.002434347133840578]3.1783947224628513,(11:[&rate=0.003266299505732925]7.382731838491287,12:[&rate=0.0020111152154284402]7 [...]
+tree STATE_9900000 [&lnP=-22219.490353801433] = [&R] (9:[&rate=0.0029512624443074993]116.02139332185357,((((((((2:[&rate=0.003059489619543617]23.300004655405807,((1:[&rate=0.002940302047338127]7.695463560305715,6:[&rate=0.003045099285415294]7.695463560305715):[&rate=0.002940302047338127]14.295334026545625,4:[&rate=0.0031277104971300946]21.99079758685134):[&rate=0.0030875685123456643]1.3092070685544677):[&rate=0.003029347371851152]4.983784118572018,(18:[&rate=0.003057348851031216]22.77342 [...]
+tree STATE_9910000 [&lnP=-22214.203980078746] = [&R] (9:[&rate=0.0020534407953413914]163.46915755911382,(14:[&rate=0.002415458348231326]109.16164650838711,(((((((2:[&rate=0.002726687797393056]29.80112724833335,((1:[&rate=0.0031006002275543345]5.817536117373792,6:[&rate=0.0027790360613620868]5.817536117373792):[&rate=0.0021864853195702207]19.58397024798566,4:[&rate=0.0029405422890800615]25.401506365359452):[&rate=0.0028858052529739655]4.399620882973899):[&rate=0.0027434615202579045]4.2250 [...]
+tree STATE_9920000 [&lnP=-22228.00909128526] = [&R] (9:[&rate=0.0024288676218273677]170.8182540871745,(33:[&rate=0.002327247491494147]131.25276281840536,((((16:[&rate=0.0017721089236943063]50.35821277617777,((((2:[&rate=0.002607714635044862]32.3109235644515,((1:[&rate=0.0029843892284489187]9.01333757671528,6:[&rate=0.0024073417730874364]9.01333757671528):[&rate=0.002168292035000087]18.370643823852316,4:[&rate=0.002639437656976588]27.383981400567595):[&rate=0.002236489085563207]4.92694216 [...]
+tree STATE_9930000 [&lnP=-22215.362249031645] = [&R] (9:[&rate=0.002302347264615616]157.03930090065325,(33:[&rate=0.001995201997507259]115.9352167502231,(((((((2:[&rate=0.0025252560817784287]30.889286807590622,((1:[&rate=0.0023163145654459876]9.861490303321203,6:[&rate=0.002035783523577882]9.861490303321203):[&rate=0.0020994710172306075]16.161948371338312,4:[&rate=0.0025252560817784287]26.023438674659513):[&rate=0.0021488335279115844]4.865848132931109):[&rate=0.0019027657397488226]8.8083 [...]
+tree STATE_9940000 [&lnP=-22220.231842646688] = [&R] (9:[&rate=0.002460169114665668]163.64006380544086,(33:[&rate=0.0022910063258915164]131.7936185334673,((((16:[&rate=0.002363813553753481]45.57453535773513,((((2:[&rate=0.0021679301576811557]30.518811991419714,((1:[&rate=0.0018784296931253952]11.59461728885011,6:[&rate=0.0019728871218199803]11.59461728885011):[&rate=0.002315648631582223]14.257002238018686,4:[&rate=0.0028755133520072685]25.851619526868795):[&rate=0.0030235806530259837]4.6 [...]
+tree STATE_9950000 [&lnP=-22205.235538286135] = [&R] (9:[&rate=0.002683636639318472]108.73515186073847,((((((((2:[&rate=0.0037820668574169627]22.21280993595945,((1:[&rate=0.002850974227203364]6.128305857972671,6:[&rate=0.002940973686685057]6.128305857972671):[&rate=0.003193166604635256]11.969584745983447,4:[&rate=0.0045693508826340525]18.097890603956117):[&rate=0.003026835202476196]4.114919332003332):[&rate=0.0028196232398321865]2.756230392810881,(18:[&rate=0.002850974227203364]21.849266 [...]
+tree STATE_9960000 [&lnP=-22220.8952449779] = [&R] (9:[&rate=0.0025883023290778654]158.78502026811148,(33:[&rate=0.0026815948829269744]109.02751162034752,(((((((2:[&rate=0.002372615586071414]31.186736710771207,((1:[&rate=0.0024609750757198083]8.414197851699543,6:[&rate=0.0023014228752308765]8.414197851699543):[&rate=0.0021483124812696425]18.90289401263113,4:[&rate=0.002652349897398436]27.31709186433067):[&rate=0.002423416879530433]3.8696448464405364):[&rate=0.0027188937600602816]4.850046 [...]
+tree STATE_9970000 [&lnP=-22210.5570320945] = [&R] (9:[&rate=0.0023612355328108403]152.4340780283371,(33:[&rate=0.0018742450163896005]115.64890853177076,(14:[&rate=0.003483339118100866]84.72528801358642,((((((((13:[&rate=0.00204822959766543]21.305508764865397,(11:[&rate=0.0026329022362878186]7.004390634879475,12:[&rate=0.0020865294342551706]7.004390634879475):[&rate=0.003201877365174749]14.301118129985923):[&rate=0.0028215682351651803]4.445451319253863,7:[&rate=0.0027053201272072597]25.7 [...]
+tree STATE_9980000 [&lnP=-22207.73231212401] = [&R] (9:[&rate=0.0033290316266420617]95.46508931975383,(33:[&rate=0.0034080797740816795]74.46812714810872,(14:[&rate=0.004434154352837692]66.1543829997321,(((((((13:[&rate=0.003552834267797713]14.858058973488497,7:[&rate=0.004434154352837692]14.858058973488497):[&rate=0.0030228646561968]4.454809376552426,(11:[&rate=0.004909111497290881]4.649714925776727,12:[&rate=0.004136372849582633]4.649714925776727):[&rate=0.0036588518291295925]14.6631534 [...]
+tree STATE_9990000 [&lnP=-22224.619817633044] = [&R] (9:[&rate=0.0019161219287265573]161.71816739913197,(33:[&rate=0.002211078892328373]103.86687701605587,(14:[&rate=0.0028664974976002604]87.1103650989621,(((((((2:[&rate=0.002560955494388655]32.54399828246454,((1:[&rate=0.002704940821542097]7.104260341720765,6:[&rate=0.00245879799546835]7.104260341720765):[&rate=0.0020220177495094294]19.84424189875293,4:[&rate=0.002663915434189892]26.948502240473694):[&rate=0.0028475757748612713]5.595496 [...]
+tree STATE_10000000 [&lnP=-22211.16354300554] = [&R] (9:[&rate=0.002872983148149406]133.49453952847972,((((((((2:[&rate=0.0029905045412702046]24.501929635488477,((1:[&rate=0.0029268057252540337]7.181524544620856,6:[&rate=0.0024407291542424873]7.181524544620856):[&rate=0.002768361563097923]13.91237753747653,4:[&rate=0.0029905045412702046]21.093902082097387):[&rate=0.002703316878483909]3.40802755339109):[&rate=0.002768361563097923]7.13691806722149,(18:[&rate=0.0025697670223146225]25.082533 [...]
+End;
diff --git a/doc/source/examples/pythonidae.chars.nexus b/doc/source/examples/pythonidae.chars.nexus
new file mode 100644
index 0000000..44cd7e0
--- /dev/null
+++ b/doc/source/examples/pythonidae.chars.nexus
@@ -0,0 +1,99 @@
+#NEXUS
+
+[
+    Pythonidae data set as used in:
+
+    AUTHORS   Rawlings,L.H., Rabosky,D.L., Donnellan,S.C. and Hutchinson,M.N.
+    TITLE     Python phylogenetics: inference from morphology and mitochondrial
+              DNA
+    JOURNAL   Biol. J. Linn. Soc. Lond. 93 (3), 603-619 (2008)
+
+    Data:
+        CR      n=732   1-732
+        CYTB    n=1114  733-1846
+        12S     n=372   1847-2218
+        16S     n=498   2219-2716
+]
+
+BEGIN TAXA;
+    TITLE DEFAULT;
+    DIMENSIONS NTAX=33;
+    TAXLABELS
+        'Xenopeltis unicolor'
+        'Loxocemus bicolor'
+        'Morelia spilota'
+        'Morelia bredli'
+        'Morelia carinata'
+        'Morelia amethistina'
+        'Morelia oenpelliensis'
+        'Morelia boeleni'
+        'Morelia viridisS'
+        'Morelia viridisN'
+        'Liasis olivaceus'
+        'Liasis mackloti'
+        'Liasis fuscus'
+        'Liasis albertisii'
+        'Apodora papuana'
+        'Bothrochilus boa'
+        'Antaresia maculosa'
+        'Antaresia stimsoni'
+        'Antaresia childreni'
+        'Antaresia perthensis'
+        'Antaresia melanocephalus'
+        'Antaresia ramsayi'
+        'Python reticulatus'
+        'Python timoriensis'
+        'Python sebae'
+        'Python molurus'
+        'Python curtus'
+        'Python regius'
+        'Candoia aspera'
+        'Morelia nauta'
+        'Morelia clastolepis'
+        'Morelia tracyae'
+        'Morelia kinghorni'
+  ;
+END;
+
+BEGIN CHARACTERS;
+    TITLE DnaCharacterMatrix4302953360;
+    LINK TAXA = DEFAULT;
+    DIMENSIONS  NCHAR=2716;
+    FORMAT DATATYPE=DNA GAP=- MISSING=? MATCHCHAR=.;
+    MATRIX
+'Xenopeltis unicolor'         ???????????????????????????????????????????????????????AAACT--AAAATTCCCATTT-CCCACATATAT----GGATATT--ACGAAG--AAAAAA---GGACTAAAAAAAG---TCCTCTCTCGACCCCCCCCCTACCCCCCCCC-ACAGTT---AGTACGGGT------TTTCC-ATATATGTAACTCTTATAGATTTGCCTATCAAGGC-ATACTATGTATAATCATACATTAATGGCTTGCCCCATGAATATTAAACAGGAATTTCCCTTTAAATATTTTAGCCTAAAAAAGCCTTCGTACAGAACTTTAATA----CCACATTTCT-CAGTCGTTCAATGAAGCACGGAT-ATAGTA--TTGTT-GATAACCATGACTATCC--ACATCCAACTTGTCTTACAGGATCTTGCTA-TTCACGTGAAATCCTCTATCCTTT [...]
+'Loxocemus bicolor'           ?????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+'Morelia spilota'             ?GCCACACCCCTCACTTCCTCC-------------------CAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAGCCAAAAATCCATATAATTTACCACAAAATAAAG-----CTCTCTC-TCGGCCCCCCCCCTACCCCCCCCC---AARAA-CATTGGGGAR------ACCGGCACACAAAACCA--TTARAAAACTCTTAACAAACCT--CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAATATTCTG----TCCTCATTCTCTTGGTCGTTCTATGCAGCACGAGTT--AACTA-ATCTT-ATTAATCATGGATATTC-TCAAC-CTAAGGGTGTCTCTTAGTCTAGCG-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Morelia bredli'              ?GCCAC-CCCCTCACTTCCT---------------------TAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAACCAAAAATCCATATAATTTACCACAAAATAAAG-----YTYTYTY-TYGGCCCCCCCCCTACMCCCCCCC--AAAGAA-CATTGGGAAA------ACCGGCACACAAAACTA--TTAGAAAACTCTTAACAAACCC--CTCTATGTATAATCTTACATTAATGGTTTGCCTCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAACATTCCG----TCCTCATTCTCCTGGTCGTTCTATGCAGCATGAGTT--AACCA-ATCTT-ATTGATCATGGATATTC-TTAAC-CTAAGGGTGTCTCTTATCCTAGCA-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Morelia carinata'            ?GCCACAACCCTCACTTCCT--------------------AAAACCATAGTCTGTAAA--TACAGACTATGGT--TCTTACCTCAATATA-AAGCCAAAAACCCATATAAAACGC-ACACAATAAAACG---CTCTC-C-TCGGCCCCCCCCCTACCCCCCCC--ATAATAAACATAGGAGAA------ATCAGCACACAAAACTA--CTGAAGATACCCCCTCATCTCT--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGTCTAAATAAGCCTTCGTACAGAATATTTAG----TCCTCATTTTC-TGGTCGTTCAATGCAACACGGATT--AATGG-ATCTT-ACTAACCATGGCTATCC-TTGAT-CAAGKGGKGTCTYTTAATCTAGTA-CTTCCCGTGAAACCCTCTATCCTTC [...]
+'Morelia amethistina'         ?ACCACACCCCTCACTTCCTC--------------------CAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAGCCAAAAATCCATATAATTTACCACAAAATAAAG-----CTCTCTC-TCGGCCCCCCCCCTACCCCCCCCC--AAAAAAACATTGGGGAA------ACCGGCACACAAAACCA--TTAAAAAACTCTTAACAAACCT--CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAATATTCTG----TCCTCATTTTCTTGGTCGTTCTATGCAGCATGAGCT--AACTA-ATCTT-ATTAATCATGGATATTC-TTAAC-CTARGGGTGTCTCTTAGTCTAGCG-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Morelia oenpelliensis'       ?GCCAC-MCCCTCACTTCCT---------------------TAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAACCAGAAATCCATATAATTTACCACCAAATAAAG-----YTYTYTY-TYGGCCCCCCCCCTACCCCCCCCC--AAAGAA-CATTGGGAGA------ACCGGCACACAAAATTA--TTAGAAGACTTTTAACATACCC--CTCTATGGATAATTTTACATTAATGGTTTGCCTCATGRATATTAAGCAGGGAWTTCCCTTTTATTATTTTAGTCTAAAACGGCCCTTGTACCAGACATTCCG----TCCTCATTCTCCTGGTCGTTCTATGCAGCATGAGTT--AACCA-ATCTT-ATTGATCATGGATATTCCTTGAC-CTAAGGGTGTSTCTTATCCTAGCA-CTTCCCGTGAAATCCT-TATCCTTC [...]
+'Morelia boeleni'             ??AAACAACCCTCACTTCCTT--------------------CAACCATAGTCTGGA---TTCCAGACTATGGT--TGTTACCTAAAAAACTAAAGAAAAAATCCATATAAAC----------TAAAAA----CTCTCTC-TCGGCCCCCCCCCTACMCCCCCC---GGGTCAGCACAAAAAC-------ATCAC---------------CCAAAAATCCCCCTTTTT-CC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATAATAAGCAGGAATTTCCCTTTAAATATTTTAGTCTAAAATAGCCTTT-TACATAAAATTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAT-ATCTT-ATTGATCATGGATATCC-TTGGT-CTAATGGTGTCTCTTAGTCTAACA-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Morelia viridisS'            ?A-ATCAACCCTCACTTCCTCC-------------------TAGCCATAGTCTGTAAG-TTACAGACTATGGCT--CATGCCTTAATATATAAACCAAAAACCCATATAAT-CACTGAACAATAAAA-----CTYTYTYCTCGGCCCCCCCCCTACCCCCCCC---GGAAAACCATAAAA---------ATCAGCACATAAATAAA--CCTACTAATCCCATTGCTTCCT---CCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAATTAGCTTCCGTACAAAATATCTAG----CCCTCATTTTC-TGGTCGTTCAATGCAATCGGGGTT--AATAA-ATCTT-ACTAACCATGGATATCC-TTGAT-CAGGTGGTGTCTCTTAATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Morelia viridisN'            ?A-CTCAACCCTCACTTCCTTC-------------------CAGCCATAGTCTGTAAA-TTACAGGCTATGGCT--CATACCTTGATATATAAACCAAAAACCCATATAATTCACCACACAACAAAA-----CTCTCTCCTCGGCCCCCCCC-TACCCCCCCCC--GGAAAAACATAGAAGAA------GTCAGCACAATTAAACT--TACTGATAACCCCTTGCTTCCT--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAAATAGCCTTTGTACAAAATACCTTG----TCCTCATTTTC-TGGTCGTYCAATGCAATCGGGTCT--AACAA-ATCTT-ACTAACCATGGATATCC-TTGAT-CAAGTTGTGTCTCTTAATCTAGTAACTTCCCGTGAAATCCTCTATCCT-C [...]
+'Liasis olivaceus'            ?GCCACAACCCTCACTTCCCC-----------------ACCTAACCATAGTCTGTAAA-TTACAGACTATGGT--TGATACCTTAATACA-AAGCCGAAACCCCATATAAACAGCACCACAACAAAA----CTCTACTC-TCGGCCCCCCCCCTACMCCCCCCC--ACAAAAACATAGGARAA------ATCAGCACAAACAATC---MCCTAAAATCCCCCCTTAACCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGTCTGAATTAGCCCTTGTACAAAAAATCTTG----TCCTCATTTTC-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGTA-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Liasis mackloti'             ?ACCACAACCCTCACTTCCTT--------------------CAGCCATAGTCTGTAA-TTTACAGGCTATGGC--TGATACCTTAATATA-AAACCAAAATCCCATATAAATACCACCACAACAAAG-----CTCTCTC-TCGGCCCCCCCCCTACMCCCCCCC--ACCAAAACATAGAARAA------ATCAGCACAATAAATA---CTARAAGTATTTGCTTCCTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTTAGCAGGAATTTCCCTTTAAATATTTTAGCCTAAAATAGCCTTTGTACACAAAACTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATTTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCATGATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Liasis fuscus'               ?ACCACAACCCTCACTTCCTC--------------------CAGCCATAGTCTGTAA-TTTACAGGCTATGGC--TGATACCTTAATATA-AAACCAAAATCCCATATAAATACCACCACAACAAAG-----CTCTCTY-TCGGCCCCCCCCCTACCCCCCCCC--ACCAAAACATAGAAGAA------ATCAGCACA-AAATAACA-CTAGAAGTATTACTTCCTTGCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTTAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAAATAGCCTTCATACATAAAATTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATGG-ATTTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCATGATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Liasis albertisii'           ????GCTCCTCTCACTTCCTC--------------------AGACCACAGTCTGCAA---TGCAGACTGTGGTTTTGTGCCCAGAATATA--AACCAAAAAACCATATAAACAACACCRCGACAAAAAAGA--TCTCTC-TCGGCCCCCCCCMTACMCCCCCCC--AAAAAAACATARAGGAA------ATCAG--------------------TTCATARACT--------CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAATCTAAATTAGCCTTCGTACACAAAATTCAG----TCCTCATTCTC-TGGTCGTTCAATGCAGCACGGATT--AATCA-GTCTT-ACTAACCATGGATATCC-TTGAT-CTAGTCGTCTCTCTTAGTCTAACA-CTTCCCGTGAAACCCTCTATCCTTC [...]
+'Apodora papuana'             ?GCCACAACCCTCAMTTCCTT--------------------CAGCCACAGTYTGTAA-TTTACAGACTGTGGC--CCATGCCTCAATATA-AAGCCGAAAATCCATATAAATAACACCAAAACAAAG-----CTYTCCC-TYGGCCCCCCCCCTACCCCCCCCC--AAAAAAATATAGAGAA------CTATAGAACAAATAACCA---CCAAGAAGTTCACTATCCCCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTTCCCTTTAAATATTTTAGTCTAAATATGCCCTTGTACACAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCCAGGAAT--AATCA-ATCTT-ATTAACCATGGATATCC-TTGAT-CTAGTGGTGTCTCTTGGTCTAGTA-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Bothrochilus boa'            ??GCACCGCCCTCACTTCCTC--------------------CGACCGCAGTCTGCC----AGCAGGCTGCGGTC-GCATGCCCAAAAACACAAACCAAAAAACCATATAAACAACGCCGCAACAAAAGG----YCYCYC-TCGGCCCCCCCCCTAC-CCCCCCC-ACAAAAAACATAGAGAAA------ATCAG--------------------TTTTCACAC---------CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAATCTAAAATAGCCTTTGTACATAAAATTTGC----CCCTCATTTCT-TGGTCGTTCAATGCAGCATGGATT--AATCA-GTCTT-ATTAACCATGGATATTC-TCAGT-CTAGTTGTGTCTCTTAGCCTAACA-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Antaresia maculosa'          ?ACCACAACCCTCACTTCCTCC--------------------AGCCATAGTCTGTAAATTTACAGACTATGGC--TGATACCTCAACATA-CAGCCAAAATTCCATATAATAT-CCCCACAACAA-----CTYTYTYTCYTYGGCCCCCCCCCTACCCCCCCC---ATCCAAATATATAAGAA------ATCAGCACAATAAACCT--ACTAGGAATTGCCAATAACTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTCAAATATTTTAGTCTAAAATAGCCTTTGTACAGAATATTTAG----TCCTCATTTCT-TGGTCGTTCAATGCAACACGGATT---ATCAGTTCTT-ACTAACCATGGATATCC-TTGAT-CTAGTGGTGTCTCTTAATCTAGTA-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Antaresia stimsoni'          ?ACCACAACCCTCAMTTCCTTTCAGCCATAGTCTGTAAATACAGCCATAGTCTGTAAA--TACAGACTATGGC--TGATACCGCCATATA-GAGCCGAAAACCCATATAATATGCCACACAATAAA------CTYTYTCCTYGGCCCCCCCCCTACCCCCCCCC--ATTAAAACATATGGGAA------AACAGCACAAATACATA--TTAAAGAATGTCCAATTAATCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGCCTAAAATGTCCTTCGTACAGAATATTAAG----TCCTCATTTTC-TGGTCGTTCAATGCAATCAGGATT--AATCA-TTCTT-ACTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCTTAATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Antaresia childreni'         ?GCCACAACCCTCACTTCCTTCCAGCCATAGTCTGTAAATACAGCCATAGTCTGTAAA--TACAGACTATGGC--TGATGCCGCCATATA-GAGCCGAAAAACCATATAATATACCACACAATAAA------CTCTCTCCTCGGCCCCCCCCCTACCCCCCCCC--ATTAAAACATATGGGAA------AGCAGCACAAATACATA--TTAAAGAATGTCCAATTAATCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGTCTAAAATGTCCTTTGTACAGAATATTTAG----CCCTCATTCTC-TGGTCGTTCAATGCAATCGGGATT--AATCA-TTCTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCTTGATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTT [...]
+'Antaresia perthensis'        ?ATCA-AACCC------------------------AAAACTAAGCCACAGCCTGTTT--AAACAGGCTGTGGC--TGATGCCGCCATACA-AAGCCGAAATTCCATATAACACACCACAATATAAA------CTYTYTCCTYGGCCCCCCCCCTACCCCCCCCC--AACCAAACATATAAGAA------AACAGAACAGTGAACAA--TTAGAGATTCTCCAATTAACTC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGGATATTAAGCAGGAATTTCCCTTCAAATATTTTAGTCTAAAATAGCCTTTGTACAGTCTATTTAG----TCCTCATTTTC-TGGTCGTTCAATGCAGCATGGATT--AATCA-TTCTT-ACCGATCATGACTATCC-TTGAT-CTAGTGGTGTCTCTTAATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Antaresia melanocephalus'    ?ACCACA----------CCTTCC-----------------CCAACCATAGTCTGTAACC--ACAGACTATGGT--CGATGTCTCAATATA-AAGCCAAAAATCTATATAAATAAA-ACACAATAAAG-----CTCTCTCCTCGGCCCCCCCC-TACMCCCCCC--ACAAGAAATATAGAAGAA------ACCAGCACATAAGACTA--TAAGGATTCCCCCCTTCTTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTCCCATTTAAATATTTTAATCTAAATTTGCCTTTGTACTTAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGCT-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Antaresia ramsayi'           ?ACCACG----------CCTTCC-----------------CCA-CCATAGTCTGTAAA-TTACAGACTATGGT--CGTTGCCTCAACATA-AAGCCAAAAACCCATATAAACAAAAC--ATATAAA----CTCTCTCTCCTCGACCCCCCCC-TACCCCCCCC--ACAAGAAATATAGAAGAA------ACCAGCACATAAGACTA--TAAGGATTTCCCCCTCCTTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTCCCATTTAAATATTTTAATCTAAATT-GCCTTCGTACCTAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGCT-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Python reticulatus'          ??CCATCACCCTCACTTCCTCC--------------------AACCATAGCCAAATA-TTT---GGCTATGGTT-TCATGCCAAAATATATCAACCAAAAACCCATATTAATATAATGCTATAAAATGG-------TCCCTCGACCCCCCCCCTACCCCCCCC--AAAAAA--CATAAGGAAA------GTCCG-CACATCATAAACCTCGTACTTTTCCCTATTTTTT-GCTCCTATGTATAATCTTACATTAATGGCTTGCCCCATGGATAATAAGCAGGAATTTCCCTTTTAATATTTTAGTCTAAATTAGCCTTCGTACAGGTAATTCAGT----CCTCATTTTC-TGGTCGTTCAATGCAGCATGGATT--AATAA-TTGTT-GATAACCATGGATATCC-TTGAT-CTAGTTGTGTCCCTTGATTTAACA-CTTCCCGTGAAATCCTCTATCCTTC [...]
+'Python timoriensis'                TA-CACCACCA------------------------------AGACCATAGTCGGTAAATC----GACTATGGTCTTTTTACGCCAAAAATACAACCAAAAATCCATATTAATATAGCAATATAAAATAG-------CCCCTCGACCCCCCCCCTACCCCCCCCC-ACAAAAA-TATAAAGAAA------ACCCG-TATGTCATAAACTCCGAATTTTTCCCTATTTTT--GCCCCTATGTATAATCATACATTATTGGCTTGCCCCATGGATAATAAGCAAGAATTCCCTTTTTAATATTTTAGTCTAAAATTGCCTTT-TACAAAAAACTCAGT----CCTCATTTCT-TGGTCGTTCAATGCAGCATGGGCT--AATAA-TTATT-AATAACCATGACTATCC-TTGAT-CTAGTTGTGTCTCTTAGTTTGGTA-CTTCCCGTGAAATCCTCTA [...]
+'Python sebae'                ?????????????????????????????????????????????????????????????????????????????CTTCCTCAGACAC-AAACTCA-ACCTCAAATAAAAATAAAAATAAT-----------CCTACCTCGGCCCCCCCCCTACCCCCCCC--ACTATTT-CATATGGAA-------TACAGGATATATAC-TTTGTTAGAAAAATCCATATTTTTTCTACCCTATGTATAATCTTACATTAATGGCTTGCCCCATGAATAATAAGCGGGAATTCCTAATAAAATATTTTAGCCTAAAATTGCCTTCGTACATAAAATT-AGC---TCCACATTTCTTTGGTCGTTCAATGCTGCANGGATTATAGTAC-TTCTT-AATACACATGACTATCC-TTGAT-CTAGTCGTCTCTCTTAACTTAACA-CTTCCCGTGAAATCCTCTATCCTTT [...]
+'Python molurus'              ?????????????????????????????????????????????????????????????????????????????CTTCCTCAGACAC-AAACTCA-ACCTCAAATAAAAATAAAAACAAT-----------CCTACCTCGGCCCCCCCCCTACCCCCCCCC-ACTATTT-CATATGGAA-------TACAGGATATATACATTTGTTAGAAAAATCCATATTTTTTCTACCCTATGTATAATCTTACATTAATGGCTTGCCCCATGAATAATAAGCGGGAATTCCTAATAAAATATTTTAGCCTAAAATTGCCTTCGTACATAAAATT-AGC---TCCACATTTCTTTGGTCGTTCAATGCTGCACGGATTATAGTAC-TTCTT-AATACACATGACTATCC-TTGAT-CTAGTCGTCTCTCTTAACTTAACA-CTTCCCGTGAAATCCTCTATCCTTT [...]
+'Python curtus'               ?CCACAAAA-----------------------------------CCAT-----------------ATTAATYTT--CCCACCTATAAYTA-AACCCGAAATTCCCTATAAA--CACAACAAAAAATA-----CTCCTTCYTCGCCCCCCCCC-TACCCCCCCCCCAC-ATTT-AATATAAGAT------TCTGG--AATATACACACATCGTTAATTTCCATATTTTTT--ATGCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATAATAAGCCGGAATTCCATATTAAATATTTTAGCCTAAAATTGCCTTAGTACCTAAAACT-AGTCCTTCCTCATTTTC-TGGTCGTTCAATGCTGCATGGATT--AATCA-TTCTTTAACAGATATGTCTATCC-TTGAT-CTAGTCGTCTCTCTTAACCTGGCG-CTTCCCGTGAAATCCTCTATCCTTT [...]
+'Python regius'               ?????????????????????????????????????????????????????????????????????????????TTACCTCAAT----AAACCCAAACCCACTATAAAAATATAA-----------------CCCCCTCGGCCCCCCCCCTTCCCCCCCCC-ACTTACA---TAGGAGGA------TTTAG-ATATATACACATATTAGGATTTTCCCTATCTTTTC-ACCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAACCAGAATTTCCAATTAAATATTTTAACCTAAAATTGCCTTCGTACACTACACC-AGT---CCCTCATTTCT-TGGTCGTTCAATGCTGCACGGATTATAGTAC-TTATT-AATGCTCATGTCTATCC-TTGGT-CTAGTGGTGTCTCTTAGTTTAACA-CTTCCCGTGAAATCCTCTATCCTTT [...]
+'Candoia aspera'              ?????????????????????????????????????????????????????????????????????????????????????????????????AAA----CTA-------------------------CT-CTCTGG-GACCCCCCCC-TACCCCCCCC--AGATAAACTATACTAAAATTTACCTGAGTACACTATGTAAATATTGTACATTAGTCTATATTTC--ATGCTATGTATAATCATACATTAATGATCTGCCCCATGGATAATAAGCAGGAATTTCCCTATTAATATTTCAGCCTATTAATGCCTTAGTACAGTCAGTGTGTC---ACCACATCAT--GGGTCGTTTTATGCAGCAAGGATTA-ACTA--TTATT-GGTAATCATGCCTATCC--TGATCCAAGTTGTC-CTCTTAATCTACCTA-CTCACGTGAAATCCTCTATCCTTC [...]
+'Morelia nauta'               ?????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+'Morelia clastolepis'         ?????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+'Morelia tracyae'             ?????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+'Morelia kinghorni'           ?????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+    ;
+END;
+
diff --git a/doc/source/examples/pythonidae.chars.phylip b/doc/source/examples/pythonidae.chars.phylip
new file mode 100644
index 0000000..5ffa51f
--- /dev/null
+++ b/doc/source/examples/pythonidae.chars.phylip
@@ -0,0 +1,34 @@
+33 2716
+Xenopeltis_unicolor             ???????????????????????????????????????????????????????AAACT--AAAATTCCCATTT-CCCACATATAT----GGATATT--ACGAAG--AAAAAA---GGACTAAAAAAAG---TCCTCTCTCGACCCCCCCCCTACCCCCCCCC-ACAGTT---AGTACGGGT------TTTCC-ATATATGTAACTCTTATAGATTTGCCTATCAAGGC-ATACTATGTATAATCATACATTAATGGCTTGCCCCATGAATATTAAACAGGAATTTCCCTTTAAATATTTTAGCCTAAAAAAGCCTTCGTACAGAACTTTAATA----CCACATTTCT-CAGTCGTTCAATGAAGCACGGAT-ATAGTA--TTGTT-GATAACCATGACTATCC--ACATCCAACTTGTCTTACAGGATCTTGCTA-TTCACGTGAAATCCTCTATCCT [...]
+Loxocemus_bicolor               ???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+Morelia_spilota                 ?GCCACACCCCTCACTTCCTCC-------------------CAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAGCCAAAAATCCATATAATTTACCACAAAATAAAG-----CTCTCTC-TCGGCCCCCCCCCTACCCCCCCCC---AARAA-CATTGGGGAR------ACCGGCACACAAAACCA--TTARAAAACTCTTAACAAACCT--CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAATATTCTG----TCCTCATTCTCTTGGTCGTTCTATGCAGCACGAGTT--AACTA-ATCTT-ATTAATCATGGATATTC-TCAAC-CTAAGGGTGTCTCTTAGTCTAGCG-CTTCCCGTGAAATCCTCTATCCT [...]
+Morelia_bredli                  ?GCCAC-CCCCTCACTTCCT---------------------TAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAACCAAAAATCCATATAATTTACCACAAAATAAAG-----YTYTYTY-TYGGCCCCCCCCCTACMCCCCCCC--AAAGAA-CATTGGGAAA------ACCGGCACACAAAACTA--TTAGAAAACTCTTAACAAACCC--CTCTATGTATAATCTTACATTAATGGTTTGCCTCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAACATTCCG----TCCTCATTCTCCTGGTCGTTCTATGCAGCATGAGTT--AACCA-ATCTT-ATTGATCATGGATATTC-TTAAC-CTAAGGGTGTCTCTTATCCTAGCA-CTTCCCGTGAAATCCTCTATCCT [...]
+Morelia_carinata                ?GCCACAACCCTCACTTCCT--------------------AAAACCATAGTCTGTAAA--TACAGACTATGGT--TCTTACCTCAATATA-AAGCCAAAAACCCATATAAAACGC-ACACAATAAAACG---CTCTC-C-TCGGCCCCCCCCCTACCCCCCCC--ATAATAAACATAGGAGAA------ATCAGCACACAAAACTA--CTGAAGATACCCCCTCATCTCT--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGTCTAAATAAGCCTTCGTACAGAATATTTAG----TCCTCATTTTC-TGGTCGTTCAATGCAACACGGATT--AATGG-ATCTT-ACTAACCATGGCTATCC-TTGAT-CAAGKGGKGTCTYTTAATCTAGTA-CTTCCCGTGAAACCCTCTATCCT [...]
+Morelia_amethistina             ?ACCACACCCCTCACTTCCTC--------------------CAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAGCCAAAAATCCATATAATTTACCACAAAATAAAG-----CTCTCTC-TCGGCCCCCCCCCTACCCCCCCCC--AAAAAAACATTGGGGAA------ACCGGCACACAAAACCA--TTAAAAAACTCTTAACAAACCT--CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAATATTCTG----TCCTCATTTTCTTGGTCGTTCTATGCAGCATGAGCT--AACTA-ATCTT-ATTAATCATGGATATTC-TTAAC-CTARGGGTGTCTCTTAGTCTAGCG-CTTCCCGTGAAATCCTCTATCCT [...]
+Morelia_oenpelliensis           ?GCCAC-MCCCTCACTTCCT---------------------TAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAACCAGAAATCCATATAATTTACCACCAAATAAAG-----YTYTYTY-TYGGCCCCCCCCCTACCCCCCCCC--AAAGAA-CATTGGGAGA------ACCGGCACACAAAATTA--TTAGAAGACTTTTAACATACCC--CTCTATGGATAATTTTACATTAATGGTTTGCCTCATGRATATTAAGCAGGGAWTTCCCTTTTATTATTTTAGTCTAAAACGGCCCTTGTACCAGACATTCCG----TCCTCATTCTCCTGGTCGTTCTATGCAGCATGAGTT--AACCA-ATCTT-ATTGATCATGGATATTCCTTGAC-CTAAGGGTGTSTCTTATCCTAGCA-CTTCCCGTGAAATCCT-TATCCT [...]
+Morelia_boeleni                 ??AAACAACCCTCACTTCCTT--------------------CAACCATAGTCTGGA---TTCCAGACTATGGT--TGTTACCTAAAAAACTAAAGAAAAAATCCATATAAAC----------TAAAAA----CTCTCTC-TCGGCCCCCCCCCTACMCCCCCC---GGGTCAGCACAAAAAC-------ATCAC---------------CCAAAAATCCCCCTTTTT-CC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATAATAAGCAGGAATTTCCCTTTAAATATTTTAGTCTAAAATAGCCTTT-TACATAAAATTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAT-ATCTT-ATTGATCATGGATATCC-TTGGT-CTAATGGTGTCTCTTAGTCTAACA-CTTCCCGTGAAATCCTCTATCCT [...]
+Morelia_viridisS                ?A-ATCAACCCTCACTTCCTCC-------------------TAGCCATAGTCTGTAAG-TTACAGACTATGGCT--CATGCCTTAATATATAAACCAAAAACCCATATAAT-CACTGAACAATAAAA-----CTYTYTYCTCGGCCCCCCCCCTACCCCCCCC---GGAAAACCATAAAA---------ATCAGCACATAAATAAA--CCTACTAATCCCATTGCTTCCT---CCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAATTAGCTTCCGTACAAAATATCTAG----CCCTCATTTTC-TGGTCGTTCAATGCAATCGGGGTT--AATAA-ATCTT-ACTAACCATGGATATCC-TTGAT-CAGGTGGTGTCTCTTAATTTAGTA-CTTCCCGTGAAATCCTCTATCCT [...]
+Morelia_viridisN                ?A-CTCAACCCTCACTTCCTTC-------------------CAGCCATAGTCTGTAAA-TTACAGGCTATGGCT--CATACCTTGATATATAAACCAAAAACCCATATAATTCACCACACAACAAAA-----CTCTCTCCTCGGCCCCCCCC-TACCCCCCCCC--GGAAAAACATAGAAGAA------GTCAGCACAATTAAACT--TACTGATAACCCCTTGCTTCCT--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAAATAGCCTTTGTACAAAATACCTTG----TCCTCATTTTC-TGGTCGTYCAATGCAATCGGGTCT--AACAA-ATCTT-ACTAACCATGGATATCC-TTGAT-CAAGTTGTGTCTCTTAATCTAGTAACTTCCCGTGAAATCCTCTATCCT [...]
+Liasis_olivaceus                ?GCCACAACCCTCACTTCCCC-----------------ACCTAACCATAGTCTGTAAA-TTACAGACTATGGT--TGATACCTTAATACA-AAGCCGAAACCCCATATAAACAGCACCACAACAAAA----CTCTACTC-TCGGCCCCCCCCCTACMCCCCCCC--ACAAAAACATAGGARAA------ATCAGCACAAACAATC---MCCTAAAATCCCCCCTTAACCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGTCTGAATTAGCCCTTGTACAAAAAATCTTG----TCCTCATTTTC-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGTA-CTTCCCGTGAAATCCTCTATCCT [...]
+Liasis_mackloti                 ?ACCACAACCCTCACTTCCTT--------------------CAGCCATAGTCTGTAA-TTTACAGGCTATGGC--TGATACCTTAATATA-AAACCAAAATCCCATATAAATACCACCACAACAAAG-----CTCTCTC-TCGGCCCCCCCCCTACMCCCCCCC--ACCAAAACATAGAARAA------ATCAGCACAATAAATA---CTARAAGTATTTGCTTCCTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTTAGCAGGAATTTCCCTTTAAATATTTTAGCCTAAAATAGCCTTTGTACACAAAACTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATTTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCATGATTTAGTA-CTTCCCGTGAAATCCTCTATCCT [...]
+Liasis_fuscus                   ?ACCACAACCCTCACTTCCTC--------------------CAGCCATAGTCTGTAA-TTTACAGGCTATGGC--TGATACCTTAATATA-AAACCAAAATCCCATATAAATACCACCACAACAAAG-----CTCTCTY-TCGGCCCCCCCCCTACCCCCCCCC--ACCAAAACATAGAAGAA------ATCAGCACA-AAATAACA-CTAGAAGTATTACTTCCTTGCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTTAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAAATAGCCTTCATACATAAAATTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATGG-ATTTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCATGATTTAGTA-CTTCCCGTGAAATCCTCTATCCT [...]
+Liasis_albertisii               ????GCTCCTCTCACTTCCTC--------------------AGACCACAGTCTGCAA---TGCAGACTGTGGTTTTGTGCCCAGAATATA--AACCAAAAAACCATATAAACAACACCRCGACAAAAAAGA--TCTCTC-TCGGCCCCCCCCMTACMCCCCCCC--AAAAAAACATARAGGAA------ATCAG--------------------TTCATARACT--------CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAATCTAAATTAGCCTTCGTACACAAAATTCAG----TCCTCATTCTC-TGGTCGTTCAATGCAGCACGGATT--AATCA-GTCTT-ACTAACCATGGATATCC-TTGAT-CTAGTCGTCTCTCTTAGTCTAACA-CTTCCCGTGAAACCCTCTATCCT [...]
+Apodora_papuana                 ?GCCACAACCCTCAMTTCCTT--------------------CAGCCACAGTYTGTAA-TTTACAGACTGTGGC--CCATGCCTCAATATA-AAGCCGAAAATCCATATAAATAACACCAAAACAAAG-----CTYTCCC-TYGGCCCCCCCCCTACCCCCCCCC--AAAAAAATATAGAGAA------CTATAGAACAAATAACCA---CCAAGAAGTTCACTATCCCCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTTCCCTTTAAATATTTTAGTCTAAATATGCCCTTGTACACAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCCAGGAAT--AATCA-ATCTT-ATTAACCATGGATATCC-TTGAT-CTAGTGGTGTCTCTTGGTCTAGTA-CTTCCCGTGAAATCCTCTATCCT [...]
+Bothrochilus_boa                ??GCACCGCCCTCACTTCCTC--------------------CGACCGCAGTCTGCC----AGCAGGCTGCGGTC-GCATGCCCAAAAACACAAACCAAAAAACCATATAAACAACGCCGCAACAAAAGG----YCYCYC-TCGGCCCCCCCCCTAC-CCCCCCC-ACAAAAAACATAGAGAAA------ATCAG--------------------TTTTCACAC---------CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAATCTAAAATAGCCTTTGTACATAAAATTTGC----CCCTCATTTCT-TGGTCGTTCAATGCAGCATGGATT--AATCA-GTCTT-ATTAACCATGGATATTC-TCAGT-CTAGTTGTGTCTCTTAGCCTAACA-CTTCCCGTGAAATCCTCTATCCT [...]
+Antaresia_maculosa              ?ACCACAACCCTCACTTCCTCC--------------------AGCCATAGTCTGTAAATTTACAGACTATGGC--TGATACCTCAACATA-CAGCCAAAATTCCATATAATAT-CCCCACAACAA-----CTYTYTYTCYTYGGCCCCCCCCCTACCCCCCCC---ATCCAAATATATAAGAA------ATCAGCACAATAAACCT--ACTAGGAATTGCCAATAACTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTCAAATATTTTAGTCTAAAATAGCCTTTGTACAGAATATTTAG----TCCTCATTTCT-TGGTCGTTCAATGCAACACGGATT---ATCAGTTCTT-ACTAACCATGGATATCC-TTGAT-CTAGTGGTGTCTCTTAATCTAGTA-CTTCCCGTGAAATCCTCTATCCT [...]
+Antaresia_stimsoni              ?ACCACAACCCTCAMTTCCTTTCAGCCATAGTCTGTAAATACAGCCATAGTCTGTAAA--TACAGACTATGGC--TGATACCGCCATATA-GAGCCGAAAACCCATATAATATGCCACACAATAAA------CTYTYTCCTYGGCCCCCCCCCTACCCCCCCCC--ATTAAAACATATGGGAA------AACAGCACAAATACATA--TTAAAGAATGTCCAATTAATCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGCCTAAAATGTCCTTCGTACAGAATATTAAG----TCCTCATTTTC-TGGTCGTTCAATGCAATCAGGATT--AATCA-TTCTT-ACTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCTTAATTTAGTA-CTTCCCGTGAAATCCTCTATCCT [...]
+Antaresia_childreni             ?GCCACAACCCTCACTTCCTTCCAGCCATAGTCTGTAAATACAGCCATAGTCTGTAAA--TACAGACTATGGC--TGATGCCGCCATATA-GAGCCGAAAAACCATATAATATACCACACAATAAA------CTCTCTCCTCGGCCCCCCCCCTACCCCCCCCC--ATTAAAACATATGGGAA------AGCAGCACAAATACATA--TTAAAGAATGTCCAATTAATCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGTCTAAAATGTCCTTTGTACAGAATATTTAG----CCCTCATTCTC-TGGTCGTTCAATGCAATCGGGATT--AATCA-TTCTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCTTGATTTAGTA-CTTCCCGTGAAATCCTCTATCCT [...]
+Antaresia_perthensis            ?ATCA-AACCC------------------------AAAACTAAGCCACAGCCTGTTT--AAACAGGCTGTGGC--TGATGCCGCCATACA-AAGCCGAAATTCCATATAACACACCACAATATAAA------CTYTYTCCTYGGCCCCCCCCCTACCCCCCCCC--AACCAAACATATAAGAA------AACAGAACAGTGAACAA--TTAGAGATTCTCCAATTAACTC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGGATATTAAGCAGGAATTTCCCTTCAAATATTTTAGTCTAAAATAGCCTTTGTACAGTCTATTTAG----TCCTCATTTTC-TGGTCGTTCAATGCAGCATGGATT--AATCA-TTCTT-ACCGATCATGACTATCC-TTGAT-CTAGTGGTGTCTCTTAATTTAGTA-CTTCCCGTGAAATCCTCTATCCT [...]
+Antaresia_melanocephalus        ?ACCACA----------CCTTCC-----------------CCAACCATAGTCTGTAACC--ACAGACTATGGT--CGATGTCTCAATATA-AAGCCAAAAATCTATATAAATAAA-ACACAATAAAG-----CTCTCTCCTCGGCCCCCCCC-TACMCCCCCC--ACAAGAAATATAGAAGAA------ACCAGCACATAAGACTA--TAAGGATTCCCCCCTTCTTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTCCCATTTAAATATTTTAATCTAAATTTGCCTTTGTACTTAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGCT-CTTCCCGTGAAATCCTCTATCCT [...]
+Antaresia_ramsayi               ?ACCACG----------CCTTCC-----------------CCA-CCATAGTCTGTAAA-TTACAGACTATGGT--CGTTGCCTCAACATA-AAGCCAAAAACCCATATAAACAAAAC--ATATAAA----CTCTCTCTCCTCGACCCCCCCC-TACCCCCCCC--ACAAGAAATATAGAAGAA------ACCAGCACATAAGACTA--TAAGGATTTCCCCCTCCTTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTCCCATTTAAATATTTTAATCTAAATT-GCCTTCGTACCTAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGCT-CTTCCCGTGAAATCCTCTATCCT [...]
+Python_reticulatus              ??CCATCACCCTCACTTCCTCC--------------------AACCATAGCCAAATA-TTT---GGCTATGGTT-TCATGCCAAAATATATCAACCAAAAACCCATATTAATATAATGCTATAAAATGG-------TCCCTCGACCCCCCCCCTACCCCCCCC--AAAAAA--CATAAGGAAA------GTCCG-CACATCATAAACCTCGTACTTTTCCCTATTTTTT-GCTCCTATGTATAATCTTACATTAATGGCTTGCCCCATGGATAATAAGCAGGAATTTCCCTTTTAATATTTTAGTCTAAATTAGCCTTCGTACAGGTAATTCAGT----CCTCATTTTC-TGGTCGTTCAATGCAGCATGGATT--AATAA-TTGTT-GATAACCATGGATATCC-TTGAT-CTAGTTGTGTCCCTTGATTTAACA-CTTCCCGTGAAATCCTCTATCCT [...]
+Python_timoriensis              TA-CACCACCA------------------------------AGACCATAGTCGGTAAATC----GACTATGGTCTTTTTACGCCAAAAATACAACCAAAAATCCATATTAATATAGCAATATAAAATAG-------CCCCTCGACCCCCCCCCTACCCCCCCCC-ACAAAAA-TATAAAGAAA------ACCCG-TATGTCATAAACTCCGAATTTTTCCCTATTTTT--GCCCCTATGTATAATCATACATTATTGGCTTGCCCCATGGATAATAAGCAAGAATTCCCTTTTTAATATTTTAGTCTAAAATTGCCTTT-TACAAAAAACTCAGT----CCTCATTTCT-TGGTCGTTCAATGCAGCATGGGCT--AATAA-TTATT-AATAACCATGACTATCC-TTGAT-CTAGTTGTGTCTCTTAGTTTGGTA-CTTCCCGTGAAATCCTCTATCCT [...]
+Python_sebae                    ?????????????????????????????????????????????????????????????????????????????CTTCCTCAGACAC-AAACTCA-ACCTCAAATAAAAATAAAAATAAT-----------CCTACCTCGGCCCCCCCCCTACCCCCCCC--ACTATTT-CATATGGAA-------TACAGGATATATAC-TTTGTTAGAAAAATCCATATTTTTTCTACCCTATGTATAATCTTACATTAATGGCTTGCCCCATGAATAATAAGCGGGAATTCCTAATAAAATATTTTAGCCTAAAATTGCCTTCGTACATAAAATT-AGC---TCCACATTTCTTTGGTCGTTCAATGCTGCANGGATTATAGTAC-TTCTT-AATACACATGACTATCC-TTGAT-CTAGTCGTCTCTCTTAACTTAACA-CTTCCCGTGAAATCCTCTATCCT [...]
+Python_molurus                  ?????????????????????????????????????????????????????????????????????????????CTTCCTCAGACAC-AAACTCA-ACCTCAAATAAAAATAAAAACAAT-----------CCTACCTCGGCCCCCCCCCTACCCCCCCCC-ACTATTT-CATATGGAA-------TACAGGATATATACATTTGTTAGAAAAATCCATATTTTTTCTACCCTATGTATAATCTTACATTAATGGCTTGCCCCATGAATAATAAGCGGGAATTCCTAATAAAATATTTTAGCCTAAAATTGCCTTCGTACATAAAATT-AGC---TCCACATTTCTTTGGTCGTTCAATGCTGCACGGATTATAGTAC-TTCTT-AATACACATGACTATCC-TTGAT-CTAGTCGTCTCTCTTAACTTAACA-CTTCCCGTGAAATCCTCTATCCT [...]
+Python_curtus                   ?CCACAAAA-----------------------------------CCAT-----------------ATTAATYTT--CCCACCTATAAYTA-AACCCGAAATTCCCTATAAA--CACAACAAAAAATA-----CTCCTTCYTCGCCCCCCCCC-TACCCCCCCCCCAC-ATTT-AATATAAGAT------TCTGG--AATATACACACATCGTTAATTTCCATATTTTTT--ATGCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATAATAAGCCGGAATTCCATATTAAATATTTTAGCCTAAAATTGCCTTAGTACCTAAAACT-AGTCCTTCCTCATTTTC-TGGTCGTTCAATGCTGCATGGATT--AATCA-TTCTTTAACAGATATGTCTATCC-TTGAT-CTAGTCGTCTCTCTTAACCTGGCG-CTTCCCGTGAAATCCTCTATCCT [...]
+Python_regius                   ?????????????????????????????????????????????????????????????????????????????TTACCTCAAT----AAACCCAAACCCACTATAAAAATATAA-----------------CCCCCTCGGCCCCCCCCCTTCCCCCCCCC-ACTTACA---TAGGAGGA------TTTAG-ATATATACACATATTAGGATTTTCCCTATCTTTTC-ACCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAACCAGAATTTCCAATTAAATATTTTAACCTAAAATTGCCTTCGTACACTACACC-AGT---CCCTCATTTCT-TGGTCGTTCAATGCTGCACGGATTATAGTAC-TTATT-AATGCTCATGTCTATCC-TTGGT-CTAGTGGTGTCTCTTAGTTTAACA-CTTCCCGTGAAATCCTCTATCCT [...]
+Candoia_aspera                  ?????????????????????????????????????????????????????????????????????????????????????????????????AAA----CTA-------------------------CT-CTCTGG-GACCCCCCCC-TACCCCCCCC--AGATAAACTATACTAAAATTTACCTGAGTACACTATGTAAATATTGTACATTAGTCTATATTTC--ATGCTATGTATAATCATACATTAATGATCTGCCCCATGGATAATAAGCAGGAATTTCCCTATTAATATTTCAGCCTATTAATGCCTTAGTACAGTCAGTGTGTC---ACCACATCAT--GGGTCGTTTTATGCAGCAAGGATTA-ACTA--TTATT-GGTAATCATGCCTATCC--TGATCCAAGTTGTC-CTCTTAATCTACCTA-CTCACGTGAAATCCTCTATCCT [...]
+Morelia_nauta                   ???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+Morelia_clastolepis             ???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+Morelia_tracyae                 ???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+Morelia_kinghorni               ???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
diff --git a/doc/source/examples/pythonidae.fasta b/doc/source/examples/pythonidae.fasta
new file mode 100644
index 0000000..a0c5414
--- /dev/null
+++ b/doc/source/examples/pythonidae.fasta
@@ -0,0 +1,99 @@
+>Xenopeltis unicolor
+???????????????????????????????????????????????????????AAACT--AAAATTCCCATTT-CCCACATATAT----GGATATT--ACGAAG--AAAAAA---GGACTAAAAAAAG---TCCTCTCTCGACCCCCCCCCTACCCCCCCCC-ACAGTT---AGTACGGGT------TTTCC-ATATATGTAACTCTTATAGATTTGCCTATCAAGGC-ATACTATGTATAATCATACATTAATGGCTTGCCCCATGAATATTAAACAGGAATTTCCCTTTAAATATTTTAGCCTAAAAAAGCCTTCGTACAGAACTTTAATA----CCACATTTCT-CAGTCGTTCAATGAAGCACGGAT-ATAGTA--TTGTT-GATAACCATGACTATCC--ACATCCAACTTGTCTTACAGGATCTTGCTA-TTCACGTGAAATCCTCTATCCTTTCATAGCAGGCATACCATTCGACTTCTCACG [...]
+
+>Loxocemus bicolor
+???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+
+>Morelia spilota
+?GCCACACCCCTCACTTCCTCC-------------------CAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAGCCAAAAATCCATATAATTTACCACAAAATAAAG-----CTCTCTC-TCGGCCCCCCCCCTACCCCCCCCC---AARAA-CATTGGGGAR------ACCGGCACACAAAACCA--TTARAAAACTCTTAACAAACCT--CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAATATTCTG----TCCTCATTCTCTTGGTCGTTCTATGCAGCACGAGTT--AACTA-ATCTT-ATTAATCATGGATATTC-TCAAC-CTAAGGGTGTCTCTTAGTCTAGCG-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Morelia bredli
+?GCCAC-CCCCTCACTTCCT---------------------TAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAACCAAAAATCCATATAATTTACCACAAAATAAAG-----YTYTYTY-TYGGCCCCCCCCCTACMCCCCCCC--AAAGAA-CATTGGGAAA------ACCGGCACACAAAACTA--TTAGAAAACTCTTAACAAACCC--CTCTATGTATAATCTTACATTAATGGTTTGCCTCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAACATTCCG----TCCTCATTCTCCTGGTCGTTCTATGCAGCATGAGTT--AACCA-ATCTT-ATTGATCATGGATATTC-TTAAC-CTAAGGGTGTCTCTTATCCTAGCA-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Morelia carinata
+?GCCACAACCCTCACTTCCT--------------------AAAACCATAGTCTGTAAA--TACAGACTATGGT--TCTTACCTCAATATA-AAGCCAAAAACCCATATAAAACGC-ACACAATAAAACG---CTCTC-C-TCGGCCCCCCCCCTACCCCCCCC--ATAATAAACATAGGAGAA------ATCAGCACACAAAACTA--CTGAAGATACCCCCTCATCTCT--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGTCTAAATAAGCCTTCGTACAGAATATTTAG----TCCTCATTTTC-TGGTCGTTCAATGCAACACGGATT--AATGG-ATCTT-ACTAACCATGGCTATCC-TTGAT-CAAGKGGKGTCTYTTAATCTAGTA-CTTCCCGTGAAACCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTYTCACG [...]
+
+>Morelia amethistina
+?ACCACACCCCTCACTTCCTC--------------------CAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAGCCAAAAATCCATATAATTTACCACAAAATAAAG-----CTCTCTC-TCGGCCCCCCCCCTACCCCCCCCC--AAAAAAACATTGGGGAA------ACCGGCACACAAAACCA--TTAAAAAACTCTTAACAAACCT--CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAATATTCTG----TCCTCATTTTCTTGGTCGTTCTATGCAGCATGAGCT--AACTA-ATCTT-ATTAATCATGGATATTC-TTAAC-CTARGGGTGTCTCTTAGTCTAGCG-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Morelia oenpelliensis
+?GCCAC-MCCCTCACTTCCT---------------------TAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAACCAGAAATCCATATAATTTACCACCAAATAAAG-----YTYTYTY-TYGGCCCCCCCCCTACCCCCCCCC--AAAGAA-CATTGGGAGA------ACCGGCACACAAAATTA--TTAGAAGACTTTTAACATACCC--CTCTATGGATAATTTTACATTAATGGTTTGCCTCATGRATATTAAGCAGGGAWTTCCCTTTTATTATTTTAGTCTAAAACGGCCCTTGTACCAGACATTCCG----TCCTCATTCTCCTGGTCGTTCTATGCAGCATGAGTT--AACCA-ATCTT-ATTGATCATGGATATTCCTTGAC-CTAAGGGTGTSTCTTATCCTAGCA-CTTCCCGTGAAATCCT-TATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Morelia boeleni
+??AAACAACCCTCACTTCCTT--------------------CAACCATAGTCTGGA---TTCCAGACTATGGT--TGTTACCTAAAAAACTAAAGAAAAAATCCATATAAAC----------TAAAAA----CTCTCTC-TCGGCCCCCCCCCTACMCCCCCC---GGGTCAGCACAAAAAC-------ATCAC---------------CCAAAAATCCCCCTTTTT-CC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATAATAAGCAGGAATTTCCCTTTAAATATTTTAGTCTAAAATAGCCTTT-TACATAAAATTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAT-ATCTT-ATTGATCATGGATATCC-TTGGT-CTAATGGTGTCTCTTAGTCTAACA-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Morelia viridisS
+?A-ATCAACCCTCACTTCCTCC-------------------TAGCCATAGTCTGTAAG-TTACAGACTATGGCT--CATGCCTTAATATATAAACCAAAAACCCATATAAT-CACTGAACAATAAAA-----CTYTYTYCTCGGCCCCCCCCCTACCCCCCCC---GGAAAACCATAAAA---------ATCAGCACATAAATAAA--CCTACTAATCCCATTGCTTCCT---CCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAATTAGCTTCCGTACAAAATATCTAG----CCCTCATTTTC-TGGTCGTTCAATGCAATCGGGGTT--AATAA-ATCTT-ACTAACCATGGATATCC-TTGAT-CAGGTGGTGTCTCTTAATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Morelia viridisN
+?A-CTCAACCCTCACTTCCTTC-------------------CAGCCATAGTCTGTAAA-TTACAGGCTATGGCT--CATACCTTGATATATAAACCAAAAACCCATATAATTCACCACACAACAAAA-----CTCTCTCCTCGGCCCCCCCC-TACCCCCCCCC--GGAAAAACATAGAAGAA------GTCAGCACAATTAAACT--TACTGATAACCCCTTGCTTCCT--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAAATAGCCTTTGTACAAAATACCTTG----TCCTCATTTTC-TGGTCGTYCAATGCAATCGGGTCT--AACAA-ATCTT-ACTAACCATGGATATCC-TTGAT-CAAGTTGTGTCTCTTAATCTAGTAACTTCCCGTGAAATCCTCTATCCT-CCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Liasis olivaceus
+?GCCACAACCCTCACTTCCCC-----------------ACCTAACCATAGTCTGTAAA-TTACAGACTATGGT--TGATACCTTAATACA-AAGCCGAAACCCCATATAAACAGCACCACAACAAAA----CTCTACTC-TCGGCCCCCCCCCTACMCCCCCCC--ACAAAAACATAGGARAA------ATCAGCACAAACAATC---MCCTAAAATCCCCCCTTAACCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGTCTGAATTAGCCCTTGTACAAAAAATCTTG----TCCTCATTTTC-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Liasis mackloti
+?ACCACAACCCTCACTTCCTT--------------------CAGCCATAGTCTGTAA-TTTACAGGCTATGGC--TGATACCTTAATATA-AAACCAAAATCCCATATAAATACCACCACAACAAAG-----CTCTCTC-TCGGCCCCCCCCCTACMCCCCCCC--ACCAAAACATAGAARAA------ATCAGCACAATAAATA---CTARAAGTATTTGCTTCCTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTTAGCAGGAATTTCCCTTTAAATATTTTAGCCTAAAATAGCCTTTGTACACAAAACTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATTTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCATGATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Liasis fuscus
+?ACCACAACCCTCACTTCCTC--------------------CAGCCATAGTCTGTAA-TTTACAGGCTATGGC--TGATACCTTAATATA-AAACCAAAATCCCATATAAATACCACCACAACAAAG-----CTCTCTY-TCGGCCCCCCCCCTACCCCCCCCC--ACCAAAACATAGAAGAA------ATCAGCACA-AAATAACA-CTAGAAGTATTACTTCCTTGCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTTAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAAATAGCCTTCATACATAAAATTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATGG-ATTTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCATGATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Liasis albertisii
+????GCTCCTCTCACTTCCTC--------------------AGACCACAGTCTGCAA---TGCAGACTGTGGTTTTGTGCCCAGAATATA--AACCAAAAAACCATATAAACAACACCRCGACAAAAAAGA--TCTCTC-TCGGCCCCCCCCMTACMCCCCCCC--AAAAAAACATARAGGAA------ATCAG--------------------TTCATARACT--------CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAATCTAAATTAGCCTTCGTACACAAAATTCAG----TCCTCATTCTC-TGGTCGTTCAATGCAGCACGGATT--AATCA-GTCTT-ACTAACCATGGATATCC-TTGAT-CTAGTCGTCTCTCTTAGTCTAACA-CTTCCCGTGAAACCCTCTATCCTTCCACTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Apodora papuana
+?GCCACAACCCTCAMTTCCTT--------------------CAGCCACAGTYTGTAA-TTTACAGACTGTGGC--CCATGCCTCAATATA-AAGCCGAAAATCCATATAAATAACACCAAAACAAAG-----CTYTCCC-TYGGCCCCCCCCCTACCCCCCCCC--AAAAAAATATAGAGAA------CTATAGAACAAATAACCA---CCAAGAAGTTCACTATCCCCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTTCCCTTTAAATATTTTAGTCTAAATATGCCCTTGTACACAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCCAGGAAT--AATCA-ATCTT-ATTAACCATGGATATCC-TTGAT-CTAGTGGTGTCTCTTGGTCTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Bothrochilus boa
+??GCACCGCCCTCACTTCCTC--------------------CGACCGCAGTCTGCC----AGCAGGCTGCGGTC-GCATGCCCAAAAACACAAACCAAAAAACCATATAAACAACGCCGCAACAAAAGG----YCYCYC-TCGGCCCCCCCCCTAC-CCCCCCC-ACAAAAAACATAGAGAAA------ATCAG--------------------TTTTCACAC---------CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAATCTAAAATAGCCTTTGTACATAAAATTTGC----CCCTCATTTCT-TGGTCGTTCAATGCAGCATGGATT--AATCA-GTCTT-ATTAACCATGGATATTC-TCAGT-CTAGTTGTGTCTCTTAGCCTAACA-CTTCCCGTGAAATCCTCTATCCTTCCACTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Antaresia maculosa
+?ACCACAACCCTCACTTCCTCC--------------------AGCCATAGTCTGTAAATTTACAGACTATGGC--TGATACCTCAACATA-CAGCCAAAATTCCATATAATAT-CCCCACAACAA-----CTYTYTYTCYTYGGCCCCCCCCCTACCCCCCCC---ATCCAAATATATAAGAA------ATCAGCACAATAAACCT--ACTAGGAATTGCCAATAACTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTCAAATATTTTAGTCTAAAATAGCCTTTGTACAGAATATTTAG----TCCTCATTTCT-TGGTCGTTCAATGCAACACGGATT---ATCAGTTCTT-ACTAACCATGGATATCC-TTGAT-CTAGTGGTGTCTCTTAATCTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Antaresia stimsoni
+?ACCACAACCCTCAMTTCCTTTCAGCCATAGTCTGTAAATACAGCCATAGTCTGTAAA--TACAGACTATGGC--TGATACCGCCATATA-GAGCCGAAAACCCATATAATATGCCACACAATAAA------CTYTYTCCTYGGCCCCCCCCCTACCCCCCCCC--ATTAAAACATATGGGAA------AACAGCACAAATACATA--TTAAAGAATGTCCAATTAATCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGCCTAAAATGTCCTTCGTACAGAATATTAAG----TCCTCATTTTC-TGGTCGTTCAATGCAATCAGGATT--AATCA-TTCTT-ACTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCTTAATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Antaresia childreni
+?GCCACAACCCTCACTTCCTTCCAGCCATAGTCTGTAAATACAGCCATAGTCTGTAAA--TACAGACTATGGC--TGATGCCGCCATATA-GAGCCGAAAAACCATATAATATACCACACAATAAA------CTCTCTCCTCGGCCCCCCCCCTACCCCCCCCC--ATTAAAACATATGGGAA------AGCAGCACAAATACATA--TTAAAGAATGTCCAATTAATCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGTCTAAAATGTCCTTTGTACAGAATATTTAG----CCCTCATTCTC-TGGTCGTTCAATGCAATCGGGATT--AATCA-TTCTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCTTGATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTTCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Antaresia perthensis
+?ATCA-AACCC------------------------AAAACTAAGCCACAGCCTGTTT--AAACAGGCTGTGGC--TGATGCCGCCATACA-AAGCCGAAATTCCATATAACACACCACAATATAAA------CTYTYTCCTYGGCCCCCCCCCTACCCCCCCCC--AACCAAACATATAAGAA------AACAGAACAGTGAACAA--TTAGAGATTCTCCAATTAACTC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGGATATTAAGCAGGAATTTCCCTTCAAATATTTTAGTCTAAAATAGCCTTTGTACAGTCTATTTAG----TCCTCATTTTC-TGGTCGTTCAATGCAGCATGGATT--AATCA-TTCTT-ACCGATCATGACTATCC-TTGAT-CTAGTGGTGTCTCTTAATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCCTAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Antaresia melanocephalus
+?ACCACA----------CCTTCC-----------------CCAACCATAGTCTGTAACC--ACAGACTATGGT--CGATGTCTCAATATA-AAGCCAAAAATCTATATAAATAAA-ACACAATAAAG-----CTCTCTCCTCGGCCCCCCCC-TACMCCCCCC--ACAAGAAATATAGAAGAA------ACCAGCACATAAGACTA--TAAGGATTCCCCCCTTCTTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTCCCATTTAAATATTTTAATCTAAATTTGCCTTTGTACTTAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGCT-CTTCCCGTGAAATCCTCTATCCTTCCTCTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Antaresia ramsayi
+?ACCACG----------CCTTCC-----------------CCA-CCATAGTCTGTAAA-TTACAGACTATGGT--CGTTGCCTCAACATA-AAGCCAAAAACCCATATAAACAAAAC--ATATAAA----CTCTCTCTCCTCGACCCCCCCC-TACCCCCCCC--ACAAGAAATATAGAAGAA------ACCAGCACATAAGACTA--TAAGGATTTCCCCCTCCTTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTCCCATTTAAATATTTTAATCTAAATT-GCCTTCGTACCTAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGCT-CTTCCCGTGAAATCCTCTATCCTTCCTCTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Python reticulatus
+??CCATCACCCTCACTTCCTCC--------------------AACCATAGCCAAATA-TTT---GGCTATGGTT-TCATGCCAAAATATATCAACCAAAAACCCATATTAATATAATGCTATAAAATGG-------TCCCTCGACCCCCCCCCTACCCCCCCC--AAAAAA--CATAAGGAAA------GTCCG-CACATCATAAACCTCGTACTTTTCCCTATTTTTT-GCTCCTATGTATAATCTTACATTAATGGCTTGCCCCATGGATAATAAGCAGGAATTTCCCTTTTAATATTTTAGTCTAAATTAGCCTTCGTACAGGTAATTCAGT----CCTCATTTTC-TGGTCGTTCAATGCAGCATGGATT--AATAA-TTGTT-GATAACCATGGATATCC-TTGAT-CTAGTTGTGTCCCTTGATTTAACA-CTTCCCGTGAAATCCTCTATCCTTCCGCGTAATGCTAACCATTCGACTTCTCACG [...]
+
+>Python timoriensis
+TA-CACCACCA------------------------------AGACCATAGTCGGTAAATC----GACTATGGTCTTTTTACGCCAAAAATACAACCAAAAATCCATATTAATATAGCAATATAAAATAG-------CCCCTCGACCCCCCCCCTACCCCCCCCC-ACAAAAA-TATAAAGAAA------ACCCG-TATGTCATAAACTCCGAATTTTTCCCTATTTTT--GCCCCTATGTATAATCATACATTATTGGCTTGCCCCATGGATAATAAGCAAGAATTCCCTTTTTAATATTTTAGTCTAAAATTGCCTTT-TACAAAAAACTCAGT----CCTCATTTCT-TGGTCGTTCAATGCAGCATGGGCT--AATAA-TTATT-AATAACCATGACTATCC-TTGAT-CTAGTTGTGTCTCTTAGTTTGGTA-CTTCCCGTGAAATCCTCTATCCTTCCGCGTAATGCTAACCATTCGACTTCTCACG [...]
+
+>Python sebae
+?????????????????????????????????????????????????????????????????????????????CTTCCTCAGACAC-AAACTCA-ACCTCAAATAAAAATAAAAATAAT-----------CCTACCTCGGCCCCCCCCCTACCCCCCCC--ACTATTT-CATATGGAA-------TACAGGATATATAC-TTTGTTAGAAAAATCCATATTTTTTCTACCCTATGTATAATCTTACATTAATGGCTTGCCCCATGAATAATAAGCGGGAATTCCTAATAAAATATTTTAGCCTAAAATTGCCTTCGTACATAAAATT-AGC---TCCACATTTCTTTGGTCGTTCAATGCTGCANGGATTATAGTAC-TTCTT-AATACACATGACTATCC-TTGAT-CTAGTCGTCTCTCTTAACTTAACA-CTTCCCGTGAAATCCTCTATCCTTTCATA-CATGCTAACCATTCGACTTCTCACG [...]
+
+>Python molurus
+?????????????????????????????????????????????????????????????????????????????CTTCCTCAGACAC-AAACTCA-ACCTCAAATAAAAATAAAAACAAT-----------CCTACCTCGGCCCCCCCCCTACCCCCCCCC-ACTATTT-CATATGGAA-------TACAGGATATATACATTTGTTAGAAAAATCCATATTTTTTCTACCCTATGTATAATCTTACATTAATGGCTTGCCCCATGAATAATAAGCGGGAATTCCTAATAAAATATTTTAGCCTAAAATTGCCTTCGTACATAAAATT-AGC---TCCACATTTCTTTGGTCGTTCAATGCTGCACGGATTATAGTAC-TTCTT-AATACACATGACTATCC-TTGAT-CTAGTCGTCTCTCTTAACTTAACA-CTTCCCGTGAAATCCTCTATCCTTTCATA-CATGCTAACCATTCGACTTCTCACG [...]
+
+>Python curtus
+?CCACAAAA-----------------------------------CCAT-----------------ATTAATYTT--CCCACCTATAAYTA-AACCCGAAATTCCCTATAAA--CACAACAAAAAATA-----CTCCTTCYTCGCCCCCCCCC-TACCCCCCCCCCAC-ATTT-AATATAAGAT------TCTGG--AATATACACACATCGTTAATTTCCATATTTTTT--ATGCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATAATAAGCCGGAATTCCATATTAAATATTTTAGCCTAAAATTGCCTTAGTACCTAAAACT-AGTCCTTCCTCATTTTC-TGGTCGTTCAATGCTGCATGGATT--AATCA-TTCTTTAACAGATATGTCTATCC-TTGAT-CTAGTCGTCTCTCTTAACCTGGCG-CTTCCCGTGAAATCCTCTATCCTTTCATA-CATGCTAACCATTCGACTTCTCACG [...]
+
+>Python regius
+?????????????????????????????????????????????????????????????????????????????TTACCTCAAT----AAACCCAAACCCACTATAAAAATATAA-----------------CCCCCTCGGCCCCCCCCCTTCCCCCCCCC-ACTTACA---TAGGAGGA------TTTAG-ATATATACACATATTAGGATTTTCCCTATCTTTTC-ACCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAACCAGAATTTCCAATTAAATATTTTAACCTAAAATTGCCTTCGTACACTACACC-AGT---CCCTCATTTCT-TGGTCGTTCAATGCTGCACGGATTATAGTAC-TTATT-AATGCTCATGTCTATCC-TTGGT-CTAGTGGTGTCTCTTAGTTTAACA-CTTCCCGTGAAATCCTCTATCCTTTCATA-CATGCTAACCATTCGACTTCTCACG [...]
+
+>Candoia aspera
+?????????????????????????????????????????????????????????????????????????????????????????????????AAA----CTA-------------------------CT-CTCTGG-GACCCCCCCC-TACCCCCCCC--AGATAAACTATACTAAAATTTACCTGAGTACACTATGTAAATATTGTACATTAGTCTATATTTC--ATGCTATGTATAATCATACATTAATGATCTGCCCCATGGATAATAAGCAGGAATTTCCCTATTAATATTTCAGCCTATTAATGCCTTAGTACAGTCAGTGTGTC---ACCACATCAT--GGGTCGTTTTATGCAGCAAGGATTA-ACTA--TTATT-GGTAATCATGCCTATCC--TGATCCAAGTTGTC-CTCTTAATCTACCTA-CTCACGTGAAATCCTCTATCCTTCCAAGAATGGCTAACAGTCCTGCTTTTCACG [...]
+
+>Morelia nauta
+???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+
+>Morelia clastolepis
+???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+
+>Morelia tracyae
+???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+
+>Morelia kinghorni
+???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+
diff --git a/doc/source/examples/pythonidae.mb.run1.t b/doc/source/examples/pythonidae.mb.run1.t
new file mode 100644
index 0000000..f04449d
--- /dev/null
+++ b/doc/source/examples/pythonidae.mb.run1.t
@@ -0,0 +1,108 @@
+#NEXUS
+
+Begin Trees;
+tree 1 = (((Candoia_aspera:0.1,Antaresia_melanocephalus:0.1):0.1,(Python_reticulatus:0.1,(Morelia_carinata:0.1,((Python_sebae:0.1,(Morelia_boeleni:0.1,(Antaresia_perthensis:0.1,(Liasis_olivaceus:0.1,Morelia_amethistina:0.1):0.1):0.1):0.1):0.1,((((Python_curtus:0.1,Bothrochilus_boa:0.1):0.1,((Morelia_nauta:0.1,Morelia_kinghorni:0.1047997):0.1067979,(((Python_regius:0.1,Python_molurus:0.1):0.1,Antaresia_stimsoni:0.1):0.1,Morelia_oenpelliensis:0.1):0.1):0.1):0.1,Morelia_bredli:0.1):0.109212 [...]
+tree 2 = (Candoia_aspera:0.4762597,(Loxocemus_bicolor:0.3099459,(((((Morelia_carinata:0.06391194,(Morelia_viridisS:0.05292008,Morelia_viridisN:0.02868007):0.02046695):0.008016698,(Antaresia_maculosa:0.06145015,((Antaresia_childreni:0.01912568,Antaresia_stimsoni:0.01455259):0.0399552,Antaresia_perthensis:0.07503277):0.01655454):0.01033184):0.01796919,((((Morelia_oenpelliensis:0.06645163,((((Morelia_nauta:0.005312666,Morelia_kinghorni:0.005963152):0.00842691,Morelia_clastolepis:0.001486153 [...]
+tree 3 = ((((((Python_molurus:0.03712269,Python_sebae:0.06592633):0.01511683,Python_curtus:0.09088368):0.009835215,Python_regius:0.1158111):0.03967019,(((Morelia_boeleni:0.07332749,((((((Morelia_clastolepis:0.002847526,(Morelia_kinghorni:0.009421269,Morelia_nauta:0.01055463):0.004331489):0.008518643,Morelia_amethistina:0.02552891):0.009771246,Morelia_tracyae:0.03928368):0.02558274,Morelia_oenpelliensis:0.05809432):0.00247521,(Morelia_spilota:0.02792733,Morelia_bredli:0.02176933):0.037122 [...]
+tree 4 = ((((((Morelia_boeleni:0.07563666,(((Apodora_papuana:0.06307092,(Liasis_olivaceus:0.03196349,(Liasis_fuscus:0.01951866,Liasis_mackloti:0.01292934):0.04061255):0.01538865):0.01399516,(Antaresia_ramsayi:0.03770951,Antaresia_melanocephalus:0.04296104):0.05789423):0.002908893,(Bothrochilus_boa:0.05269564,Liasis_albertisii:0.04802604):0.02949775):0.009274073):0.003279487,(((Morelia_spilota:0.02118692,Morelia_bredli:0.03211151):0.03342758,(((Morelia_amethistina:0.0251708,((Morelia_naut [...]
+tree 5 = ((Loxocemus_bicolor:0.3065334,Candoia_aspera:0.5468094):0.03675571,((((Morelia_boeleni:0.08892604,(((Morelia_tracyae:0.04687283,((Morelia_clastolepis:0.002615561,(Morelia_kinghorni:0.01074123,Morelia_nauta:0.009297935):0.002390255):0.0155873,Morelia_amethistina:0.02747446):0.002329138):0.0294499,Morelia_oenpelliensis:0.05901609):0.006529936,(Morelia_bredli:0.03601919,Morelia_spilota:0.02157804):0.0297887):0.03700966):0.004366086,((((Antaresia_melanocephalus:0.02525966,Antaresia_ [...]
+tree 6 = ((((Python_regius:0.1199266,((Python_sebae:0.07556567,Python_molurus:0.03590909):0.02124586,Python_curtus:0.08389774):0.03731226):0.02911354,((Python_timoriensis:0.06878349,Python_reticulatus:0.04067773):0.07340706,((((Morelia_spilota:0.02607113,Morelia_bredli:0.02415925):0.03316417,((Morelia_amethistina:0.02002678,(Morelia_tracyae:0.03658112,(Morelia_clastolepis:0.002892779,(Morelia_kinghorni:0.01044691,Morelia_nauta:0.006823334):0.004765074):0.01389969):0.004826785):0.0482326, [...]
+tree 7 = ((Loxocemus_bicolor:0.2622645,((((((Antaresia_maculosa:0.08275333,((Antaresia_childreni:0.02108157,Antaresia_stimsoni:0.01468215):0.04234037,Antaresia_perthensis:0.08253806):0.01070602):0.01162065,((Morelia_viridisN:0.0339944,Morelia_viridisS:0.04631423):0.01897653,Morelia_carinata:0.06863713):0.01176408):0.02333936,(((Liasis_fuscus:0.02156543,Liasis_mackloti:0.009183889):0.04837197,(Liasis_olivaceus:0.03342706,Apodora_papuana:0.06321774):0.01467099):0.01747876,(Antaresia_ramsay [...]
+tree 8 = (Candoia_aspera:0.3955487,(Loxocemus_bicolor:0.2885792,(((Python_reticulatus:0.04955104,Python_timoriensis:0.07814811):0.06712344,(((((((Morelia_amethistina:0.02679172,((Morelia_nauta:0.007670258,Morelia_kinghorni:0.006021479):0.004993893,Morelia_clastolepis:0.005375612):0.01302051):0.007462117,Morelia_tracyae:0.05007623):0.02318133,Morelia_oenpelliensis:0.06267219):0.006399419,(Morelia_spilota:0.02123433,Morelia_bredli:0.02676302):0.03443132):0.01874034,Morelia_boeleni:0.090095 [...]
+tree 9 = (Candoia_aspera:0.4470348,((((Python_curtus:0.1002054,(Python_sebae:0.07321433,Python_molurus:0.03805384):0.02352157):0.01523374,Python_regius:0.1328755):0.03857945,((((((Morelia_spilota:0.02935402,Morelia_bredli:0.01963859):0.03620548,(Morelia_oenpelliensis:0.05849876,(Morelia_tracyae:0.05676993,(Morelia_amethistina:0.01685644,(Morelia_clastolepis:0.006751504,(Morelia_nauta:0.007222068,Morelia_kinghorni:0.005522163):0.003674505):0.02162493):0.004586566):0.04411381):0.008189308) [...]
+tree 10 = ((((Python_regius:0.106843,(Python_curtus:0.1020408,(Python_sebae:0.08391912,Python_molurus:0.04121956):0.01197351):0.02101561):0.03577634,((((((Apodora_papuana:0.0566616,((Liasis_mackloti:0.01371158,Liasis_fuscus:0.02202989):0.04286348,Liasis_olivaceus:0.04303408):0.00436549):0.03190665,(Antaresia_ramsayi:0.03342611,Antaresia_melanocephalus:0.06189206):0.04868849):0.004697558,(((Morelia_viridisS:0.05331399,Morelia_viridisN:0.02643216):0.02252944,Morelia_carinata:0.06909746):0. [...]
+tree 11 = (Candoia_aspera:0.3846478,(Loxocemus_bicolor:0.2549529,((((Python_sebae:0.06807004,Python_molurus:0.03043545):0.01465748,Python_curtus:0.09600346):0.01247736,Python_regius:0.09841203):0.03751737,((Python_timoriensis:0.06586915,Python_reticulatus:0.04536943):0.07520284,(((((Antaresia_childreni:0.01730015,Antaresia_stimsoni:0.01262602):0.02887657,Antaresia_perthensis:0.06405242):0.01385353,Antaresia_maculosa:0.07585236):0.008884809,(Morelia_carinata:0.05554095,(Morelia_viridisS:0 [...]
+tree 12 = ((Loxocemus_bicolor:0.2306511,((Python_regius:0.1249936,(Python_curtus:0.1161678,(Python_sebae:0.07594277,Python_molurus:0.04880214):0.01050635):0.009484123):0.05837598,((((Bothrochilus_boa:0.06362487,Liasis_albertisii:0.04429535):0.05290393,(Morelia_boeleni:0.0975965,(((Morelia_tracyae:0.02920131,(Morelia_amethistina:0.02703314,((Morelia_kinghorni:0.0078452,Morelia_nauta:0.005866894):0.005442751,Morelia_clastolepis:0.003560364):0.01476575):0.008177027):0.02992863,Morelia_oenpe [...]
+tree 13 = (Candoia_aspera:0.5080599,(Loxocemus_bicolor:0.2965857,(((Python_reticulatus:0.05785765,Python_timoriensis:0.07319806):0.05928309,(((Bothrochilus_boa:0.06122711,Liasis_albertisii:0.06414279):0.03891576,(((((Liasis_fuscus:0.01426001,Liasis_mackloti:0.01729048):0.04258978,Liasis_olivaceus:0.04600247):0.0113502,Apodora_papuana:0.05986282):0.01038398,(Antaresia_ramsayi:0.03308029,Antaresia_melanocephalus:0.0411259):0.04100124):0.01325467,((Morelia_carinata:0.06009035,(Morelia_virid [...]
+tree 14 = (((((Python_reticulatus:0.06098649,Python_timoriensis:0.06479356):0.06867842,((Morelia_boeleni:0.1040542,((Bothrochilus_boa:0.05386548,Liasis_albertisii:0.06105181):0.04408371,(((Liasis_olivaceus:0.04827163,(Liasis_fuscus:0.01955875,Liasis_mackloti:0.01269835):0.03672677):0.01628567,Apodora_papuana:0.06293987):0.03062699,(Antaresia_ramsayi:0.03533973,Antaresia_melanocephalus:0.04486755):0.05072205):0.00297664):0.004512241):0.001240846,(((Morelia_carinata:0.05786984,(Morelia_vir [...]
+tree 15 = (((((Python_reticulatus:0.05439983,Python_timoriensis:0.08321393):0.04871133,((Morelia_boeleni:0.08585758,((Bothrochilus_boa:0.05362335,Liasis_albertisii:0.04403941):0.03032729,((Morelia_oenpelliensis:0.05763201,(((Morelia_kinghorni:0.00341021,Morelia_clastolepis:0.006671437):0.007917761,Morelia_nauta:0.001951335):0.01049725,(Morelia_tracyae:0.05625849,Morelia_amethistina:0.02860654):0.0006372106):0.03067896):0.01095649,(Morelia_spilota:0.02808814,Morelia_bredli:0.03112591):0.0 [...]
+tree 16 = (((((Python_reticulatus:0.04805722,Python_timoriensis:0.05322303):0.0782823,(((((Antaresia_perthensis:0.06330741,(Antaresia_childreni:0.02077168,Antaresia_stimsoni:0.01869424):0.03122662):0.01200088,Antaresia_maculosa:0.05309401):0.006744738,(Morelia_carinata:0.073235,(Morelia_viridisN:0.03121673,Morelia_viridisS:0.04001572):0.01598083):0.01197911):0.01512452,((Apodora_papuana:0.03974916,((Liasis_fuscus:0.01597086,Liasis_mackloti:0.01058074):0.05649681,Liasis_olivaceus:0.043976 [...]
+tree 17 = ((Loxocemus_bicolor:0.2487605,((Python_regius:0.1233404,(Python_curtus:0.1077844,(Python_molurus:0.0373181,Python_sebae:0.0759607):0.02438578):0.01916703):0.03225566,((Python_reticulatus:0.06111238,Python_timoriensis:0.06333372):0.06545558,(((Morelia_spilota:0.0228283,Morelia_bredli:0.02804907):0.03880804,((((Morelia_nauta:0.001533473,(Morelia_clastolepis:0.003356968,Morelia_kinghorni:0.007887915):0.005664857):0.01199247,Morelia_amethistina:0.02933171):0.01226355,Morelia_tracya [...]
+tree 18 = ((Loxocemus_bicolor:0.2507472,((Python_regius:0.1105118,(Python_curtus:0.1086967,(Python_sebae:0.07728629,Python_molurus:0.0423532):0.0213407):0.01968316):0.06155854,((Python_timoriensis:0.07817613,Python_reticulatus:0.07259407):0.07650008,(((((Morelia_viridisS:0.0465201,Morelia_viridisN:0.02961522):0.02715512,Morelia_carinata:0.07196027):0.01556349,(Antaresia_maculosa:0.06562058,((Antaresia_stimsoni:0.01004443,Antaresia_childreni:0.02254039):0.04174026,Antaresia_perthensis:0.0 [...]
+tree 19 = (Candoia_aspera:0.3965166,(Loxocemus_bicolor:0.2679677,((Python_regius:0.1029863,(Python_curtus:0.09892457,(Python_sebae:0.07480435,Python_molurus:0.04166847):0.01203458):0.03734556):0.02161339,((Python_timoriensis:0.0483175,Python_reticulatus:0.05879262):0.06698997,(((Bothrochilus_boa:0.06842803,Liasis_albertisii:0.05331119):0.0440924,((Antaresia_melanocephalus:0.0347434,Antaresia_ramsayi:0.03256232):0.03610559,(Apodora_papuana:0.07975386,((Liasis_mackloti:0.01064635,Liasis_fu [...]
+tree 20 = (Candoia_aspera:0.4488251,(Loxocemus_bicolor:0.2850208,((Python_regius:0.1072431,(Python_curtus:0.07993694,(Python_sebae:0.07800197,Python_molurus:0.03348335):0.01611311):0.008776165):0.03537475,((Python_reticulatus:0.05680797,Python_timoriensis:0.06572175):0.05325016,((((Antaresia_perthensis:0.06941898,(Antaresia_childreni:0.0139039,Antaresia_stimsoni:0.0148759):0.04123835):0.00879989,Antaresia_maculosa:0.06008866):0.007626708,(Morelia_carinata:0.07607666,(Morelia_viridisN:0.0 [...]
+tree 21 = ((((Python_timoriensis:0.07117878,Python_reticulatus:0.04821365):0.07463146,(((((Antaresia_perthensis:0.08739655,(Antaresia_stimsoni:0.01621692,Antaresia_childreni:0.02665731):0.04022228):0.008193846,Antaresia_maculosa:0.07342905):0.001017085,((Morelia_viridisS:0.04622026,Morelia_viridisN:0.03667079):0.02958224,Morelia_carinata:0.06285014):0.01282166):0.02229121,(((((Morelia_clastolepis:0.003050844,(Morelia_kinghorni:0.003807813,Morelia_nauta:0.01326833):0.002467171):0.00738360 [...]
+tree 22 = ((Loxocemus_bicolor:0.196467,Candoia_aspera:0.4000064):0.04033051,((Python_regius:0.1172232,((Python_sebae:0.07022621,Python_molurus:0.03127386):0.01725967,Python_curtus:0.1201985):0.009464382):0.03286576,((Python_timoriensis:0.054655,Python_reticulatus:0.06552768):0.04240201,(((((Antaresia_perthensis:0.05941156,(Antaresia_stimsoni:0.009209314,Antaresia_childreni:0.02528722):0.03086887):0.01323638,Antaresia_maculosa:0.05655764):0.01615778,((Morelia_viridisS:0.03740166,Morelia_v [...]
+tree 23 = (Candoia_aspera:0.4592592,(((Python_regius:0.1337113,((Python_sebae:0.05325812,Python_molurus:0.04577269):0.01464595,Python_curtus:0.09770408):0.01604463):0.0451044,((Python_timoriensis:0.07054697,Python_reticulatus:0.06483624):0.07321957,((((Morelia_viridisS:0.04158552,Morelia_viridisN:0.03267045):0.0167798,Morelia_carinata:0.06104408):0.02706362,(Antaresia_maculosa:0.0674958,(Antaresia_perthensis:0.06282181,(Antaresia_stimsoni:0.02007935,Antaresia_childreni:0.01562126):0.0348 [...]
+tree 24 = (Candoia_aspera:0.416012,(((((Python_sebae:0.08152061,Python_molurus:0.04775603):0.02331853,Python_curtus:0.09187623):0.0268565,Python_regius:0.1253132):0.04677824,((Python_reticulatus:0.04722766,Python_timoriensis:0.06954232):0.05724026,((((Morelia_spilota:0.02565724,Morelia_bredli:0.02998293):0.03298402,((((Morelia_nauta:0.002384214,(Morelia_clastolepis:0.005820083,Morelia_kinghorni:0.004446333):0.001406536):0.008647775,Morelia_tracyae:0.05487271):0.01087026,Morelia_amethisti [...]
+tree 25 = ((Loxocemus_bicolor:0.2006045,Candoia_aspera:0.5955799):0.0444977,((Python_regius:0.1046038,((Python_sebae:0.08121141,Python_molurus:0.04485486):0.02542729,Python_curtus:0.09084656):0.0145274):0.05230496,((Python_timoriensis:0.07903435,Python_reticulatus:0.07460281):0.05455651,(((Bothrochilus_boa:0.05585555,Liasis_albertisii:0.05123584):0.03234854,(Morelia_boeleni:0.09132604,((Morelia_oenpelliensis:0.07046467,(Morelia_amethistina:0.0232165,(Morelia_tracyae:0.05046845,((Morelia_ [...]
+tree 26 = ((((((Python_sebae:0.09108678,Python_molurus:0.04955886):0.01999562,Python_curtus:0.1059653):0.01601986,Python_regius:0.1440779):0.02435115,((Python_reticulatus:0.06048541,Python_timoriensis:0.06863296):0.07562467,(((((Morelia_viridisN:0.03454344,Morelia_viridisS:0.05635091):0.02151505,Morelia_carinata:0.06198796):0.01203427,(Antaresia_maculosa:0.0845287,((Antaresia_childreni:0.03515405,Antaresia_stimsoni:0.007169672):0.04130242,Antaresia_perthensis:0.05833049):0.009794389):0.0 [...]
+tree 27 = (((((Python_timoriensis:0.05751875,Python_reticulatus:0.05970213):0.09181866,(((Antaresia_maculosa:0.06428122,((Antaresia_stimsoni:0.02158674,Antaresia_childreni:0.03257027):0.04245471,Antaresia_perthensis:0.06523106):0.01066557):0.008685391,((Morelia_viridisS:0.05196139,Morelia_viridisN:0.03425494):0.02249345,Morelia_carinata:0.07069562):0.01909782):0.01948604,((((Bothrochilus_boa:0.06369204,Liasis_albertisii:0.04518104):0.02424989,(((Liasis_mackloti:0.01199028,Liasis_fuscus:0 [...]
+tree 28 = (Candoia_aspera:0.4345551,(Loxocemus_bicolor:0.314093,((Python_regius:0.173824,(Python_curtus:0.1043669,(Python_sebae:0.07896053,Python_molurus:0.05467638):0.01587752):0.03999686):0.04129383,(((Morelia_boeleni:0.0936573,((((Liasis_olivaceus:0.03802104,(Liasis_fuscus:0.02550778,Liasis_mackloti:0.007911858):0.04045908):0.01101188,Apodora_papuana:0.07327953):0.01822209,(Antaresia_melanocephalus:0.03691812,Antaresia_ramsayi:0.02370955):0.0601409):0.007223612,(Liasis_albertisii:0.06 [...]
+tree 29 = (Candoia_aspera:0.4559287,((((Python_reticulatus:0.05517757,Python_timoriensis:0.06814217):0.08340554,((((((Morelia_tracyae:0.02748149,(Morelia_amethistina:0.03098765,((Morelia_nauta:0.006576406,Morelia_kinghorni:0.007688372):0.0007096533,Morelia_clastolepis:0.00977261):0.0210015):0.006038462):0.02583509,Morelia_oenpelliensis:0.0749369):0.0101301,(Morelia_spilota:0.02004658,Morelia_bredli:0.03451659):0.04024715):0.02746889,(Bothrochilus_boa:0.08344832,Liasis_albertisii:0.045593 [...]
+tree 30 = (((((((Morelia_bredli:0.02865577,Morelia_spilota:0.03152456):0.02984838,(Morelia_oenpelliensis:0.05957398,((((Morelia_clastolepis:0.009648714,Morelia_kinghorni:0.008176357):0.008419985,Morelia_nauta:0.002053939):0.008951861,Morelia_tracyae:0.03430245):0.007474801,Morelia_amethistina:0.02655718):0.02660193):0.005862491):0.02390784,((((Antaresia_melanocephalus:0.04143373,Antaresia_ramsayi:0.02445915):0.05473143,((Liasis_mackloti:0.01147677,Liasis_fuscus:0.02043269):0.06112321,(Li [...]
+tree 31 = (Candoia_aspera:0.3564553,(((((Python_sebae:0.07223247,Python_molurus:0.03344707):0.02744703,Python_curtus:0.100609):0.02068396,Python_regius:0.1083106):0.04848281,((Python_timoriensis:0.07007541,Python_reticulatus:0.06893867):0.08352695,((((Morelia_viridisS:0.05242792,Morelia_viridisN:0.03244932):0.03188631,Morelia_carinata:0.0666426):0.01467226,(Antaresia_maculosa:0.04897325,((Antaresia_stimsoni:0.01523345,Antaresia_childreni:0.02280061):0.03972103,Antaresia_perthensis:0.0715 [...]
+tree 32 = (((((((Morelia_boeleni:0.09460149,((Morelia_oenpelliensis:0.06591956,(((Morelia_clastolepis:0.008404051,(Morelia_kinghorni:0.006978938,Morelia_nauta:0.007460564):0.000440772):0.01431461,Morelia_amethistina:0.0208417):0.006610699,Morelia_tracyae:0.02966128):0.05849246):0.008923475,(Morelia_spilota:0.0247648,Morelia_bredli:0.03024164):0.02807078):0.03123918):0.003089671,(Bothrochilus_boa:0.04782091,Liasis_albertisii:0.05497745):0.0331477):0.008159145,(((Liasis_mackloti:0.01341363 [...]
+tree 33 = ((Loxocemus_bicolor:0.2669451,(((Python_reticulatus:0.05321987,Python_timoriensis:0.06997756):0.05861088,((((((Antaresia_childreni:0.01655703,Antaresia_stimsoni:0.01637029):0.03569376,Antaresia_perthensis:0.06286153):0.007241909,Antaresia_maculosa:0.06266587):0.008208767,((Morelia_viridisN:0.03704275,Morelia_viridisS:0.04520784):0.03192493,Morelia_carinata:0.051277):0.0100963):0.01478441,(((Antaresia_melanocephalus:0.04876354,Antaresia_ramsayi:0.02593541):0.04278765,(Apodora_pa [...]
+tree 34 = (Candoia_aspera:0.4569261,(((Python_regius:0.1411037,((Python_sebae:0.06459784,Python_molurus:0.03858578):0.01245523,Python_curtus:0.1156069):0.03368835):0.04372914,((Python_reticulatus:0.04306023,Python_timoriensis:0.06315612):0.07695555,(((((Antaresia_ramsayi:0.04512156,Antaresia_melanocephalus:0.03252252):0.04167882,(Apodora_papuana:0.06102348,(Liasis_olivaceus:0.03044151,(Liasis_fuscus:0.02654637,Liasis_mackloti:0.00437689):0.04196502):0.01919925):0.01657837):0.01029805,((A [...]
+tree 35 = ((((((Python_sebae:0.07121849,Python_molurus:0.02621536):0.01770809,Python_curtus:0.1089461):0.03107291,Python_regius:0.1164904):0.05656528,((Python_reticulatus:0.05387978,Python_timoriensis:0.0633591):0.05634522,(((Morelia_boeleni:0.08749596,((Morelia_spilota:0.02898978,Morelia_bredli:0.02387138):0.0409496,(Morelia_oenpelliensis:0.06187724,((Morelia_nauta:0.006906582,(Morelia_kinghorni:0.01526566,Morelia_clastolepis:0.01043315):0.002784817):0.009692299,(Morelia_amethistina:0.0 [...]
+tree 36 = ((Loxocemus_bicolor:0.3172354,(((Python_curtus:0.08136119,(Python_sebae:0.0735508,Python_molurus:0.03640422):0.01557105):0.0277907,Python_regius:0.09717536):0.04494788,((((Antaresia_maculosa:0.06047012,((Antaresia_stimsoni:0.01936481,Antaresia_childreni:0.0204231):0.02860615,Antaresia_perthensis:0.067609):0.005236841):0.01713776,((Morelia_viridisS:0.04331104,Morelia_viridisN:0.02973395):0.02836114,Morelia_carinata:0.05650005):0.01346905):0.01404818,(((Bothrochilus_boa:0.0460792 [...]
+tree 37 = ((Loxocemus_bicolor:0.2740998,Candoia_aspera:0.4155202):0.02116592,(((((Morelia_boeleni:0.06929356,((Morelia_oenpelliensis:0.07583182,(Morelia_amethistina:0.01459558,((Morelia_nauta:0.007322429,(Morelia_clastolepis:0.01181237,Morelia_kinghorni:0.005259852):0.003398728):0.03372015,Morelia_tracyae:0.03434483):0.02091431):0.03202138):0.005658255,(Morelia_bredli:0.02809767,Morelia_spilota:0.03355095):0.02263292):0.02159314):0.00291552,(((Antaresia_melanocephalus:0.05160809,Antaresi [...]
+tree 38 = (Candoia_aspera:0.4059339,(Loxocemus_bicolor:0.3025017,(((((Morelia_boeleni:0.07452276,((Morelia_bredli:0.02106962,Morelia_spilota:0.0320159):0.03108456,(((Morelia_amethistina:0.02608054,((Morelia_nauta:0.009185291,Morelia_kinghorni:0.006006182):0.004924326,Morelia_clastolepis:0.00406642):0.01622507):0.01576933,Morelia_tracyae:0.05538706):0.02692716,Morelia_oenpelliensis:0.05802539):0.005663774):0.02119685):0.005774344,(((Antaresia_melanocephalus:0.04396474,Antaresia_ramsayi:0. [...]
+tree 39 = (Candoia_aspera:0.3487431,(Loxocemus_bicolor:0.2715437,((Python_regius:0.108148,((Python_molurus:0.04219487,Python_sebae:0.06968664):0.02150992,Python_curtus:0.1232163):0.0239361):0.0522518,(((((Bothrochilus_boa:0.06552829,Liasis_albertisii:0.03917488):0.02331611,((((Liasis_fuscus:0.02518991,Liasis_mackloti:0.00890263):0.04813051,Liasis_olivaceus:0.0405597):0.01507422,Apodora_papuana:0.07516039):0.01507437,(Antaresia_ramsayi:0.04147509,Antaresia_melanocephalus:0.045958):0.06484 [...]
+tree 40 = (Candoia_aspera:0.5223368,(((Python_regius:0.1034435,((Python_molurus:0.04249723,Python_sebae:0.07799269):0.01292014,Python_curtus:0.09253175):0.02023361):0.03960567,(((((Morelia_spilota:0.02882786,Morelia_bredli:0.02270972):0.02443599,(((Morelia_amethistina:0.02571044,((Morelia_kinghorni:0.01171321,Morelia_nauta:0.00610836):0.00422032,Morelia_clastolepis:0.004021223):0.01317024):0.00131684,Morelia_tracyae:0.03160369):0.03319636,Morelia_oenpelliensis:0.06634761):0.005747809):0. [...]
+tree 41 = (Candoia_aspera:0.4850667,(Loxocemus_bicolor:0.2902032,((Python_regius:0.1196588,(Python_curtus:0.08757062,(Python_sebae:0.07518693,Python_molurus:0.03538358):0.0142014):0.01088135):0.030351,(((((Morelia_viridisS:0.05240148,Morelia_viridisN:0.04396064):0.01791946,Morelia_carinata:0.06703203):0.01547424,(Antaresia_maculosa:0.08728829,(Antaresia_perthensis:0.06958781,(Antaresia_stimsoni:0.01361088,Antaresia_childreni:0.02863435):0.04253736):0.02274367):0.01055237):0.02506572,(((M [...]
+tree 42 = (Candoia_aspera:0.4227249,(Loxocemus_bicolor:0.2630346,((Python_regius:0.1292282,((Python_sebae:0.07375409,Python_molurus:0.05145713):0.009915304,Python_curtus:0.104214):0.02208551):0.03973028,((Python_timoriensis:0.0722339,Python_reticulatus:0.07091186):0.05739378,((((Morelia_boeleni:0.09101835,((Morelia_bredli:0.0278041,Morelia_spilota:0.03169677):0.02124261,(Morelia_oenpelliensis:0.05296945,((Morelia_tracyae:0.05372661,((Morelia_clastolepis:0.008265544,Morelia_kinghorni:0.00 [...]
+tree 43 = (Candoia_aspera:0.5153805,((((Python_curtus:0.07863771,(Python_sebae:0.08222992,Python_molurus:0.04254121):0.01317515):0.02583793,Python_regius:0.137146):0.05263082,(((Morelia_boeleni:0.1016967,(((Bothrochilus_boa:0.04778673,Liasis_albertisii:0.05529477):0.04329956,(Antaresia_ramsayi:0.02994375,Antaresia_melanocephalus:0.0416666):0.05128635):0.001794929,(Apodora_papuana:0.06170972,(Liasis_olivaceus:0.03671799,(Liasis_mackloti:0.006090192,Liasis_fuscus:0.02565038):0.03958824):0. [...]
+tree 44 = ((Loxocemus_bicolor:0.2648974,((((((Morelia_spilota:0.01775619,Morelia_bredli:0.02572672):0.03433169,((Morelia_tracyae:0.03554912,(((Morelia_kinghorni:0.004910661,Morelia_nauta:0.01120176):0.01147165,Morelia_clastolepis:0.003855517):0.01543003,Morelia_amethistina:0.02183181):0.012722):0.02702229,Morelia_oenpelliensis:0.05908537):0.007540966):0.02306979,(((((Antaresia_childreni:0.02080121,Antaresia_stimsoni:0.01730966):0.02730232,Antaresia_perthensis:0.0794485):0.01868214,Antare [...]
+tree 45 = (((((Python_reticulatus:0.06082482,Python_timoriensis:0.05976824):0.07470233,(((((Antaresia_childreni:0.01703978,Antaresia_stimsoni:0.01702941):0.03308871,Antaresia_perthensis:0.08043923):0.01555501,Antaresia_maculosa:0.0884772):0.005314314,(Morelia_carinata:0.0651085,(Morelia_viridisN:0.04315993,Morelia_viridisS:0.05203646):0.02278244):0.01707531):0.02346517,(((Antaresia_melanocephalus:0.04749625,Antaresia_ramsayi:0.03669699):0.03379984,((Liasis_olivaceus:0.04033815,Apodora_pa [...]
+tree 46 = (Candoia_aspera:0.5322171,((((Python_reticulatus:0.0535918,Python_timoriensis:0.06535871):0.06768871,(((((Antaresia_melanocephalus:0.03916935,Antaresia_ramsayi:0.0373195):0.04072262,(((Liasis_fuscus:0.01736701,Liasis_mackloti:0.01169367):0.0425045,Liasis_olivaceus:0.03651515):0.008463145,Apodora_papuana:0.06127901):0.01813412):0.006101433,(Liasis_albertisii:0.05168434,Bothrochilus_boa:0.05431427):0.03115496):0.005398566,(Morelia_boeleni:0.06280028,(((Morelia_tracyae:0.02379919, [...]
+tree 47 = ((((((Morelia_boeleni:0.08532348,((Morelia_spilota:0.02557332,Morelia_bredli:0.02833054):0.03910548,(((Morelia_amethistina:0.0250698,((Morelia_nauta:0.008027407,Morelia_kinghorni:0.01428535):0.01094484,Morelia_clastolepis:0.00227683):0.01955651):0.005372779,Morelia_tracyae:0.03145023):0.03441583,Morelia_oenpelliensis:0.05781036):0.005803634):0.02195589):0.002986267,((Bothrochilus_boa:0.06447638,Liasis_albertisii:0.04012674):0.03942857,((Antaresia_ramsayi:0.04033451,Antaresia_me [...]
+tree 48 = (Candoia_aspera:0.4436023,(Loxocemus_bicolor:0.2369616,(((Python_curtus:0.09049056,(Python_molurus:0.029384,Python_sebae:0.0627384):0.01268501):0.01953619,Python_regius:0.1171373):0.04828605,((Python_timoriensis:0.06139592,Python_reticulatus:0.0646338):0.07305738,(((((Antaresia_stimsoni:0.02057256,Antaresia_childreni:0.02154838):0.03608791,Antaresia_perthensis:0.07564235):0.01173533,Antaresia_maculosa:0.07024714):0.01354829,(Morelia_carinata:0.0709676,(Morelia_viridisS:0.046626 [...]
+tree 49 = ((Loxocemus_bicolor:0.3405555,(((((((Morelia_spilota:0.02807685,Morelia_bredli:0.02566795):0.03087109,(Morelia_oenpelliensis:0.06798181,(Morelia_tracyae:0.02098472,(Morelia_amethistina:0.02311013,((Morelia_nauta:0.004146555,Morelia_kinghorni:0.009369746):0.00429045,Morelia_clastolepis:0.008566892):0.01380615):0.006610653):0.03471999):0.003897041):0.01810336,((Morelia_carinata:0.06383306,(Morelia_viridisN:0.03476118,Morelia_viridisS:0.05680829):0.02523946):0.009249984,(Antaresia [...]
+tree 50 = (Candoia_aspera:0.365292,(Loxocemus_bicolor:0.310124,((((Python_sebae:0.08393153,Python_molurus:0.04102906):0.02198402,Python_curtus:0.08610296):0.01361938,Python_regius:0.1521334):0.04792291,((Python_timoriensis:0.08534907,Python_reticulatus:0.06216048):0.07485164,(((((((Morelia_amethistina:0.03515294,(Morelia_clastolepis:0.004999444,(Morelia_nauta:0.009645939,Morelia_kinghorni:0.008671095):0.0001768814):0.02966952):0.01516104,Morelia_tracyae:0.04027297):0.01631001,Morelia_oen [...]
+tree 51 = (Candoia_aspera:0.5449368,((((Python_reticulatus:0.09099772,Python_timoriensis:0.07144241):0.05727305,(((Morelia_carinata:0.0577648,(Morelia_viridisN:0.03065865,Morelia_viridisS:0.04847735):0.02928353):0.008153621,(Antaresia_maculosa:0.05881195,(Antaresia_perthensis:0.06472734,(Antaresia_childreni:0.02932234,Antaresia_stimsoni:0.02067186):0.04013394):0.01273477):0.01701452):0.01169918,(((Antaresia_ramsayi:0.03842948,Antaresia_melanocephalus:0.03766409):0.0452689,((Liasis_olivac [...]
+tree 52 = (Candoia_aspera:0.4119319,(((Python_regius:0.1152994,((Python_sebae:0.07552402,Python_molurus:0.03490164):0.01108603,Python_curtus:0.1145467):0.01901207):0.0443746,((((Morelia_carinata:0.06566802,(Morelia_viridisN:0.03596026,Morelia_viridisS:0.0471616):0.01708947):0.01026524,(Antaresia_maculosa:0.06025642,(Antaresia_perthensis:0.05098101,(Antaresia_childreni:0.01934081,Antaresia_stimsoni:0.0135667):0.03058171):0.009517906):0.01267591):0.008263889,((((Morelia_spilota:0.02344491, [...]
+tree 53 = ((((Python_regius:0.1254009,(Python_curtus:0.08753147,(Python_molurus:0.04027961,Python_sebae:0.08576586):0.02140595):0.02317131):0.0390212,((((Morelia_boeleni:0.07962354,((Morelia_spilota:0.03086129,Morelia_bredli:0.03248024):0.03102577,((Morelia_tracyae:0.04475659,(Morelia_amethistina:0.03098438,((Morelia_kinghorni:0.008669885,Morelia_nauta:0.005987475):0.002591122,Morelia_clastolepis:0.002634122):0.01566736):0.01338949):0.02701571,Morelia_oenpelliensis:0.07358275):0.00887946 [...]
+tree 54 = (Candoia_aspera:0.4297135,((((((Antaresia_maculosa:0.07003845,(Antaresia_perthensis:0.06743232,(Antaresia_childreni:0.02884627,Antaresia_stimsoni:0.02139588):0.03896306):0.01121203):0.008262347,((Morelia_viridisS:0.046964,Morelia_viridisN:0.03312958):0.03465966,Morelia_carinata:0.06247932):0.01828836):0.01619593,((((Apodora_papuana:0.04879782,((Liasis_mackloti:0.01053472,Liasis_fuscus:0.02365744):0.04023937,Liasis_olivaceus:0.03599876):0.01405198):0.01631438,(Antaresia_melanoce [...]
+tree 55 = (Candoia_aspera:0.4032337,(((((Python_molurus:0.03143687,Python_sebae:0.09823878):0.02140932,Python_curtus:0.09608987):0.01506344,Python_regius:0.1271778):0.05095627,((((((Morelia_oenpelliensis:0.0740013,((Morelia_amethistina:0.02689424,(Morelia_nauta:0.005721368,(Morelia_clastolepis:0.00501394,Morelia_kinghorni:0.007579701):0.008862685):0.01332026):0.002459013,Morelia_tracyae:0.03682387):0.0356683):0.005829372,(Morelia_spilota:0.02072617,Morelia_bredli:0.0303542):0.02974856):0 [...]
+tree 56 = ((((Python_curtus:0.09610909,(Python_sebae:0.0763935,Python_molurus:0.04010346):0.01665276):0.01579041,Python_regius:0.1201013):0.02228269,((((((Antaresia_stimsoni:0.009933382,Antaresia_childreni:0.02504422):0.03898344,Antaresia_perthensis:0.07441505):0.01655102,Antaresia_maculosa:0.06341264):0.00423628,(Morelia_carinata:0.06782785,(Morelia_viridisS:0.0406782,Morelia_viridisN:0.03474428):0.02682416):0.01516531):0.02205259,((Morelia_boeleni:0.07700987,((((Morelia_amethistina:0.0 [...]
+tree 57 = (Candoia_aspera:0.4153505,(Loxocemus_bicolor:0.2791384,((Python_regius:0.1064145,(Python_curtus:0.1071383,(Python_sebae:0.07241633,Python_molurus:0.04887044):0.01224031):0.020618):0.04332308,((Python_reticulatus:0.06484751,Python_timoriensis:0.05405059):0.04751213,((((Bothrochilus_boa:0.05362835,Liasis_albertisii:0.04960141):0.0356816,((Antaresia_ramsayi:0.03268517,Antaresia_melanocephalus:0.03758129):0.03736965,((Liasis_olivaceus:0.04277857,Apodora_papuana:0.06016667):0.009949 [...]
+tree 58 = (Candoia_aspera:0.4873354,(Loxocemus_bicolor:0.2332783,(((((((((Liasis_mackloti:0.007309739,Liasis_fuscus:0.0279446):0.05690776,Liasis_olivaceus:0.03166848):0.01372647,Apodora_papuana:0.0585181):0.01686857,(Antaresia_melanocephalus:0.04486908,Antaresia_ramsayi:0.03001735):0.04537131):0.007726529,(Bothrochilus_boa:0.08559648,Liasis_albertisii:0.06387931):0.0357606):0.007852339,Morelia_boeleni:0.08361377):0.0009829988,(((Antaresia_maculosa:0.07797792,(Antaresia_perthensis:0.06673 [...]
+tree 59 = ((Candoia_aspera:0.4578877,(((Python_reticulatus:0.06169376,Python_timoriensis:0.08422366):0.05572204,(((Morelia_boeleni:0.0931297,((((Antaresia_childreni:0.02158899,Antaresia_stimsoni:0.01438034):0.03998282,Antaresia_perthensis:0.06970084):0.01535176,Antaresia_maculosa:0.07081544):0.01052213,(Morelia_carinata:0.0774876,(Morelia_viridisN:0.03236672,Morelia_viridisS:0.07599355):0.01839833):0.01302615):0.01439441):0.0008917252,((Liasis_albertisii:0.039823,Bothrochilus_boa:0.05183 [...]
+tree 60 = ((Loxocemus_bicolor:0.2564752,Candoia_aspera:0.430636):0.02776712,(((Python_reticulatus:0.05389938,Python_timoriensis:0.07081924):0.05042149,((Bothrochilus_boa:0.0657108,Liasis_albertisii:0.04306774):0.03297476,((((Morelia_spilota:0.02325149,Morelia_bredli:0.02818583):0.02944659,(Morelia_oenpelliensis:0.05882524,(Morelia_amethistina:0.02618559,(((Morelia_clastolepis:0.00525444,Morelia_kinghorni:0.004066344):0.003764506,Morelia_nauta:0.005986931):0.00816707,Morelia_tracyae:0.070 [...]
+tree 61 = ((Loxocemus_bicolor:0.2390967,Candoia_aspera:0.4592643):0.04768498,((((((((Apodora_papuana:0.06974535,Liasis_olivaceus:0.03615791):0.01678672,(Liasis_fuscus:0.01981257,Liasis_mackloti:0.007387913):0.03607835):0.02837702,(Antaresia_melanocephalus:0.0413593,Antaresia_ramsayi:0.02633209):0.05602189):0.01375102,((Antaresia_maculosa:0.06634786,((Antaresia_childreni:0.02528178,Antaresia_stimsoni:0.01857345):0.03243086,Antaresia_perthensis:0.06599134):0.007734964):0.00491988,(Morelia_ [...]
+tree 62 = ((Loxocemus_bicolor:0.3119484,((Python_regius:0.09866725,(Python_curtus:0.1245086,(Python_sebae:0.07169133,Python_molurus:0.03607564):0.01980528):0.02430393):0.02232077,((Python_reticulatus:0.0580492,Python_timoriensis:0.07219332):0.05736778,((((Bothrochilus_boa:0.06789342,Liasis_albertisii:0.0626714):0.0318012,((Antaresia_ramsayi:0.0316683,Antaresia_melanocephalus:0.04699017):0.05802338,(Apodora_papuana:0.06741561,(Liasis_olivaceus:0.03440653,(Liasis_fuscus:0.0262918,Liasis_ma [...]
+tree 63 = ((((Python_regius:0.1045362,(Python_curtus:0.0815732,(Python_sebae:0.0731453,Python_molurus:0.04121527):0.01532189):0.01994899):0.03798915,((Python_timoriensis:0.06294615,Python_reticulatus:0.05094862):0.0574045,((Morelia_boeleni:0.08758336,(((((Liasis_mackloti:0.004929908,Liasis_fuscus:0.02028416):0.0462742,Liasis_olivaceus:0.03706543):0.01458087,Apodora_papuana:0.05878766):0.01585728,(Antaresia_ramsayi:0.01785121,Antaresia_melanocephalus:0.05271079):0.04691403):0.005697472,(B [...]
+tree 64 = (Candoia_aspera:0.5268895,((((Python_reticulatus:0.05913797,Python_timoriensis:0.06249719):0.05855848,(((((Antaresia_perthensis:0.06857628,(Antaresia_childreni:0.02184403,Antaresia_stimsoni:0.01644374):0.02731027):0.01049891,Antaresia_maculosa:0.06825385):0.0212855,(Morelia_carinata:0.05797942,(Morelia_viridisN:0.04222686,Morelia_viridisS:0.03983184):0.02372562):0.007988327):0.01161671,(Morelia_boeleni:0.08816773,((Bothrochilus_boa:0.0735602,Liasis_albertisii:0.05135359):0.0357 [...]
+tree 65 = ((((((Python_molurus:0.05248859,Python_sebae:0.08445334):0.01912769,Python_curtus:0.1068431):0.01303875,Python_regius:0.1060619):0.04121669,((Python_reticulatus:0.06641269,Python_timoriensis:0.06510396):0.05472704,(((Apodora_papuana:0.06501856,((Liasis_fuscus:0.01938097,Liasis_mackloti:0.007993713):0.04233311,Liasis_olivaceus:0.04417499):0.007100968):0.01449947,(Antaresia_melanocephalus:0.03318174,Antaresia_ramsayi:0.02608021):0.04401241):0.02197335,(((((Morelia_spilota:0.02298 [...]
+tree 66 = ((((((Python_molurus:0.04811833,Python_sebae:0.07004114):0.02196186,Python_curtus:0.1096454):0.01956063,Python_regius:0.1123615):0.03691369,(((((Liasis_albertisii:0.05370764,Bothrochilus_boa:0.06265907):0.03561458,((Apodora_papuana:0.07272776,((Liasis_fuscus:0.01370828,Liasis_mackloti:0.02309465):0.05741218,Liasis_olivaceus:0.04167728):0.01163812):0.01665479,(Antaresia_melanocephalus:0.0253543,Antaresia_ramsayi:0.03190032):0.05173058):0.0008003378):0.005472764,Morelia_boeleni:0 [...]
+tree 67 = ((((((Python_sebae:0.06236479,Python_molurus:0.04213767):0.01777824,Python_curtus:0.1021113):0.01363883,Python_regius:0.1001127):0.01958544,((Python_timoriensis:0.04812869,Python_reticulatus:0.06212032):0.06951745,(((Morelia_boeleni:0.07786193,((Morelia_spilota:0.02928192,Morelia_bredli:0.01920753):0.02754254,((Morelia_amethistina:0.02519712,(Morelia_tracyae:0.03750275,((Morelia_kinghorni:0.0112295,Morelia_nauta:0.01662518):0.008346236,Morelia_clastolepis:0.005399784):0.0103013 [...]
+tree 68 = ((Loxocemus_bicolor:0.3421064,((Python_regius:0.1115622,(Python_curtus:0.09826584,(Python_sebae:0.07191922,Python_molurus:0.04064791):0.01578384):0.01438764):0.04523101,((Python_timoriensis:0.06358136,Python_reticulatus:0.05833856):0.06307472,((((Morelia_carinata:0.05734906,(Morelia_viridisS:0.04318783,Morelia_viridisN:0.04583721):0.01821118):0.01545327,((Antaresia_perthensis:0.06296287,(Antaresia_stimsoni:0.01706827,Antaresia_childreni:0.02104141):0.03055942):0.01182927,Antare [...]
+tree 69 = ((Candoia_aspera:0.4450836,Loxocemus_bicolor:0.3174019):0.02447175,((Python_regius:0.1424483,((Python_molurus:0.03302827,Python_sebae:0.07146083):0.01905547,Python_curtus:0.1159114):0.0381509):0.04241104,(((((((Antaresia_childreni:0.01890977,Antaresia_stimsoni:0.01517946):0.035416,Antaresia_perthensis:0.06857496):0.01161928,Antaresia_maculosa:0.07220173):0.007211953,(Morelia_carinata:0.0642391,(Morelia_viridisN:0.03726853,Morelia_viridisS:0.05549287):0.01029716):0.0189687):0.02 [...]
+tree 70 = ((Loxocemus_bicolor:0.2857882,(((Python_curtus:0.09804145,(Python_molurus:0.04031657,Python_sebae:0.07843305):0.02059991):0.02529989,Python_regius:0.117949):0.04936197,((((((Morelia_oenpelliensis:0.06557468,(Morelia_tracyae:0.04528187,((Morelia_clastolepis:0.004341288,(Morelia_kinghorni:0.01006468,Morelia_nauta:0.004045206):0.004556852):0.01467889,Morelia_amethistina:0.02757685):0.005371513):0.05121582):0.003292884,(Morelia_spilota:0.01948951,Morelia_bredli:0.03155399):0.032303 [...]
+tree 71 = (Candoia_aspera:0.4717339,(((((((Bothrochilus_boa:0.05883323,Liasis_albertisii:0.05456283):0.02271329,((Apodora_papuana:0.05998257,(Liasis_olivaceus:0.02913736,(Liasis_fuscus:0.01732647,Liasis_mackloti:0.01674711):0.04874046):0.008143937):0.01887131,(Antaresia_ramsayi:0.04183237,Antaresia_melanocephalus:0.04076158):0.04427686):0.009855444):0.01066004,(((Morelia_oenpelliensis:0.04305822,((Morelia_clastolepis:0.002836661,(Morelia_nauta:0.01515712,Morelia_kinghorni:0.004505663):0. [...]
+tree 72 = (Candoia_aspera:0.4905738,(Loxocemus_bicolor:0.2519591,(((Python_timoriensis:0.07404226,Python_reticulatus:0.05474481):0.06644049,(((Morelia_boeleni:0.09095004,((Morelia_bredli:0.02959755,Morelia_spilota:0.0353426):0.02959985,(((Morelia_amethistina:0.02093452,(Morelia_nauta:0.004317592,(Morelia_kinghorni:0.009871938,Morelia_clastolepis:0.0136011):0.006546919):0.03087369):0.01212434,Morelia_tracyae:0.03055861):0.05770397,Morelia_oenpelliensis:0.07079792):0.003571392):0.03535009) [...]
+tree 73 = (Candoia_aspera:0.4598036,(((((((Morelia_spilota:0.01982865,Morelia_bredli:0.02966733):0.03884227,((Morelia_tracyae:0.03055964,(((Morelia_kinghorni:0.004122602,Morelia_clastolepis:0.006418796):0.006158391,Morelia_nauta:0.00751089):0.02200455,Morelia_amethistina:0.02131889):0.01090798):0.01356813,Morelia_oenpelliensis:0.05887202):0.01445794):0.02558764,((((Antaresia_ramsayi:0.02479341,Antaresia_melanocephalus:0.04232038):0.04453974,(Apodora_papuana:0.05413614,((Liasis_fuscus:0.0 [...]
+tree 74 = (((Python_regius:0.122777,((Python_molurus:0.03861148,Python_sebae:0.08854008):0.01836058,Python_curtus:0.1087409):0.02868272):0.03219403,((((((Morelia_oenpelliensis:0.0601058,((Morelia_amethistina:0.0244574,Morelia_tracyae:0.0397486):0.0001275072,(Morelia_nauta:0.004788438,(Morelia_clastolepis:0.005838887,Morelia_kinghorni:0.01017217):0.001899022):0.01904309):0.04300782):0.00633599,(Morelia_spilota:0.02559737,Morelia_bredli:0.02940281):0.02435702):0.0246874,((Antaresia_maculos [...]
+tree 75 = (((Python_regius:0.1185489,(Python_curtus:0.08720485,(Python_molurus:0.0435094,Python_sebae:0.08370481):0.01269984):0.0270439):0.05088809,((((Liasis_albertisii:0.05173274,Bothrochilus_boa:0.05183286):0.05072088,(((Antaresia_melanocephalus:0.03656163,Antaresia_ramsayi:0.03155388):0.03835047,(((Liasis_fuscus:0.01454665,Liasis_mackloti:0.01162582):0.0407249,Liasis_olivaceus:0.03344422):0.01447517,Apodora_papuana:0.06149126):0.01772949):0.01052499,((((Antaresia_childreni:0.01902816 [...]
+tree 76 = ((Loxocemus_bicolor:0.289421,(((Python_curtus:0.1034425,(Python_molurus:0.03838318,Python_sebae:0.0657545):0.005487207):0.01794188,Python_regius:0.1158322):0.02708483,((((((Apodora_papuana:0.05140632,(Liasis_olivaceus:0.03483575,(Liasis_fuscus:0.0167882,Liasis_mackloti:0.01303193):0.0447589):0.007661923):0.0181732,(Antaresia_melanocephalus:0.03623783,Antaresia_ramsayi:0.02679624):0.04371616):0.001420114,(Liasis_albertisii:0.05907394,Bothrochilus_boa:0.06117998):0.04004438):0.00 [...]
+tree 77 = ((Loxocemus_bicolor:0.3190314,(((((Morelia_boeleni:0.07978362,((Morelia_spilota:0.02172516,Morelia_bredli:0.026232):0.03155658,((Morelia_amethistina:0.02728021,(Morelia_tracyae:0.05859969,((Morelia_clastolepis:0.004245739,Morelia_kinghorni:0.007346214):0.006021728,Morelia_nauta:0.01110223):0.0109052):0.00218281):0.02807543,Morelia_oenpelliensis:0.05684937):0.006062989):0.01907406):0.00915462,(Liasis_albertisii:0.03894934,Bothrochilus_boa:0.0623029):0.0322232):0.002144932,(((Mor [...]
+tree 78 = ((Loxocemus_bicolor:0.2229325,Candoia_aspera:0.4421281):0.03062222,(((((((Morelia_bredli:0.03399205,Morelia_spilota:0.02517605):0.02337728,(((Morelia_amethistina:0.01529103,(Morelia_clastolepis:0.006187776,(Morelia_kinghorni:0.01337353,Morelia_nauta:0.005734063):0.005374756):0.02460788):0.001540486,Morelia_tracyae:0.03329582):0.0333974,Morelia_oenpelliensis:0.07525567):0.00578434):0.02385279,Morelia_boeleni:0.08113851):0.006018764,(((Antaresia_melanocephalus:0.04012,Antaresia_r [...]
+tree 79 = ((Loxocemus_bicolor:0.2068851,Candoia_aspera:0.4157484):0.06394592,((Python_regius:0.1119306,((Python_molurus:0.02507726,Python_sebae:0.06768835):0.01273502,Python_curtus:0.09793023):0.02951751):0.03910106,(((((Morelia_spilota:0.02664926,Morelia_bredli:0.03702698):0.02468232,(Morelia_oenpelliensis:0.05764724,((Morelia_amethistina:0.02414779,(Morelia_clastolepis:0.005833709,(Morelia_nauta:0.007372257,Morelia_kinghorni:0.0086475):0.00620881):0.01279634):0.008251266,Morelia_tracya [...]
+tree 80 = (Candoia_aspera:0.391632,(Loxocemus_bicolor:0.2646089,(((Python_reticulatus:0.05737131,Python_timoriensis:0.06872314):0.05957824,(((((Antaresia_childreni:0.02040491,Antaresia_stimsoni:0.01350101):0.03175717,Antaresia_perthensis:0.06307919):0.01255095,Antaresia_maculosa:0.07096548):0.01294142,(Morelia_carinata:0.0554948,(Morelia_viridisN:0.03206945,Morelia_viridisS:0.04474466):0.02886815):0.008750372):0.01658624,(((Bothrochilus_boa:0.05867584,Liasis_albertisii:0.04518102):0.0527 [...]
+tree 81 = (Candoia_aspera:0.3679055,((((Python_curtus:0.09829191,(Python_molurus:0.03905549,Python_sebae:0.08446096):0.01455288):0.02320233,Python_regius:0.1102946):0.05339913,(((((Antaresia_perthensis:0.06928507,(Antaresia_childreni:0.03003637,Antaresia_stimsoni:0.01953789):0.03258011):0.0164155,Antaresia_maculosa:0.06494583):0.007981955,(Morelia_carinata:0.06003471,(Morelia_viridisN:0.02537246,Morelia_viridisS:0.04879795):0.01950855):0.01576949):0.0175171,((((Morelia_spilota:0.02909157 [...]
+tree 82 = ((Loxocemus_bicolor:0.2390659,Candoia_aspera:0.3844317):0.05418046,((((Python_molurus:0.04026471,Python_sebae:0.0644374):0.01042403,Python_curtus:0.1225831):0.02166972,Python_regius:0.09710642):0.03811794,(((((Antaresia_perthensis:0.06993962,(Antaresia_childreni:0.01769138,Antaresia_stimsoni:0.01418347):0.03850968):0.01233473,Antaresia_maculosa:0.06562401):0.002737597,(Morelia_carinata:0.05781147,(Morelia_viridisN:0.04477895,Morelia_viridisS:0.06053305):0.01641598):0.01962619): [...]
+tree 83 = ((Loxocemus_bicolor:0.2036892,Candoia_aspera:0.4028089):0.04007441,(((((Morelia_boeleni:0.06613699,((((((Morelia_kinghorni:0.008681592,Morelia_clastolepis:0.0096482):0.006886978,Morelia_nauta:0.004116863):0.007086856,Morelia_amethistina:0.03765005):0.007846691,Morelia_tracyae:0.05190823):0.02842286,Morelia_oenpelliensis:0.05083896):0.006002964,(Morelia_spilota:0.03349041,Morelia_bredli:0.03408026):0.02509088):0.01676837):0.01044434,(Bothrochilus_boa:0.05595351,Liasis_albertisii [...]
+tree 84 = ((((Python_curtus:0.1223827,(Python_sebae:0.06621626,Python_molurus:0.03832782):0.01086444):0.02337286,Python_regius:0.124018):0.02184441,((Python_timoriensis:0.08372909,Python_reticulatus:0.04903101):0.06903388,(Morelia_boeleni:0.0884944,((((Morelia_bredli:0.0312634,Morelia_spilota:0.02388251):0.02731682,((Morelia_amethistina:0.02294787,(((Morelia_nauta:0.007833751,Morelia_kinghorni:0.008007233):0.005648212,Morelia_clastolepis:0.009921374):0.01119791,Morelia_tracyae:0.04130311 [...]
+tree 85 = ((Loxocemus_bicolor:0.324078,(((Python_curtus:0.09449341,(Python_molurus:0.03660124,Python_sebae:0.08110901):0.02108984):0.01929663,Python_regius:0.1138374):0.07513523,((Python_timoriensis:0.05849644,Python_reticulatus:0.07499078):0.05284259,((((Antaresia_ramsayi:0.02524694,Antaresia_melanocephalus:0.04279777):0.05075451,(Apodora_papuana:0.05920609,(Liasis_olivaceus:0.03153405,(Liasis_mackloti:0.01327589,Liasis_fuscus:0.01920955):0.04526949):0.01172603):0.01473238):0.006227815, [...]
+tree 86 = (Candoia_aspera:0.4640829,(((((Python_molurus:0.03415465,Python_sebae:0.07069288):0.01969121,Python_curtus:0.08093054):0.006925213,Python_regius:0.1301421):0.02361257,(((((Morelia_boeleni:0.08796563,((Morelia_oenpelliensis:0.06088415,((((Morelia_kinghorni:0.007620532,Morelia_nauta:0.009314096):0.008601672,Morelia_clastolepis:0.002071879):0.01426838,Morelia_amethistina:0.03194737):0.00157039,Morelia_tracyae:0.04071359):0.03416296):0.003333199,(Morelia_spilota:0.03133059,Morelia_ [...]
+tree 87 = (((((Python_sebae:0.05806749,Python_molurus:0.04863067):0.01977878,Python_curtus:0.08771888):0.02114992,Python_regius:0.1133869):0.03790215,((((Morelia_boeleni:0.0781837,(((Apodora_papuana:0.05732655,(Liasis_olivaceus:0.04020414,(Liasis_mackloti:0.009338819,Liasis_fuscus:0.02041633):0.04082824):0.008338371):0.01673676,(Antaresia_melanocephalus:0.03542311,Antaresia_ramsayi:0.03377948):0.04833716):0.01207703,(Bothrochilus_boa:0.05472008,Liasis_albertisii:0.04752972):0.02980977):0 [...]
+tree 88 = (Candoia_aspera:0.5266748,(Loxocemus_bicolor:0.2863724,(((Python_curtus:0.1113482,(Python_molurus:0.0369985,Python_sebae:0.07977591):0.0133193):0.02088768,Python_regius:0.109057):0.03589019,((Python_timoriensis:0.06994488,Python_reticulatus:0.06733911):0.05117117,(((((Antaresia_perthensis:0.07476111,(Antaresia_stimsoni:0.0102694,Antaresia_childreni:0.02539743):0.04680466):0.007219799,Antaresia_maculosa:0.07683412):0.005258364,((Morelia_viridisS:0.03428402,Morelia_viridisN:0.046 [...]
+tree 89 = ((((Python_timoriensis:0.06707148,Python_reticulatus:0.06506125):0.04355094,(((Morelia_boeleni:0.08020637,((((Morelia_amethistina:0.02208421,((Morelia_clastolepis:0.006524679,Morelia_kinghorni:0.009968198):0.005432657,Morelia_nauta:0.004671117):0.0219751):0.005202818,Morelia_tracyae:0.04945317):0.0290873,Morelia_oenpelliensis:0.06477172):0.00946036,(Morelia_bredli:0.02027949,Morelia_spilota:0.02893624):0.02610896):0.01580249):0.01046464,((Bothrochilus_boa:0.06404848,Liasis_albe [...]
+tree 90 = ((Loxocemus_bicolor:0.2342189,((Python_regius:0.125989,((Python_molurus:0.03747382,Python_sebae:0.08511339):0.01629249,Python_curtus:0.08855054):0.01128598):0.03744629,((Python_timoriensis:0.06794617,Python_reticulatus:0.06178539):0.04416913,((((Bothrochilus_boa:0.05820688,Liasis_albertisii:0.05675853):0.02458397,(((Liasis_olivaceus:0.03059831,(Liasis_mackloti:0.008783671,Liasis_fuscus:0.01852068):0.04863702):0.01346526,Apodora_papuana:0.06713094):0.01489775,(Antaresia_ramsayi: [...]
+tree 91 = ((((Python_regius:0.1516846,((Python_sebae:0.07523123,Python_molurus:0.04721355):0.02365172,Python_curtus:0.09479072):0.03676268):0.04198977,((Python_reticulatus:0.07135687,Python_timoriensis:0.05190384):0.06425761,(((((Morelia_spilota:0.02512919,Morelia_bredli:0.02644278):0.03247482,(Morelia_oenpelliensis:0.0642112,((Morelia_tracyae:0.03612823,Morelia_amethistina:0.03026808):0.001308547,(Morelia_clastolepis:0.003069521,(Morelia_nauta:0.01104297,Morelia_kinghorni:0.008335905):0 [...]
+tree 92 = (Candoia_aspera:0.4547292,(Loxocemus_bicolor:0.3272833,(((((((((Morelia_amethistina:0.02060874,((Morelia_nauta:0.005561469,Morelia_kinghorni:0.008690786):0.005051559,Morelia_clastolepis:0.007840901):0.01022098):0.0133147,Morelia_tracyae:0.05010387):0.02268618,Morelia_oenpelliensis:0.06417942):0.005575328,(Morelia_spilota:0.02642943,Morelia_bredli:0.02261605):0.03579255):0.01411255,Morelia_boeleni:0.07721844):0.0007152459,((Liasis_albertisii:0.04806457,Bothrochilus_boa:0.0576084 [...]
+tree 93 = (Candoia_aspera:0.5254491,(((((Python_sebae:0.07638384,Python_molurus:0.03664319):0.01567718,Python_curtus:0.1210042):0.01515347,Python_regius:0.1274836):0.04894325,((((Antaresia_maculosa:0.05457504,(Antaresia_perthensis:0.0810693,(Antaresia_childreni:0.02907495,Antaresia_stimsoni:0.0118365):0.02838937):0.01649198):0.006525643,(Morelia_carinata:0.05356569,(Morelia_viridisS:0.04860563,Morelia_viridisN:0.0308708):0.02143981):0.01601209):0.01907801,((((((Liasis_mackloti:0.01208765 [...]
+tree 94 = (Candoia_aspera:0.4325058,(Loxocemus_bicolor:0.29547,((((Python_sebae:0.07775931,Python_molurus:0.03063228):0.01949916,Python_curtus:0.09782399):0.01065466,Python_regius:0.1565605):0.04138757,(((Morelia_boeleni:0.1021874,((Morelia_bredli:0.03767743,Morelia_spilota:0.03995434):0.02511893,(Morelia_oenpelliensis:0.06688421,((Morelia_tracyae:0.04224919,((Morelia_nauta:0.007632277,Morelia_kinghorni:0.007195393):0.0051102,Morelia_clastolepis:0.006891541):0.01861817):0.001031537,Morel [...]
+tree 95 = (Candoia_aspera:0.4667417,(Loxocemus_bicolor:0.2636684,((((Python_sebae:0.06724253,Python_molurus:0.04279827):0.0137619,Python_curtus:0.1224426):0.01620571,Python_regius:0.105882):0.05012959,((((((Morelia_bredli:0.02989658,Morelia_spilota:0.02336917):0.03265544,(((Morelia_amethistina:0.02684736,((Morelia_kinghorni:0.009531203,Morelia_nauta:0.005903659):0.003909769,Morelia_clastolepis:0.004228678):0.006278146):0.01098578,Morelia_tracyae:0.04320166):0.03115355,Morelia_oenpelliens [...]
+tree 96 = (((((((Liasis_albertisii:0.05597893,Bothrochilus_boa:0.06708494):0.02136291,(((Morelia_oenpelliensis:0.06692067,((((Morelia_nauta:0.006490195,Morelia_kinghorni:0.008862895):0.002589311,Morelia_clastolepis:0.006537181):0.0124676,Morelia_amethistina:0.02783844):0.007327408,Morelia_tracyae:0.04129567):0.04256243):0.00284383,(Morelia_spilota:0.02580417,Morelia_bredli:0.04117144):0.02719414):0.02677905,Morelia_boeleni:0.08508827):0.004503982):0.001474833,((((Apodora_papuana:0.061120 [...]
+tree 97 = ((Loxocemus_bicolor:0.2661032,(((((Morelia_boeleni:0.09289861,((Bothrochilus_boa:0.06138615,Liasis_albertisii:0.05013821):0.03171201,(((Liasis_olivaceus:0.02512497,(Liasis_fuscus:0.02569574,Liasis_mackloti:0.006837142):0.04839449):0.009461557,Apodora_papuana:0.07488417):0.02025706,(Antaresia_ramsayi:0.02520289,Antaresia_melanocephalus:0.04226063):0.04379259):0.004359125):0.003586254):0.002317637,((Morelia_spilota:0.02530919,Morelia_bredli:0.02428088):0.02863846,((Morelia_amethi [...]
+tree 98 = (Candoia_aspera:0.4338458,(Loxocemus_bicolor:0.2634376,(((Python_curtus:0.1105749,(Python_sebae:0.09556636,Python_molurus:0.03273635):0.01150569):0.008801576,Python_regius:0.1625759):0.03722187,(((((Morelia_viridisN:0.04247342,Morelia_viridisS:0.03651036):0.03260268,Morelia_carinata:0.07507731):0.00841834,(Antaresia_maculosa:0.0826,(Antaresia_perthensis:0.08604039,(Antaresia_childreni:0.03150816,Antaresia_stimsoni:0.01530298):0.03460219):0.01527724):0.01680449):0.01551863,(((An [...]
+tree 99 = ((((((Morelia_boeleni:0.07341909,((Morelia_spilota:0.02253496,Morelia_bredli:0.02604728):0.03621784,(Morelia_oenpelliensis:0.07207704,(Morelia_tracyae:0.04248111,(((Morelia_kinghorni:0.007760683,Morelia_nauta:0.0199143):0.002944562,Morelia_clastolepis:0.003695029):0.0118863,Morelia_amethistina:0.02630623):0.01516716):0.02096247):0.00987943):0.0225472):0.006581086,(((Apodora_papuana:0.05859656,(Liasis_olivaceus:0.03719666,(Liasis_fuscus:0.01950437,Liasis_mackloti:0.007994642):0. [...]
+tree 100 = (Candoia_aspera:0.4341243,(Loxocemus_bicolor:0.2408998,(((Python_curtus:0.09611847,(Python_sebae:0.1025681,Python_molurus:0.02687551):0.03656667):0.02051083,Python_regius:0.1336665):0.05131532,((Python_timoriensis:0.06018468,Python_reticulatus:0.06436806):0.06278732,(((((Morelia_oenpelliensis:0.06656884,(Morelia_tracyae:0.02729484,(((Morelia_nauta:0.009314223,Morelia_kinghorni:0.01130303):0.003158153,Morelia_clastolepis:0.008433545):0.0182326,Morelia_amethistina:0.02933323):0. [...]
+tree 101 = (Candoia_aspera:0.4509276,((((((Morelia_boeleni:0.07145028,((Morelia_bredli:0.02447092,Morelia_spilota:0.02234732):0.03985019,(Morelia_oenpelliensis:0.06013741,((((Morelia_kinghorni:0.006472318,Morelia_nauta:0.003261386):0.003279182,Morelia_clastolepis:0.003404097):0.01056659,Morelia_tracyae:0.05587622):0.006968234,Morelia_amethistina:0.02026666):0.03641767):0.005075176):0.03110211):0.01046361,((Bothrochilus_boa:0.06200234,Liasis_albertisii:0.05664673):0.04321532,((Apodora_pap [...]
+End;
+
+[Total of 101 trees sourced from:]
+[ ../mb/pythonidae.mb.run1.t: 1001 trees in file, sampling 1 tree of every 10 trees after 0 tree burn-in: 101 trees added (current total = 101 trees) ]
diff --git a/doc/source/examples/pythonidae.mb.run2.t b/doc/source/examples/pythonidae.mb.run2.t
new file mode 100644
index 0000000..2ac1ef6
--- /dev/null
+++ b/doc/source/examples/pythonidae.mb.run2.t
@@ -0,0 +1,108 @@
+#NEXUS
+
+Begin Trees;
+tree 1 = (((Morelia_tracyae:0.1,Python_curtus:0.1):0.1,Morelia_amethistina:0.1):0.1,(Morelia_carinata:0.1,((Morelia_nauta:0.1,((Python_timoriensis:0.1,((Python_molurus:0.1,Antaresia_melanocephalus:0.1):0.1,(Bothrochilus_boa:0.1,Morelia_viridisS:0.1):0.1):0.1):0.1,Loxocemus_bicolor:0.1):0.1):0.1,((Antaresia_ramsayi:0.1,Apodora_papuana:0.1):0.1,(Antaresia_maculosa:0.1,((Antaresia_stimsoni:0.1,Morelia_boeleni:0.1):0.1,(Morelia_clastolepis:0.1,(Antaresia_perthensis:0.1,((((Python_reticulatus [...]
+tree 2 = (Candoia_aspera:0.3855131,(Loxocemus_bicolor:0.2822228,((Python_regius:0.1164919,(Python_curtus:0.1085751,(Python_molurus:0.04505304,Python_sebae:0.08202312):0.01461068):0.02244988):0.03508221,((((Morelia_boeleni:0.08079974,((Morelia_oenpelliensis:0.06094981,(((Morelia_clastolepis:0.003390157,(Morelia_kinghorni:0.008526177,Morelia_nauta:0.01249619):0.005565495):0.02693315,Morelia_amethistina:0.02747574):0.006666887,Morelia_tracyae:0.04692551):0.03276842):0.009764295,(Morelia_bre [...]
+tree 3 = ((Loxocemus_bicolor:0.2995824,((((Python_molurus:0.03562488,Python_sebae:0.06860982):0.02211028,Python_curtus:0.1114662):0.00808162,Python_regius:0.1227432):0.03417956,((((((Morelia_oenpelliensis:0.05441414,(Morelia_tracyae:0.04302678,(Morelia_amethistina:0.02380949,((Morelia_kinghorni:0.006663258,Morelia_nauta:0.008536853):0.00593769,Morelia_clastolepis:0.005436829):0.0149349):0.008467315):0.02762504):0.005437219,(Morelia_bredli:0.03031582,Morelia_spilota:0.02248076):0.03835474 [...]
+tree 4 = (Candoia_aspera:0.4229772,(((((((Antaresia_maculosa:0.08151846,((Antaresia_childreni:0.02408682,Antaresia_stimsoni:0.01523166):0.03935342,Antaresia_perthensis:0.05275257):0.01394556):0.00756796,((Morelia_viridisS:0.04714978,Morelia_viridisN:0.04197429):0.0189726,Morelia_carinata:0.0604007):0.01392866):0.0193024,((Antaresia_ramsayi:0.03355351,Antaresia_melanocephalus:0.05827502):0.05378413,((Liasis_fuscus:0.02072153,Liasis_mackloti:0.006583407):0.04324247,(Liasis_olivaceus:0.0390 [...]
+tree 5 = ((Loxocemus_bicolor:0.277608,(((Python_curtus:0.07919973,(Python_sebae:0.08860816,Python_molurus:0.04352531):0.01387568):0.01960406,Python_regius:0.1222966):0.04252735,((((Morelia_boeleni:0.08085868,(((Apodora_papuana:0.05624252,(Liasis_olivaceus:0.03488931,(Liasis_mackloti:0.008039568,Liasis_fuscus:0.02512498):0.05449861):0.005017927):0.008966744,(Antaresia_ramsayi:0.03119344,Antaresia_melanocephalus:0.04868954):0.04777877):0.01303985,((((Antaresia_childreni:0.02340874,Antaresi [...]
+tree 6 = (Candoia_aspera:0.4528169,((((Python_timoriensis:0.07279624,Python_reticulatus:0.0485015):0.05671684,((((Morelia_bredli:0.0286111,Morelia_spilota:0.01893465):0.03310185,(Morelia_oenpelliensis:0.05903652,((Morelia_tracyae:0.05791632,Morelia_amethistina:0.01176247):0.008955382,(Morelia_nauta:0.00115051,(Morelia_kinghorni:0.009987536,Morelia_clastolepis:0.008054818):0.004003021):0.008157886):0.03900359):0.003814613):0.03100684,((((Antaresia_childreni:0.01984453,Antaresia_stimsoni:0 [...]
+tree 7 = ((Loxocemus_bicolor:0.277552,((((Bothrochilus_boa:0.07198469,Liasis_albertisii:0.03908044):0.03981177,((Morelia_boeleni:0.08086593,((Morelia_oenpelliensis:0.0594705,((((Morelia_nauta:0.01279521,Morelia_kinghorni:0.005758899):0.003873558,Morelia_clastolepis:0.003920603):0.01539564,Morelia_tracyae:0.0437989):0.01199567,Morelia_amethistina:0.02841317):0.03937981):0.00570032,(Morelia_bredli:0.02831014,Morelia_spilota:0.03793336):0.03818073):0.02209914):0.0128329,(((Antaresia_ramsayi [...]
+tree 8 = ((Loxocemus_bicolor:0.2684448,(((Python_timoriensis:0.07653466,Python_reticulatus:0.05814225):0.06185966,(((((Morelia_spilota:0.03155331,Morelia_bredli:0.02986011):0.03092881,(Morelia_oenpelliensis:0.05994963,(((Morelia_clastolepis:0.001045091,(Morelia_kinghorni:0.007617839,Morelia_nauta:0.009509182):0.006632748):0.01687425,Morelia_amethistina:0.02878696):0.004091376,Morelia_tracyae:0.05355693):0.03362626):0.002952035):0.02542518,Morelia_boeleni:0.08002744):0.007212532,(((Antare [...]
+tree 9 = ((Loxocemus_bicolor:0.2458011,Candoia_aspera:0.4227715):0.0271417,((Python_regius:0.1114543,((Python_molurus:0.03806044,Python_sebae:0.0830271):0.02125727,Python_curtus:0.1194816):0.01721329):0.02776197,((Morelia_boeleni:0.09656272,(((Bothrochilus_boa:0.05184842,Liasis_albertisii:0.05287073):0.02784318,(((Liasis_olivaceus:0.03938482,Apodora_papuana:0.05816622):0.009893014,(Liasis_mackloti:0.01100998,Liasis_fuscus:0.02213265):0.04001528):0.01988333,(Antaresia_ramsayi:0.03540086,A [...]
+tree 10 = ((Loxocemus_bicolor:0.2787033,((Python_regius:0.1481234,(Python_curtus:0.1031887,(Python_sebae:0.09595909,Python_molurus:0.03791429):0.01213063):0.03071488):0.03967552,((Python_timoriensis:0.07096605,Python_reticulatus:0.07573886):0.06926865,((((Morelia_viridisN:0.03663133,Morelia_viridisS:0.05317344):0.0291517,Morelia_carinata:0.06730154):0.02239977,(Antaresia_maculosa:0.07531073,(Antaresia_perthensis:0.07822047,(Antaresia_childreni:0.02278018,Antaresia_stimsoni:0.02043254):0. [...]
+tree 11 = ((((((Morelia_boeleni:0.105931,((Bothrochilus_boa:0.05131693,Liasis_albertisii:0.04545252):0.03797298,((Antaresia_ramsayi:0.02228446,Antaresia_melanocephalus:0.03885661):0.05087393,((Liasis_olivaceus:0.0281452,(Liasis_fuscus:0.02249635,Liasis_mackloti:0.01251578):0.04465905):0.006607861,Apodora_papuana:0.05587392):0.008114536):0.01409248):0.002334905):0.005272702,((((Morelia_viridisS:0.03675911,Morelia_viridisN:0.02198791):0.0215533,Morelia_carinata:0.06453783):0.005055946,(Ant [...]
+tree 12 = ((Loxocemus_bicolor:0.2458238,((Python_regius:0.1106511,(Python_curtus:0.1001811,(Python_molurus:0.04448544,Python_sebae:0.07099682):0.008173695):0.02536996):0.04260848,((Python_timoriensis:0.06684263,Python_reticulatus:0.03900576):0.06959,(((Morelia_carinata:0.05905979,(Morelia_viridisS:0.04271627,Morelia_viridisN:0.03918736):0.04180082):0.00903691,((Antaresia_perthensis:0.06119587,(Antaresia_stimsoni:0.01571635,Antaresia_childreni:0.02290186):0.04092202):0.003749944,Antaresia [...]
+tree 13 = ((((Python_regius:0.125521,(Python_curtus:0.1073245,(Python_molurus:0.03657539,Python_sebae:0.06864878):0.03002534):0.01433173):0.04825549,((((Morelia_carinata:0.05949514,(Morelia_viridisS:0.04806101,Morelia_viridisN:0.02823277):0.04405801):0.01385116,(((Antaresia_stimsoni:0.01413133,Antaresia_childreni:0.02703797):0.02343367,Antaresia_perthensis:0.07249178):0.008990339,Antaresia_maculosa:0.06567229):0.01366012):0.01659183,(((Bothrochilus_boa:0.05828401,Liasis_albertisii:0.0525 [...]
+tree 14 = ((Loxocemus_bicolor:0.2311109,((((((Morelia_oenpelliensis:0.07476196,(Morelia_tracyae:0.04489038,(((Morelia_kinghorni:0.008629219,Morelia_nauta:0.01117212):0.001642294,Morelia_clastolepis:0.005837899):0.005027789,Morelia_amethistina:0.02727465):0.007558527):0.03979655):0.004991851,(Morelia_spilota:0.03343332,Morelia_bredli:0.03267572):0.03095787):0.03054975,((Bothrochilus_boa:0.07451251,Liasis_albertisii:0.05084842):0.02736059,Morelia_boeleni:0.08948575):0.003187523):0.00388060 [...]
+tree 15 = ((Loxocemus_bicolor:0.2896212,((Python_regius:0.09775786,(Python_curtus:0.09624462,(Python_sebae:0.07141944,Python_molurus:0.03844634):0.01642442):0.01540055):0.03715156,((Python_timoriensis:0.07277793,Python_reticulatus:0.0524129):0.08165967,(Morelia_boeleni:0.08817638,((((Morelia_carinata:0.06965416,(Morelia_viridisN:0.04107285,Morelia_viridisS:0.05026227):0.02040291):0.01957017,(((Antaresia_childreni:0.02846544,Antaresia_stimsoni:0.01113197):0.05445025,Antaresia_perthensis:0 [...]
+tree 16 = ((Loxocemus_bicolor:0.2332114,((((Python_sebae:0.08310605,Python_molurus:0.03309147):0.02673089,Python_curtus:0.09236202):0.01485251,Python_regius:0.09270653):0.04843605,((Python_timoriensis:0.06307732,Python_reticulatus:0.05258899):0.06886643,((((Morelia_viridisS:0.0484855,Morelia_viridisN:0.03151694):0.02682321,Morelia_carinata:0.06154282):0.01581032,(Antaresia_maculosa:0.07283047,(Antaresia_perthensis:0.07071279,(Antaresia_childreni:0.01633722,Antaresia_stimsoni:0.01847321): [...]
+tree 17 = (Candoia_aspera:0.4823539,(Loxocemus_bicolor:0.2773091,((Python_regius:0.1146737,(Python_curtus:0.1202873,(Python_sebae:0.07350455,Python_molurus:0.04267794):0.01202121):0.02153016):0.04880882,((Python_timoriensis:0.06966236,Python_reticulatus:0.06563854):0.04079652,(((Antaresia_maculosa:0.06757648,((Antaresia_childreni:0.01839516,Antaresia_stimsoni:0.01265365):0.04068759,Antaresia_perthensis:0.07210109):0.007794484):0.009137391,(Morelia_carinata:0.08291748,(Morelia_viridisN:0. [...]
+tree 18 = ((((Python_timoriensis:0.07692143,Python_reticulatus:0.06844929):0.06878282,(((Morelia_carinata:0.06970537,(Morelia_viridisS:0.04785833,Morelia_viridisN:0.03674054):0.02311062):0.01471066,(((Antaresia_childreni:0.02932162,Antaresia_stimsoni:0.0193905):0.03769673,Antaresia_perthensis:0.07640949):0.01295086,Antaresia_maculosa:0.07700848):0.009436204):0.01981386,((((((Morelia_amethistina:0.03582876,((Morelia_nauta:0.00598038,Morelia_kinghorni:0.009249085):0.006765277,Morelia_clast [...]
+tree 19 = (((((Python_timoriensis:0.07824341,Python_reticulatus:0.06773204):0.05436504,((((((Liasis_olivaceus:0.04213837,Apodora_papuana:0.06076422):0.006128772,(Liasis_fuscus:0.01053002,Liasis_mackloti:0.01818946):0.04563388):0.02132318,(Antaresia_ramsayi:0.03897168,Antaresia_melanocephalus:0.03769482):0.04549028):0.01191707,((Morelia_carinata:0.07412641,(Morelia_viridisS:0.06176765,Morelia_viridisN:0.02758298):0.02351254):0.0115188,(((Antaresia_childreni:0.02294752,Antaresia_stimsoni:0 [...]
+tree 20 = ((Loxocemus_bicolor:0.3315088,(((Python_timoriensis:0.08265575,Python_reticulatus:0.04842836):0.06637015,((((Bothrochilus_boa:0.06045392,Liasis_albertisii:0.05932411):0.03647307,Morelia_boeleni:0.09952603):0.002094248,(((((Antaresia_childreni:0.02364124,Antaresia_stimsoni:0.01538722):0.03483375,Antaresia_perthensis:0.07842851):0.01029187,Antaresia_maculosa:0.05794009):0.01418545,(Morelia_carinata:0.06118829,(Morelia_viridisS:0.05108068,Morelia_viridisN:0.03327827):0.02953659):0 [...]
+tree 21 = (Candoia_aspera:0.3879355,(Loxocemus_bicolor:0.2635277,(((Python_timoriensis:0.07488745,Python_reticulatus:0.07165347):0.06286752,(((Bothrochilus_boa:0.05066114,Liasis_albertisii:0.04631989):0.03495751,((Apodora_papuana:0.06336035,((Liasis_fuscus:0.02679166,Liasis_mackloti:0.01508707):0.04512154,Liasis_olivaceus:0.03170831):0.02130021):0.01225882,(Antaresia_ramsayi:0.02715579,Antaresia_melanocephalus:0.03692815):0.05242907):0.006242959):0.001901093,((((Morelia_bredli:0.02877114 [...]
+tree 22 = ((Loxocemus_bicolor:0.3484046,((Python_regius:0.1299487,(Python_curtus:0.08751524,(Python_sebae:0.07268371,Python_molurus:0.04254277):0.02008602):0.01126513):0.04984823,(((((Liasis_albertisii:0.04502909,Bothrochilus_boa:0.05899998):0.0354086,((Apodora_papuana:0.05684881,((Liasis_mackloti:0.01544956,Liasis_fuscus:0.02130693):0.04872514,Liasis_olivaceus:0.03650952):0.005483124):0.01448147,(Antaresia_ramsayi:0.02598716,Antaresia_melanocephalus:0.03041967):0.0519068):0.004927418):0 [...]
+tree 23 = ((Loxocemus_bicolor:0.3065548,(((((((Antaresia_ramsayi:0.03385554,Antaresia_melanocephalus:0.03199838):0.0479804,((Liasis_olivaceus:0.03510994,(Liasis_mackloti:0.005883193,Liasis_fuscus:0.02171762):0.05961237):0.01021559,Apodora_papuana:0.0559756):0.01603474):0.01175948,(Bothrochilus_boa:0.06437692,Liasis_albertisii:0.03322093):0.04863423):0.003630452,((Antaresia_maculosa:0.06591439,((Antaresia_stimsoni:0.02082665,Antaresia_childreni:0.02123506):0.05057495,Antaresia_perthensis: [...]
+tree 24 = (Candoia_aspera:0.4485051,(Loxocemus_bicolor:0.2753823,(((Morelia_boeleni:0.07826681,((((Antaresia_ramsayi:0.02174183,Antaresia_melanocephalus:0.0487837):0.05696255,((Liasis_olivaceus:0.03226077,Apodora_papuana:0.05015353):0.002679578,(Liasis_fuscus:0.02321992,Liasis_mackloti:0.01623561):0.05603043):0.01825385):0.01154037,(Bothrochilus_boa:0.0626663,Liasis_albertisii:0.04735238):0.03097784):0.007929447,((((Morelia_viridisS:0.04523728,Morelia_viridisN:0.02992506):0.02415625,More [...]
+tree 25 = (Candoia_aspera:0.4007296,(Loxocemus_bicolor:0.3023659,((((Liasis_albertisii:0.04484209,Bothrochilus_boa:0.05694912):0.03509212,((((((Antaresia_childreni:0.02381686,Antaresia_stimsoni:0.01965734):0.04160441,Antaresia_perthensis:0.06501918):0.01110509,Antaresia_maculosa:0.06540036):0.01027523,((Morelia_viridisN:0.02966337,Morelia_viridisS:0.04604234):0.01806969,Morelia_carinata:0.06961854):0.008454373):0.01388917,((Antaresia_ramsayi:0.03352314,Antaresia_melanocephalus:0.04315351 [...]
+tree 26 = (((((Python_timoriensis:0.06449001,Python_reticulatus:0.05702399):0.0703105,(((((Antaresia_childreni:0.02325914,Antaresia_stimsoni:0.01170238):0.04134304,Antaresia_perthensis:0.05331392):0.01818774,Antaresia_maculosa:0.09053385):0.0108276,((Morelia_viridisN:0.03599152,Morelia_viridisS:0.04161514):0.02303153,Morelia_carinata:0.07374027):0.00817112):0.02066169,((((Morelia_oenpelliensis:0.05305869,(Morelia_amethistina:0.03233761,(((Morelia_kinghorni:0.005085139,Morelia_clastolepis [...]
+tree 27 = (Candoia_aspera:0.5072671,(Loxocemus_bicolor:0.2771276,(((Python_timoriensis:0.06508521,Python_reticulatus:0.06118689):0.0753944,(Morelia_boeleni:0.06473072,(((Morelia_oenpelliensis:0.06621928,(((Morelia_clastolepis:0.001220996,(Morelia_nauta:0.005497285,Morelia_kinghorni:0.005154365):0.0134602):0.01821363,Morelia_tracyae:0.04629116):0.01280376,Morelia_amethistina:0.01395589):0.05991748):0.002735354,(Morelia_bredli:0.03028891,Morelia_spilota:0.03007576):0.02473189):0.02110337,( [...]
+tree 28 = ((Loxocemus_bicolor:0.2255088,(((Python_timoriensis:0.07514369,Python_reticulatus:0.0635069):0.05347764,(((Bothrochilus_boa:0.06150959,Liasis_albertisii:0.06228012):0.02139156,((Antaresia_ramsayi:0.03413077,Antaresia_melanocephalus:0.0425458):0.05410898,((Liasis_mackloti:0.01188359,Liasis_fuscus:0.02223782):0.0468034,(Liasis_olivaceus:0.03498516,Apodora_papuana:0.06292781):0.01013783):0.01420626):0.008876288):0.003198402,((((Morelia_spilota:0.02490744,Morelia_bredli:0.02845289) [...]
+tree 29 = (Candoia_aspera:0.4094712,(Loxocemus_bicolor:0.2688637,(((Python_timoriensis:0.06294914,Python_reticulatus:0.04985823):0.06440863,(((((Liasis_mackloti:0.01262452,Liasis_fuscus:0.0218546):0.04041808,(Apodora_papuana:0.07252098,Liasis_olivaceus:0.03194157):0.01107863):0.02129059,(Antaresia_ramsayi:0.03048903,Antaresia_melanocephalus:0.04901979):0.03837136):0.007696275,((Morelia_boeleni:0.06125945,((Morelia_oenpelliensis:0.06644476,((Morelia_tracyae:0.03592499,((Morelia_kinghorni: [...]
+tree 30 = ((((Python_regius:0.1118698,(Python_curtus:0.1179117,(Python_molurus:0.0405954,Python_sebae:0.08683961):0.01242979):0.02079802):0.03544352,((Python_timoriensis:0.07695059,Python_reticulatus:0.05132446):0.06203875,((Morelia_boeleni:0.07927639,((Bothrochilus_boa:0.06279745,Liasis_albertisii:0.0399234):0.03098695,(((Morelia_tracyae:0.04101332,(Morelia_amethistina:0.02455261,(Morelia_clastolepis:0.008176434,(Morelia_kinghorni:0.007268036,Morelia_nauta:0.006141666):0.005785571):0.01 [...]
+tree 31 = (Candoia_aspera:0.4832148,(Loxocemus_bicolor:0.3404342,((Python_regius:0.1242429,((Python_sebae:0.07265737,Python_molurus:0.040688):0.01471806,Python_curtus:0.1033765):0.03181258):0.0267147,((Python_timoriensis:0.07822391,Python_reticulatus:0.06789191):0.04945783,(((Antaresia_maculosa:0.07109217,(Antaresia_perthensis:0.06324736,(Antaresia_childreni:0.02405573,Antaresia_stimsoni:0.01254822):0.03062618):0.008852923):0.0138846,((Morelia_viridisN:0.05039706,Morelia_viridisS:0.05209 [...]
+tree 32 = ((((Python_timoriensis:0.07113778,Python_reticulatus:0.05017493):0.06319461,(Morelia_boeleni:0.09190941,((((Antaresia_ramsayi:0.01813054,Antaresia_melanocephalus:0.04870207):0.05265934,((Liasis_olivaceus:0.03281665,Apodora_papuana:0.05671593):0.01603054,(Liasis_fuscus:0.01277387,Liasis_mackloti:0.01399295):0.05404157):0.02837849):0.008398836,(((Antaresia_perthensis:0.07601852,(Antaresia_childreni:0.01693324,Antaresia_stimsoni:0.02617617):0.04277307):0.006759305,Antaresia_maculo [...]
+tree 33 = (Candoia_aspera:0.4186248,(Loxocemus_bicolor:0.3108572,(((Python_timoriensis:0.08343283,Python_reticulatus:0.05111982):0.06141429,(((((Antaresia_childreni:0.02790229,Antaresia_stimsoni:0.01830717):0.03279611,Antaresia_perthensis:0.07987803):0.01287194,Antaresia_maculosa:0.07316171):0.01139226,((Morelia_viridisS:0.05397353,Morelia_viridisN:0.03585342):0.02003358,Morelia_carinata:0.07596764):0.01745623):0.01212332,((Morelia_boeleni:0.08229089,((Bothrochilus_boa:0.06919409,Liasis_ [...]
+tree 34 = ((Loxocemus_bicolor:0.3168767,((((((Bothrochilus_boa:0.05350748,Liasis_albertisii:0.06348142):0.03421965,((Antaresia_ramsayi:0.03163343,Antaresia_melanocephalus:0.03221392):0.04818877,(Apodora_papuana:0.07619766,((Liasis_mackloti:0.0113392,Liasis_fuscus:0.03694665):0.05096152,Liasis_olivaceus:0.02886953):0.01151106):0.02490245):0.007102472):0.00291576,(((Morelia_viridisS:0.04798357,Morelia_viridisN:0.03581978):0.03136736,Morelia_carinata:0.06647472):0.01053297,(Antaresia_maculo [...]
+tree 35 = ((((Python_timoriensis:0.08489141,Python_reticulatus:0.04570306):0.06504991,(((((Morelia_bredli:0.02840635,Morelia_spilota:0.02926897):0.02782231,(Morelia_oenpelliensis:0.08431588,(Morelia_amethistina:0.01708426,((Morelia_nauta:0.003973568,(Morelia_clastolepis:0.01300467,Morelia_kinghorni:0.007130242):0.003691815):0.02251268,Morelia_tracyae:0.06377869):0.0124496):0.03801378):0.004192503):0.02315028,Morelia_boeleni:0.09657489):0.0009557305,(((Antaresia_ramsayi:0.02655588,Antares [...]
+tree 36 = ((((Python_regius:0.1228491,((Python_molurus:0.04996319,Python_sebae:0.07473049):0.02363043,Python_curtus:0.1170907):0.02227195):0.05125574,((Python_timoriensis:0.07688387,Python_reticulatus:0.06000572):0.05840354,((Bothrochilus_boa:0.05783399,Liasis_albertisii:0.04766374):0.04125558,((((Morelia_spilota:0.02963587,Morelia_bredli:0.03057292):0.02700875,(Morelia_oenpelliensis:0.06021393,(Morelia_tracyae:0.04262755,(((Morelia_kinghorni:0.008991961,Morelia_nauta:0.0139383):0.010103 [...]
+tree 37 = ((Candoia_aspera:0.434272,Loxocemus_bicolor:0.2009677):0.0214365,(((Python_timoriensis:0.07098969,Python_reticulatus:0.05926525):0.06595034,(((((Antaresia_maculosa:0.06951293,((Antaresia_childreni:0.02431376,Antaresia_stimsoni:0.02256829):0.03600323,Antaresia_perthensis:0.07393358):0.01268363):0.007852837,((Morelia_viridisS:0.04831104,Morelia_viridisN:0.04151247):0.02619356,Morelia_carinata:0.06205216):0.008328914):0.01571979,((Antaresia_ramsayi:0.02940384,Antaresia_melanocepha [...]
+tree 38 = (((((((Antaresia_ramsayi:0.01789625,Antaresia_melanocephalus:0.0471464):0.0499587,((Liasis_olivaceus:0.03622664,(Liasis_mackloti:0.009455217,Liasis_fuscus:0.03042734):0.0510626):0.01244135,Apodora_papuana:0.06968692):0.02203981):0.005449505,(((Morelia_viridisS:0.05494915,Morelia_viridisN:0.04257566):0.02547149,Morelia_carinata:0.05330917):0.00924233,(Antaresia_maculosa:0.07141571,(Antaresia_perthensis:0.06815105,(Antaresia_stimsoni:0.01596413,Antaresia_childreni:0.02007385):0.0 [...]
+tree 39 = (Candoia_aspera:0.4378878,(((Python_regius:0.1263037,(Python_curtus:0.09068406,(Python_sebae:0.08332073,Python_molurus:0.0359485):0.01736963):0.01742147):0.07144359,((Python_timoriensis:0.06079161,Python_reticulatus:0.04868454):0.09412199,((Morelia_boeleni:0.1011311,((((Morelia_bredli:0.03894734,Morelia_spilota:0.02293709):0.02468898,((Morelia_tracyae:0.04868068,Morelia_amethistina:0.03063431):0.001146831,(Morelia_nauta:0.0113829,(Morelia_kinghorni:0.01139991,Morelia_clastolepi [...]
+tree 40 = ((Loxocemus_bicolor:0.2781926,(((Python_curtus:0.09622619,(Python_sebae:0.08267726,Python_molurus:0.0336874):0.01590162):0.02341003,Python_regius:0.1740821):0.03569153,((((Morelia_boeleni:0.06351209,(Bothrochilus_boa:0.06119387,Liasis_albertisii:0.05070669):0.03869329):0.01018021,((Morelia_bredli:0.0365599,Morelia_spilota:0.03117491):0.02423704,(Morelia_oenpelliensis:0.07314733,((((Morelia_clastolepis:0.0102972,Morelia_kinghorni:0.009981516):0.004881791,Morelia_nauta:0.00162908 [...]
+tree 41 = ((((Python_regius:0.1324015,((Python_sebae:0.07499595,Python_molurus:0.04531034):0.01317716,Python_curtus:0.08063795):0.02063596):0.03899759,((Python_timoriensis:0.04793628,Python_reticulatus:0.06554662):0.06914582,(((((Morelia_viridisS:0.0384385,Morelia_viridisN:0.02843732):0.01649712,Morelia_carinata:0.07616136):0.01154934,(Antaresia_maculosa:0.0688894,(Antaresia_perthensis:0.07525537,(Antaresia_childreni:0.02342297,Antaresia_stimsoni:0.0103098):0.0301651):0.01238226):0.00758 [...]
+tree 42 = (Candoia_aspera:0.3942506,(Loxocemus_bicolor:0.3248309,((((Morelia_boeleni:0.06702698,((Morelia_bredli:0.03352857,Morelia_spilota:0.02423749):0.02958618,(Morelia_oenpelliensis:0.05828146,(((Morelia_nauta:0.007829247,Morelia_kinghorni:0.009557425):0.001428237,Morelia_clastolepis:0.008807874):0.01725166,(Morelia_amethistina:0.01830064,Morelia_tracyae:0.04028426):0.004051179):0.02744396):0.006409835):0.02247126):0.004948553,(((((Antaresia_childreni:0.0254365,Antaresia_stimsoni:0.0 [...]
+tree 43 = (((((Python_timoriensis:0.08248895,Python_reticulatus:0.04622255):0.06467156,((((Antaresia_perthensis:0.06518122,(Antaresia_childreni:0.02174977,Antaresia_stimsoni:0.02278117):0.02851192):0.009920717,Antaresia_maculosa:0.06678848):0.01147817,((Morelia_viridisN:0.03490955,Morelia_viridisS:0.05647367):0.02252135,Morelia_carinata:0.07676534):0.0144076):0.01561488,(((Liasis_albertisii:0.03902664,Bothrochilus_boa:0.07108618):0.03421936,((Antaresia_ramsayi:0.029093,Antaresia_melanoce [...]
+tree 44 = (Candoia_aspera:0.4228648,((((Python_timoriensis:0.06553675,Python_reticulatus:0.050714):0.06988906,(((Morelia_boeleni:0.0890654,((Morelia_spilota:0.02392142,Morelia_bredli:0.02193456):0.03376877,(Morelia_oenpelliensis:0.06419856,((Morelia_amethistina:0.01759292,((Morelia_kinghorni:0.008233864,Morelia_nauta:0.007319697):0.007122637,Morelia_clastolepis:0.002995375):0.01946149):0.004642841,Morelia_tracyae:0.05521657):0.03035044):0.002240405):0.01754026):0.003550021,(Bothrochilus_ [...]
+tree 45 = ((Loxocemus_bicolor:0.2980402,(((Python_timoriensis:0.0792743,Python_reticulatus:0.05323046):0.06059866,(((Morelia_boeleni:0.06727104,((((Morelia_viridisS:0.04189977,Morelia_viridisN:0.03729936):0.02232826,Morelia_carinata:0.0526073):0.0180869,(((Antaresia_stimsoni:0.01263764,Antaresia_childreni:0.01925792):0.03451767,Antaresia_perthensis:0.06742619):0.005139024,Antaresia_maculosa:0.07294127):0.007092231):0.02540082,((Antaresia_ramsayi:0.03761965,Antaresia_melanocephalus:0.0399 [...]
+tree 46 = (Candoia_aspera:0.43241,(Loxocemus_bicolor:0.2980358,(((Python_timoriensis:0.07726999,Python_reticulatus:0.05869396):0.06019174,(((((Bothrochilus_boa:0.06085901,Liasis_albertisii:0.05363194):0.02773079,(Antaresia_ramsayi:0.03121562,Antaresia_melanocephalus:0.03375609):0.0527325):0.00288087,(Apodora_papuana:0.05122583,((Liasis_fuscus:0.01757352,Liasis_mackloti:0.0137212):0.04446291,Liasis_olivaceus:0.0470775):0.01009023):0.0231038):0.005364432,(((Morelia_oenpelliensis:0.06700266 [...]
+tree 47 = ((((Python_regius:0.1096265,(Python_curtus:0.1035287,(Python_molurus:0.04023382,Python_sebae:0.09929416):0.02592233):0.028048):0.05840156,((((((Liasis_albertisii:0.06455336,Bothrochilus_boa:0.0507599):0.02743137,((Apodora_papuana:0.06914814,((Liasis_mackloti:0.01141442,Liasis_fuscus:0.01855708):0.0620712,Liasis_olivaceus:0.03846675):0.0171175):0.0129019,(Antaresia_ramsayi:0.03210937,Antaresia_melanocephalus:0.03694964):0.04985598):0.01385926):0.001769898,Morelia_boeleni:0.08003 [...]
+tree 48 = (((((((Bothrochilus_boa:0.05890789,Liasis_albertisii:0.0609866):0.04237209,(((Antaresia_ramsayi:0.03573155,Antaresia_melanocephalus:0.03963692):0.04796978,((Liasis_fuscus:0.02907654,Liasis_mackloti:0.007246393):0.04074824,(Apodora_papuana:0.06019183,Liasis_olivaceus:0.03074218):0.0179236):0.01904604):0.008197873,((Antaresia_maculosa:0.06474745,((Antaresia_childreni:0.0257656,Antaresia_stimsoni:0.01504488):0.03637985,Antaresia_perthensis:0.06523604):0.007883245):0.009561559,(Mor [...]
+tree 49 = ((Candoia_aspera:0.470702,Loxocemus_bicolor:0.2956604):0.02335185,((Python_regius:0.09584661,((Python_sebae:0.0732703,Python_molurus:0.04720622):0.01763467,Python_curtus:0.117376):0.02483888):0.03117274,((Python_timoriensis:0.08037376,Python_reticulatus:0.04589197):0.05405132,((((Morelia_oenpelliensis:0.07746088,((((Morelia_nauta:0.006172668,Morelia_kinghorni:0.006447997):0.00859351,Morelia_clastolepis:0.002835837):0.02486692,Morelia_tracyae:0.03653335):0.009312386,Morelia_amet [...]
+tree 50 = (Candoia_aspera:0.4437574,(((((Python_sebae:0.08086658,Python_molurus:0.03252307):0.02637126,Python_curtus:0.09904016):0.01531335,Python_regius:0.143583):0.0451362,(((((Antaresia_ramsayi:0.03718754,Antaresia_melanocephalus:0.03961356):0.04265559,((Apodora_papuana:0.05513246,Liasis_olivaceus:0.04102737):0.009581713,(Liasis_fuscus:0.01992797,Liasis_mackloti:0.008490396):0.05762563):0.01375438):0.01045857,(((Antaresia_perthensis:0.06574388,(Antaresia_childreni:0.02109943,Antaresia [...]
+tree 51 = ((((((((Antaresia_maculosa:0.07388047,(Antaresia_perthensis:0.05894037,(Antaresia_stimsoni:0.01014789,Antaresia_childreni:0.02392858):0.03272025):0.0127324):0.005261333,((Morelia_viridisS:0.04814902,Morelia_viridisN:0.03931801):0.02118613,Morelia_carinata:0.06743429):0.01713753):0.01871503,((Morelia_spilota:0.02897399,Morelia_bredli:0.02928582):0.03210568,((((Morelia_nauta:0.01230512,Morelia_kinghorni:0.00972926):0.00676493,Morelia_clastolepis:0.002086417):0.02346979,(Morelia_t [...]
+tree 52 = ((((Python_regius:0.122739,(Python_curtus:0.1320061,(Python_sebae:0.08815482,Python_molurus:0.03987976):0.008925839):0.02849865):0.03213177,(((((Morelia_viridisS:0.05520765,Morelia_viridisN:0.03292917):0.02684103,Morelia_carinata:0.05485755):0.01440183,(((Antaresia_childreni:0.02415124,Antaresia_stimsoni:0.01568463):0.04201286,Antaresia_perthensis:0.06779229):0.006306221,Antaresia_maculosa:0.05300762):0.009265044):0.01964058,(((((Liasis_fuscus:0.01770021,Liasis_mackloti:0.01303 [...]
+tree 53 = (((((Python_curtus:0.09620483,(Python_sebae:0.06334457,Python_molurus:0.0538921):0.01668897):0.01641654,Python_regius:0.1202121):0.03786821,(((((Bothrochilus_boa:0.04681597,Liasis_albertisii:0.04765389):0.03343482,((Antaresia_ramsayi:0.02933084,Antaresia_melanocephalus:0.0393689):0.03713987,((Liasis_olivaceus:0.03347025,Apodora_papuana:0.06483148):0.02100701,(Liasis_fuscus:0.02707484,Liasis_mackloti:0.007527475):0.03672839):0.02435954):0.006702324):0.00436005,((Antaresia_maculo [...]
+tree 54 = ((Loxocemus_bicolor:0.3139416,Candoia_aspera:0.4187669):0.04921504,((((Python_molurus:0.04363615,Python_sebae:0.0972421):0.02066382,Python_curtus:0.09211249):0.01832167,Python_regius:0.1204208):0.06289011,(((Morelia_boeleni:0.07412663,((Morelia_bredli:0.03194801,Morelia_spilota:0.01803354):0.03580703,(Morelia_oenpelliensis:0.07438324,((Morelia_amethistina:0.01798105,(Morelia_clastolepis:0.00212618,(Morelia_kinghorni:0.003500397,Morelia_nauta:0.009602255):0.006876329):0.00788007 [...]
+tree 55 = (Candoia_aspera:0.5149506,(Loxocemus_bicolor:0.250681,((((((Liasis_albertisii:0.05545887,Bothrochilus_boa:0.06595798):0.03867875,((Morelia_oenpelliensis:0.07140551,((Morelia_bredli:0.02271119,Morelia_spilota:0.03039649):0.0216839,((((Morelia_nauta:0.002195662,Morelia_kinghorni:0.007748968):0.007796946,Morelia_clastolepis:0.002227638):0.008569473,Morelia_tracyae:0.02678873):0.005578755,Morelia_amethistina:0.02918388):0.03912099):0.002376657):0.02392946,((((Antaresia_childreni:0. [...]
+tree 56 = (Candoia_aspera:0.4813065,((((((((Antaresia_ramsayi:0.02598769,Antaresia_melanocephalus:0.04109392):0.04781481,(Liasis_albertisii:0.04425997,Bothrochilus_boa:0.06753105):0.03135549):0.004135552,(((Liasis_mackloti:0.01633037,Liasis_fuscus:0.03000881):0.04510967,Liasis_olivaceus:0.04156987):0.0107285,Apodora_papuana:0.06327569):0.02760798):0.0088871,Morelia_boeleni:0.09115155):0.001504997,(((Antaresia_maculosa:0.07446543,(Antaresia_perthensis:0.07047215,(Antaresia_childreni:0.030 [...]
+tree 57 = ((Loxocemus_bicolor:0.2977819,(((((((Morelia_viridisN:0.02454729,Morelia_viridisS:0.04512018):0.02222539,Morelia_carinata:0.06200557):0.01268124,(((Antaresia_childreni:0.02473758,Antaresia_stimsoni:0.01624843):0.04789705,Antaresia_perthensis:0.07009377):0.008545523,Antaresia_maculosa:0.08388544):0.007578775):0.01782145,(((Morelia_tracyae:0.04447008,((Morelia_kinghorni:0.0066021,Morelia_nauta:0.00551457):0.01082195,Morelia_clastolepis:0.001697196):0.0059375):0.004922532,Morelia_ [...]
+tree 58 = ((((Python_regius:0.1200325,((Python_molurus:0.03591442,Python_sebae:0.0707592):0.01993933,Python_curtus:0.1045325):0.01853241):0.02707036,((Python_timoriensis:0.06786831,Python_reticulatus:0.0322445):0.08257587,(((((((Morelia_clastolepis:0.004018062,(Morelia_nauta:0.01075374,Morelia_kinghorni:0.006308406):0.005730398):0.01861746,Morelia_amethistina:0.0232728):0.00946243,Morelia_tracyae:0.03755202):0.02764432,Morelia_oenpelliensis:0.05912281):0.01292832,(Morelia_bredli:0.030169 [...]
+tree 59 = (((Python_regius:0.1335572,(Python_curtus:0.09927795,(Python_molurus:0.03561646,Python_sebae:0.07830893):0.01697234):0.02824134):0.04031233,(((((Bothrochilus_boa:0.07133514,Liasis_albertisii:0.04973854):0.04588717,((Antaresia_ramsayi:0.03310703,Antaresia_melanocephalus:0.03941893):0.05882317,(Apodora_papuana:0.07295411,((Liasis_fuscus:0.01782433,Liasis_mackloti:0.01512761):0.05644884,Liasis_olivaceus:0.02642465):0.01125838):0.01140388):0.004935439):0.0101065,(Morelia_boeleni:0. [...]
+tree 60 = (Candoia_aspera:0.424371,(Loxocemus_bicolor:0.3179898,((Python_regius:0.1449508,(Python_curtus:0.1015309,(Python_molurus:0.04869052,Python_sebae:0.09786226):0.01882742):0.02882628):0.03786653,((Python_timoriensis:0.05229728,Python_reticulatus:0.06380921):0.05973712,(((((Antaresia_ramsayi:0.02761203,Antaresia_melanocephalus:0.03813273):0.05830588,((Liasis_olivaceus:0.04693766,(Liasis_fuscus:0.01833181,Liasis_mackloti:0.01696186):0.04960517):0.01702719,Apodora_papuana:0.05487917) [...]
+tree 61 = ((((Python_regius:0.1228097,((Python_molurus:0.04315806,Python_sebae:0.07493422):0.009730695,Python_curtus:0.1072914):0.02183078):0.04432623,((Python_timoriensis:0.08054852,Python_reticulatus:0.05729257):0.06275327,(((Morelia_carinata:0.0753587,(Morelia_viridisS:0.05041945,Morelia_viridisN:0.03648667):0.02461579):0.0115986,(((Antaresia_stimsoni:0.01180749,Antaresia_childreni:0.02516542):0.03703654,Antaresia_perthensis:0.06873682):0.01651968,Antaresia_maculosa:0.07148424):0.0126 [...]
+tree 62 = (((((Python_curtus:0.08909703,(Python_molurus:0.03328324,Python_sebae:0.0715058):0.01824785):0.007554571,Python_regius:0.1454022):0.03384772,((((((Morelia_viridisS:0.05543826,Morelia_viridisN:0.03630355):0.01622295,Morelia_carinata:0.07316485):0.01367374,(Antaresia_maculosa:0.06581564,((Antaresia_childreni:0.0310494,Antaresia_stimsoni:0.01245022):0.03712444,Antaresia_perthensis:0.06435933):0.01742895):0.004963226):0.02577853,(Morelia_boeleni:0.09387192,((Morelia_bredli:0.029732 [...]
+tree 63 = (Candoia_aspera:0.4745529,(((Python_regius:0.1135173,(Python_curtus:0.1012712,(Python_molurus:0.04331739,Python_sebae:0.07786967):0.01125215):0.03818491):0.03021289,((Python_timoriensis:0.07194533,Python_reticulatus:0.04773572):0.07113062,((((Morelia_spilota:0.02865189,Morelia_bredli:0.03249746):0.02966558,(((Morelia_tracyae:0.04997421,(Morelia_nauta:0.004001658,(Morelia_clastolepis:0.006449675,Morelia_kinghorni:0.007933115):0.006298222):0.02287617):0.00191708,Morelia_amethisti [...]
+tree 64 = ((((((((Morelia_viridisS:0.04558879,Morelia_viridisN:0.03250553):0.01332789,Morelia_carinata:0.0753067):0.01978827,(Antaresia_maculosa:0.07612639,((Antaresia_stimsoni:0.0233159,Antaresia_childreni:0.02086903):0.02612487,Antaresia_perthensis:0.06545956):0.01015421):0.01066412):0.01823755,((((Antaresia_ramsayi:0.04189037,Antaresia_melanocephalus:0.04454489):0.04572384,(((Liasis_mackloti:0.004894573,Liasis_fuscus:0.01614364):0.05091172,Liasis_olivaceus:0.02327591):0.01019945,Apodo [...]
+tree 65 = ((((Python_regius:0.1065045,((Python_molurus:0.02903804,Python_sebae:0.07866277):0.01231844,Python_curtus:0.1314011):0.027084):0.03262673,((Python_timoriensis:0.06663792,Python_reticulatus:0.0784987):0.07588092,(((Bothrochilus_boa:0.07634096,Liasis_albertisii:0.04452527):0.02809324,(((Morelia_oenpelliensis:0.06509642,(Morelia_tracyae:0.0458238,(((Morelia_nauta:0.0121269,Morelia_kinghorni:0.006513538):0.009945509,Morelia_clastolepis:0.01318087):0.02319886,Morelia_amethistina:0.0 [...]
+tree 66 = (Candoia_aspera:0.5274461,(((Python_timoriensis:0.06902759,Python_reticulatus:0.06108568):0.06781643,((((((Antaresia_childreni:0.02527601,Antaresia_stimsoni:0.01240928):0.04320152,Antaresia_perthensis:0.06080926):0.01246198,Antaresia_maculosa:0.06380906):0.009021079,((Morelia_viridisS:0.07220582,Morelia_viridisN:0.0327611):0.01721496,Morelia_carinata:0.06624881):0.007599396):0.02715264,(((Bothrochilus_boa:0.06799821,Liasis_albertisii:0.06149457):0.0249193,((Antaresia_ramsayi:0. [...]
+tree 67 = ((((Python_regius:0.143336,((Python_molurus:0.05192443,Python_sebae:0.08336195):0.02057265,Python_curtus:0.09573655):0.02544282):0.04651235,((((((((Liasis_fuscus:0.01732133,Liasis_mackloti:0.01183748):0.0552515,Liasis_olivaceus:0.03096171):0.01753867,Apodora_papuana:0.06168452):0.02608198,(Antaresia_ramsayi:0.02889135,Antaresia_melanocephalus:0.03600234):0.05855319):0.009485744,(Bothrochilus_boa:0.06625782,Liasis_albertisii:0.04617117):0.03191976):0.002849137,(Morelia_boeleni:0 [...]
+tree 68 = ((Loxocemus_bicolor:0.2719207,Candoia_aspera:0.4906847):0.04718485,((Python_regius:0.1124593,((Python_molurus:0.0509666,Python_sebae:0.07738415):0.008989942,Python_curtus:0.09897486):0.03516115):0.03687499,((Python_timoriensis:0.06800461,Python_reticulatus:0.06016279):0.06116017,(((Liasis_albertisii:0.06944622,Bothrochilus_boa:0.05920302):0.03120987,(((Liasis_olivaceus:0.05416647,(Liasis_mackloti:0.01101725,Liasis_fuscus:0.01722447):0.05929124):0.008101523,Apodora_papuana:0.067 [...]
+tree 69 = ((((Python_timoriensis:0.07901859,Python_reticulatus:0.04337921):0.05715234,((((Bothrochilus_boa:0.07370024,Liasis_albertisii:0.05462079):0.02485725,((((Liasis_fuscus:0.02829583,Liasis_mackloti:0.009995232):0.04878129,Liasis_olivaceus:0.03673495):0.01427831,Apodora_papuana:0.05211239):0.01637035,(Antaresia_ramsayi:0.03273632,Antaresia_melanocephalus:0.03475002):0.04373889):0.009508868):0.004413184,(((Morelia_oenpelliensis:0.05244673,(Morelia_tracyae:0.02733242,(((Morelia_nauta: [...]
+tree 70 = (Candoia_aspera:0.4106247,(Loxocemus_bicolor:0.2331907,((((((((Antaresia_childreni:0.02215029,Antaresia_stimsoni:0.009244344):0.03566251,Antaresia_perthensis:0.07581442):0.01354362,Antaresia_maculosa:0.06464157):0.00675194,((Morelia_viridisN:0.03936049,Morelia_viridisS:0.04292648):0.01785218,Morelia_carinata:0.05712708):0.01866541):0.01866694,((Morelia_oenpelliensis:0.06002338,((Morelia_tracyae:0.03985103,Morelia_amethistina:0.03190266):0.006075925,(Morelia_nauta:0.002771607,(M [...]
+tree 71 = (Candoia_aspera:0.4427327,((((Python_timoriensis:0.07904484,Python_reticulatus:0.05445465):0.06086328,((((Morelia_spilota:0.02948698,Morelia_bredli:0.04304928):0.02883553,(((Morelia_tracyae:0.03379621,((Morelia_clastolepis:0.00613919,Morelia_kinghorni:0.005279703):0.007664687,Morelia_nauta:0.002861372):0.02350224):0.004278877,Morelia_amethistina:0.02920345):0.03896447,Morelia_oenpelliensis:0.05935515):0.004404024):0.02608104,(((Antaresia_perthensis:0.06830344,(Antaresia_stimson [...]
+tree 72 = ((Loxocemus_bicolor:0.3099623,((Python_regius:0.1377144,((Python_sebae:0.06061368,Python_molurus:0.04563488):0.03354163,Python_curtus:0.08988597):0.02558453):0.03970288,((Python_timoriensis:0.08715385,Python_reticulatus:0.05952325):0.08806109,((((Morelia_bredli:0.03275956,Morelia_spilota:0.025875):0.03529276,(Morelia_oenpelliensis:0.06489886,(Morelia_tracyae:0.03946847,(Morelia_amethistina:0.02457683,(Morelia_clastolepis:0.003320996,(Morelia_kinghorni:0.002810903,Morelia_nauta: [...]
+tree 73 = (Candoia_aspera:0.4348448,((((((Morelia_boeleni:0.07664532,((Morelia_bredli:0.03846738,Morelia_spilota:0.03022961):0.03426844,(Morelia_oenpelliensis:0.05603453,(((Morelia_nauta:0.008166571,(Morelia_clastolepis:0.009570103,Morelia_kinghorni:0.008340767):0.002700153):0.01590274,Morelia_amethistina:0.02135429):0.003489883,Morelia_tracyae:0.02901899):0.0362906):0.004718814):0.03173114):0.003755895,(((Antaresia_ramsayi:0.02953742,Antaresia_melanocephalus:0.03996934):0.05390883,(Both [...]
+tree 74 = (Candoia_aspera:0.4742071,((((Python_timoriensis:0.06586218,Python_reticulatus:0.06259166):0.06589371,((((Antaresia_maculosa:0.07162944,((Antaresia_childreni:0.021786,Antaresia_stimsoni:0.01148695):0.02689859,Antaresia_perthensis:0.07131155):0.005756796):0.01229476,((Morelia_viridisS:0.04413955,Morelia_viridisN:0.05235628):0.02208944,Morelia_carinata:0.07115939):0.01801993):0.01841187,((Apodora_papuana:0.06862411,(Liasis_olivaceus:0.03289194,(Liasis_fuscus:0.02546166,Liasis_mac [...]
+tree 75 = ((Loxocemus_bicolor:0.2760261,(((Python_timoriensis:0.05565623,Python_reticulatus:0.0666305):0.06163908,(((((Morelia_bredli:0.02809343,Morelia_spilota:0.02234101):0.04248839,((Morelia_tracyae:0.03855563,(Morelia_amethistina:0.0194194,((Morelia_kinghorni:0.007230503,Morelia_nauta:0.01597799):0.003673781,Morelia_clastolepis:0.002712423):0.01983554):0.00770982):0.0348642,Morelia_oenpelliensis:0.0778497):0.007505558):0.02584305,Morelia_boeleni:0.08255867):0.004142258,((Bothrochilus [...]
+tree 76 = (Candoia_aspera:0.4489852,(Loxocemus_bicolor:0.2784511,((((((Morelia_viridisS:0.0478914,Morelia_viridisN:0.03382179):0.01390197,Morelia_carinata:0.06547169):0.01186083,(((Antaresia_childreni:0.02366335,Antaresia_stimsoni:0.01261576):0.0373104,Antaresia_perthensis:0.0698693):0.009951808,Antaresia_maculosa:0.0621237):0.009221396):0.01693595,((Morelia_boeleni:0.0985658,((Bothrochilus_boa:0.06368156,Liasis_albertisii:0.05812566):0.04186163,(((Liasis_olivaceus:0.03976913,(Liasis_fus [...]
+tree 77 = ((Loxocemus_bicolor:0.2980009,(((Python_timoriensis:0.07492163,Python_reticulatus:0.05726328):0.06871158,(((((Morelia_viridisS:0.04674211,Morelia_viridisN:0.03242869):0.02905742,Morelia_carinata:0.0640633):0.01010355,(((Antaresia_childreni:0.01838723,Antaresia_stimsoni:0.01565296):0.04438823,Antaresia_perthensis:0.08484077):0.01913492,Antaresia_maculosa:0.07844496):0.003377368):0.01497016,((Bothrochilus_boa:0.06132987,Liasis_albertisii:0.03927122):0.03479677,((((Liasis_fuscus:0 [...]
+tree 78 = (Candoia_aspera:0.3932904,(Loxocemus_bicolor:0.2749132,(((Python_curtus:0.09183693,(Python_sebae:0.07637954,Python_molurus:0.03762318):0.02709624):0.02812698,Python_regius:0.1154282):0.03559674,(((((((Antaresia_stimsoni:0.01446194,Antaresia_childreni:0.02531378):0.04006727,Antaresia_perthensis:0.06666538):0.005905826,Antaresia_maculosa:0.07190364):0.006189105,(Morelia_carinata:0.06419419,(Morelia_viridisS:0.05339065,Morelia_viridisN:0.02297795):0.03153359):0.01090103):0.0168032 [...]
+tree 79 = ((Candoia_aspera:0.4868107,Loxocemus_bicolor:0.2301868):0.06696102,(((Python_curtus:0.09902725,(Python_sebae:0.07607854,Python_molurus:0.03042101):0.01754461):0.01284939,Python_regius:0.1175586):0.05074195,((((((((Morelia_nauta:0.008126706,Morelia_kinghorni:0.005022595):0.002074662,Morelia_clastolepis:0.004239987):0.01940375,Morelia_amethistina:0.02282263):0.006885183,Morelia_tracyae:0.02829901):0.05350385,Morelia_oenpelliensis:0.07531652):0.003938017,(Morelia_spilota:0.0258772 [...]
+tree 80 = ((((Python_regius:0.1445806,(Python_curtus:0.1271761,(Python_sebae:0.08660773,Python_molurus:0.03642296):0.02308224):0.03951594):0.03432268,((Python_timoriensis:0.05008058,Python_reticulatus:0.08503014):0.07968167,((((((Liasis_fuscus:0.02531552,Liasis_mackloti:0.00943019):0.05551294,Liasis_olivaceus:0.03532999):0.01382014,Apodora_papuana:0.06672151):0.01296964,(Antaresia_ramsayi:0.02616247,Antaresia_melanocephalus:0.04629737):0.03687331):0.01178619,(((Morelia_viridisS:0.0455379 [...]
+tree 81 = (Candoia_aspera:0.4575053,((((Python_timoriensis:0.06194463,Python_reticulatus:0.04911996):0.06573001,((Morelia_boeleni:0.07789338,((Morelia_bredli:0.02561557,Morelia_spilota:0.02375866):0.04673673,(Morelia_oenpelliensis:0.06590139,((((Morelia_kinghorni:0.004965855,Morelia_clastolepis:0.01490083):0.0009494989,Morelia_nauta:0.002852371):0.0104046,Morelia_amethistina:0.02724809):0.01478698,Morelia_tracyae:0.03441991):0.02561766):0.004808734):0.02705979):0.006449734,(((((Morelia_v [...]
+tree 82 = ((Loxocemus_bicolor:0.2755664,((Python_regius:0.1286608,((Python_sebae:0.08250047,Python_molurus:0.0305042):0.01309972,Python_curtus:0.09564767):0.01610108):0.04804593,((Python_timoriensis:0.07700127,Python_reticulatus:0.0470671):0.07753829,(((Morelia_boeleni:0.09111672,(Morelia_oenpelliensis:0.06892866,(((((Morelia_kinghorni:0.003380163,Morelia_nauta:0.00575853):0.006891641,Morelia_clastolepis:0.007008237):0.01516912,Morelia_amethistina:0.0213821):0.01504177,Morelia_tracyae:0. [...]
+tree 83 = ((Loxocemus_bicolor:0.2393583,((Python_regius:0.1362413,(Python_curtus:0.09981791,(Python_sebae:0.08732982,Python_molurus:0.03965756):0.02106739):0.01324574):0.04504424,((((((Liasis_mackloti:0.01502157,Liasis_fuscus:0.02332284):0.04350871,(Apodora_papuana:0.06142209,Liasis_olivaceus:0.03680022):0.01491341):0.0237058,(Antaresia_ramsayi:0.03335222,Antaresia_melanocephalus:0.03614687):0.05208556):0.01276464,(Morelia_boeleni:0.08159735,(((Morelia_spilota:0.02708637,Morelia_bredli:0 [...]
+tree 84 = ((Loxocemus_bicolor:0.2557088,((((Python_sebae:0.08304661,Python_molurus:0.04328625):0.02159618,Python_curtus:0.06878299):0.02377922,Python_regius:0.10624):0.02370423,((((Morelia_carinata:0.06516085,(Morelia_viridisS:0.03397963,Morelia_viridisN:0.03858964):0.01392689):0.01952269,(Antaresia_maculosa:0.05096261,((Antaresia_stimsoni:0.01894518,Antaresia_childreni:0.02643887):0.03187506,Antaresia_perthensis:0.06112747):0.01289749):0.007764347):0.01485805,((Morelia_boeleni:0.0785772 [...]
+tree 85 = (Candoia_aspera:0.446456,(((((Python_sebae:0.0664236,Python_molurus:0.05748319):0.01384624,Python_curtus:0.1246156):0.01920644,Python_regius:0.09562755):0.03579852,((Python_timoriensis:0.06854112,Python_reticulatus:0.07423406):0.06198347,(((((Antaresia_maculosa:0.0753139,(Antaresia_perthensis:0.05776215,(Antaresia_childreni:0.0188791,Antaresia_stimsoni:0.01203823):0.03974037):0.01302284):0.01107751,((Morelia_viridisS:0.04315065,Morelia_viridisN:0.0447115):0.01881795,Morelia_car [...]
+tree 86 = ((((Python_regius:0.1195237,((Python_molurus:0.05424041,Python_sebae:0.06745426):0.01500456,Python_curtus:0.09363999):0.01373712):0.07174102,((Python_timoriensis:0.05317857,Python_reticulatus:0.07866524):0.06443481,((((Antaresia_perthensis:0.06871814,(Antaresia_childreni:0.01631375,Antaresia_stimsoni:0.01858184):0.03573909):0.00997242,Antaresia_maculosa:0.06653337):0.01156942,((Morelia_viridisS:0.05041764,Morelia_viridisN:0.03210906):0.02278023,Morelia_carinata:0.06324297):0.01 [...]
+tree 87 = (Candoia_aspera:0.4403734,(Loxocemus_bicolor:0.2585825,(((Python_curtus:0.09600904,(Python_molurus:0.04325963,Python_sebae:0.06144044):0.02193821):0.03512357,Python_regius:0.1158208):0.0226537,((((((Antaresia_ramsayi:0.02896362,Antaresia_melanocephalus:0.04374086):0.05633091,(Bothrochilus_boa:0.07453325,Liasis_albertisii:0.04886916):0.0406942):0.00253113,(Apodora_papuana:0.06827618,((Liasis_fuscus:0.0190238,Liasis_mackloti:0.01240811):0.04949205,Liasis_olivaceus:0.04193583):0.0 [...]
+tree 88 = ((((Python_regius:0.1262411,(Python_curtus:0.1008612,(Python_sebae:0.07364004,Python_molurus:0.03651178):0.02407779):0.0260306):0.02824526,((((Morelia_boeleni:0.07070576,((Morelia_bredli:0.03022369,Morelia_spilota:0.02205107):0.02783955,(Morelia_oenpelliensis:0.0634195,(Morelia_tracyae:0.03485566,((Morelia_clastolepis:0.007226027,(Morelia_nauta:0.01372385,Morelia_kinghorni:0.01134917):0.003584604):0.009064959,Morelia_amethistina:0.02136233):0.006429469):0.02626722):0.007624656) [...]
+tree 89 = ((Loxocemus_bicolor:0.2893372,((Python_regius:0.1073912,(Python_curtus:0.09348169,(Python_sebae:0.08938309,Python_molurus:0.03446394):0.01312052):0.02814457):0.0358765,((((((((Morelia_tracyae:0.03510133,((Morelia_clastolepis:0.0046623,Morelia_kinghorni:0.001573419):0.006380224,Morelia_nauta:0.005709148):0.01415564):0.004559492,Morelia_amethistina:0.02868146):0.03694752,Morelia_oenpelliensis:0.06467572):0.009860806,(Morelia_spilota:0.02221128,Morelia_bredli:0.02633986):0.0298753 [...]
+tree 90 = (((((Python_timoriensis:0.09696215,Python_reticulatus:0.05811112):0.0655036,((((((Liasis_olivaceus:0.0365148,Apodora_papuana:0.06857998):0.01276053,(Liasis_mackloti:0.008411,Liasis_fuscus:0.01901003):0.04816006):0.02602376,(Antaresia_ramsayi:0.03002571,Antaresia_melanocephalus:0.03848471):0.03704017):0.007975364,((((Antaresia_stimsoni:0.01721904,Antaresia_childreni:0.02350975):0.02876343,Antaresia_perthensis:0.07909582):0.01076695,Antaresia_maculosa:0.07836454):0.006590077,(Mor [...]
+tree 91 = (Candoia_aspera:0.4052354,(((Python_regius:0.1509565,(Python_curtus:0.07502647,(Python_molurus:0.03578726,Python_sebae:0.08224294):0.02128537):0.02903882):0.04946381,((Python_timoriensis:0.07174778,Python_reticulatus:0.05925871):0.07438774,(((((Antaresia_ramsayi:0.0296355,Antaresia_melanocephalus:0.03872158):0.05083471,(Bothrochilus_boa:0.04803823,Liasis_albertisii:0.04184282):0.03385926):0.006801014,(((Liasis_fuscus:0.01315756,Liasis_mackloti:0.01757213):0.04596919,Liasis_oliv [...]
+tree 92 = ((Candoia_aspera:0.4832728,Loxocemus_bicolor:0.3133952):0.05045952,(((Python_curtus:0.1003648,(Python_sebae:0.07701747,Python_molurus:0.04519635):0.02379189):0.018113,Python_regius:0.1339096):0.04035469,((Python_timoriensis:0.0802541,Python_reticulatus:0.05762582):0.07422132,((Morelia_boeleni:0.07869006,(((Morelia_oenpelliensis:0.06127573,(((Morelia_clastolepis:0.003375919,(Morelia_nauta:0.005666266,Morelia_kinghorni:0.0128026):0.002713472):0.007898866,Morelia_amethistina:0.030 [...]
+tree 93 = ((((Python_regius:0.1272317,(Python_curtus:0.09829021,(Python_sebae:0.09489939,Python_molurus:0.04318147):0.01798881):0.034006):0.0462139,((Python_timoriensis:0.05756501,Python_reticulatus:0.05855822):0.07458395,(((((Morelia_carinata:0.0619902,(Morelia_viridisS:0.05075946,Morelia_viridisN:0.03469798):0.02745101):0.007211946,(((Antaresia_stimsoni:0.0163066,Antaresia_childreni:0.02426005):0.03499836,Antaresia_perthensis:0.06839545):0.01688134,Antaresia_maculosa:0.06387924):0.0061 [...]
+tree 94 = ((Loxocemus_bicolor:0.315418,(((Python_curtus:0.1002578,(Python_sebae:0.06770584,Python_molurus:0.0298364):0.01918509):0.01848734,Python_regius:0.1144905):0.04568069,((Morelia_boeleni:0.08331781,((((Morelia_carinata:0.06687796,(Morelia_viridisS:0.05088015,Morelia_viridisN:0.03185982):0.0174918):0.01702257,(((Antaresia_stimsoni:0.00951792,Antaresia_childreni:0.02495718):0.0336015,Antaresia_perthensis:0.06321583):0.01991029,Antaresia_maculosa:0.0697469):0.006936756):0.01055057,(( [...]
+tree 95 = (((((Python_sebae:0.0744125,Python_molurus:0.03313892):0.01573011,Python_curtus:0.1037118):0.01292763,Python_regius:0.1029389):0.03822617,((Morelia_boeleni:0.08820164,((((Antaresia_maculosa:0.06042653,(Antaresia_perthensis:0.0608496,(Antaresia_childreni:0.0271723,Antaresia_stimsoni:0.01380911):0.03972871):0.01204421):0.007583394,((Morelia_viridisN:0.03192192,Morelia_viridisS:0.05435536):0.02514259,Morelia_carinata:0.07010455):0.01334626):0.01615479,(((Antaresia_ramsayi:0.029544 [...]
+tree 96 = (Candoia_aspera:0.4928541,(Loxocemus_bicolor:0.2816248,(((((Morelia_oenpelliensis:0.08393614,(Morelia_amethistina:0.02446814,(Morelia_tracyae:0.04530357,((Morelia_kinghorni:0.007960707,Morelia_clastolepis:0.01675912):0.004557217,Morelia_nauta:0.008147904):0.01430799):0.004738885):0.0274262):0.003287132,(Morelia_bredli:0.0284451,Morelia_spilota:0.02765243):0.02726073):0.02848068,((((((Morelia_viridisS:0.05303163,Morelia_viridisN:0.03323683):0.01782342,Morelia_carinata:0.06126222 [...]
+tree 97 = (Candoia_aspera:0.4487899,((((Python_timoriensis:0.06146061,Python_reticulatus:0.05946495):0.07191773,((((Morelia_tracyae:0.0347971,(Morelia_amethistina:0.02803992,((Morelia_nauta:0.00500443,Morelia_kinghorni:0.006595522):0.003929872,Morelia_clastolepis:0.007496731):0.013329):0.01062581):0.02225971,Morelia_oenpelliensis:0.05555536):0.006840461,(Morelia_bredli:0.03307905,Morelia_spilota:0.02268619):0.02215481):0.03594669,((((Liasis_albertisii:0.03243545,Bothrochilus_boa:0.052788 [...]
+tree 98 = (((((Python_curtus:0.1013429,(Python_molurus:0.03905245,Python_sebae:0.06607451):0.01882211):0.02961412,Python_regius:0.1080206):0.04239082,((Python_timoriensis:0.05902375,Python_reticulatus:0.0508136):0.06075674,((Morelia_boeleni:0.06727115,((Bothrochilus_boa:0.05342648,Liasis_albertisii:0.05218567):0.02163257,((Antaresia_ramsayi:0.02493475,Antaresia_melanocephalus:0.03825297):0.04441375,((Liasis_olivaceus:0.04086729,Apodora_papuana:0.05537593):0.009908886,(Liasis_fuscus:0.023 [...]
+tree 99 = ((((Python_timoriensis:0.05073709,Python_reticulatus:0.07277766):0.06283212,(Morelia_boeleni:0.08695013,(((((Antaresia_maculosa:0.08512621,(Antaresia_perthensis:0.07026724,(Antaresia_childreni:0.03156755,Antaresia_stimsoni:0.0139927):0.03766207):0.01538491):0.004298772,((Morelia_viridisS:0.03700609,Morelia_viridisN:0.04516503):0.03111741,Morelia_carinata:0.05135428):0.006007194):0.02055132,((((Liasis_fuscus:0.01989677,Liasis_mackloti:0.006589301):0.05645972,Liasis_olivaceus:0.0 [...]
+tree 100 = (Candoia_aspera:0.4332915,((((Python_curtus:0.1152573,(Python_molurus:0.03795271,Python_sebae:0.0840193):0.01046507):0.01900506,Python_regius:0.1219839):0.03456921,((Python_timoriensis:0.05963154,Python_reticulatus:0.06979965):0.0631162,((((((Morelia_oenpelliensis:0.0510031,(Morelia_tracyae:0.05728741,((Morelia_clastolepis:0.00554597,(Morelia_kinghorni:0.007204392,Morelia_nauta:0.01336245):0.004239199):0.01553404,Morelia_amethistina:0.0174348):0.009414604):0.04888199):0.004489 [...]
+tree 101 = ((((Python_timoriensis:0.05801513,Python_reticulatus:0.07722868):0.06553758,((((Morelia_bredli:0.03139869,Morelia_spilota:0.02840328):0.02824259,((((Morelia_clastolepis:0.00205874,(Morelia_nauta:0.01238758,Morelia_kinghorni:0.00354312):0.006115592):0.01059278,Morelia_tracyae:0.04180855):0.0009798313,Morelia_amethistina:0.02698061):0.03620676,Morelia_oenpelliensis:0.07046009):0.01248873):0.02405634,((Morelia_carinata:0.07603419,(Morelia_viridisN:0.02986943,Morelia_viridisS:0.05 [...]
+End;
+
+[Total of 101 trees sourced from:]
+[ ../mb/pythonidae.mb.run2.t: 1001 trees in file, sampling 1 tree of every 10 trees after 0 tree burn-in: 101 trees added (current total = 101 trees) ]
diff --git a/doc/source/examples/pythonidae.mb.run3.t b/doc/source/examples/pythonidae.mb.run3.t
new file mode 100644
index 0000000..fe93e9b
--- /dev/null
+++ b/doc/source/examples/pythonidae.mb.run3.t
@@ -0,0 +1,108 @@
+#NEXUS
+
+Begin Trees;
+tree 1 = (Morelia_kinghorni:0.1,(Liasis_mackloti:0.1,(Morelia_viridisS:0.1,((Python_molurus:0.1,Python_sebae:0.1):0.1,((Antaresia_maculosa:0.1,(Liasis_albertisii:0.1,((Antaresia_stimsoni:0.1,(Python_timoriensis:0.1,((Morelia_tracyae:0.1,(Candoia_aspera:0.1,Bothrochilus_boa:0.1):0.1):0.1,Liasis_olivaceus:0.1):0.1):0.1):0.1,(Antaresia_perthensis:0.1,Morelia_amethistina:0.1):0.1):0.1):0.1):0.1,((Antaresia_ramsayi:0.1,Antaresia_childreni:0.1):0.1,(Morelia_oenpelliensis:0.1,(Morelia_viridisN: [...]
+tree 2 = (Candoia_aspera:0.5104306,(((Python_regius:0.1373124,(Python_curtus:0.1154985,(Python_sebae:0.1021246,Python_molurus:0.03647906):0.01675914):0.03189342):0.05260508,((Python_timoriensis:0.06575276,Python_reticulatus:0.0663581):0.05792938,(((Morelia_carinata:0.06230501,(Morelia_viridisN:0.03212214,Morelia_viridisS:0.04783048):0.02139605):0.008488669,(Antaresia_maculosa:0.06443819,((Antaresia_stimsoni:0.01181279,Antaresia_childreni:0.02349062):0.04648495,Antaresia_perthensis:0.0672 [...]
+tree 3 = (Candoia_aspera:0.3740453,(((Python_regius:0.1306888,(Python_curtus:0.08131422,(Python_molurus:0.04680748,Python_sebae:0.06268339):0.01897984):0.01155706):0.06929659,((Python_reticulatus:0.04885916,Python_timoriensis:0.0770979):0.06620889,(((Liasis_albertisii:0.05905712,Bothrochilus_boa:0.06073066):0.03433056,((((Morelia_amethistina:0.01358769,(Morelia_tracyae:0.03879992,((Morelia_nauta:0.00845468,Morelia_kinghorni:0.007304627):0.005805021,Morelia_clastolepis:0.001895741):0.0074 [...]
+tree 4 = (Candoia_aspera:0.4642525,(Loxocemus_bicolor:0.2569975,(((((((Apodora_papuana:0.0543912,(Liasis_olivaceus:0.0418918,(Liasis_mackloti:0.009608325,Liasis_fuscus:0.01946075):0.05125063):0.01292208):0.01052249,(Antaresia_ramsayi:0.02809281,Antaresia_melanocephalus:0.035138):0.05567057):0.008242803,(Liasis_albertisii:0.04996856,Bothrochilus_boa:0.05982118):0.03842518):0.007145555,((((Antaresia_stimsoni:0.01634421,Antaresia_childreni:0.02856886):0.03126208,Antaresia_perthensis:0.05644 [...]
+tree 5 = (((Python_regius:0.1363268,(Python_curtus:0.1197014,(Python_molurus:0.03440908,Python_sebae:0.07136774):0.02074256):0.009979104):0.03999763,((((Antaresia_maculosa:0.06615887,(Antaresia_perthensis:0.07394163,(Antaresia_childreni:0.02614011,Antaresia_stimsoni:0.01556194):0.03950891):0.00723233):0.006972011,(Morelia_carinata:0.0509172,(Morelia_viridisN:0.03721384,Morelia_viridisS:0.03654155):0.02356716):0.00849585):0.009637364,((Morelia_boeleni:0.0796165,((Morelia_bredli:0.01773138 [...]
+tree 6 = (Candoia_aspera:0.4135338,(((Python_regius:0.1310873,((Python_sebae:0.07075347,Python_molurus:0.02905853):0.01547685,Python_curtus:0.129375):0.01749734):0.0490635,((((Bothrochilus_boa:0.06620556,Liasis_albertisii:0.03982987):0.03954384,(((Morelia_amethistina:0.03540347,((Morelia_clastolepis:0.007300453,(Morelia_nauta:0.01069645,Morelia_kinghorni:0.005075689):0.001247419):0.009339338,Morelia_tracyae:0.06682682):0.008833476):0.03592672,Morelia_oenpelliensis:0.06898142):0.006173093 [...]
+tree 7 = (((((Morelia_boeleni:0.09341667,((((Antaresia_ramsayi:0.02432962,Antaresia_melanocephalus:0.03108267):0.04383269,(Apodora_papuana:0.0682918,((Liasis_fuscus:0.02240199,Liasis_mackloti:0.008644258):0.0368359,Liasis_olivaceus:0.05575301):0.006307167):0.02635447):0.01031992,((Antaresia_maculosa:0.06379196,(Antaresia_perthensis:0.0595648,(Antaresia_childreni:0.0238847,Antaresia_stimsoni:0.01825338):0.03511199):0.01469618):0.007279461,(Morelia_carinata:0.07084697,(Morelia_viridisN:0.0 [...]
+tree 8 = (Candoia_aspera:0.4547772,(Loxocemus_bicolor:0.2395744,((((((((Apodora_papuana:0.06080321,Liasis_olivaceus:0.02978599):0.009394813,(Liasis_mackloti:0.01742433,Liasis_fuscus:0.01806577):0.0566476):0.01527548,(Antaresia_melanocephalus:0.04009299,Antaresia_ramsayi:0.03097279):0.04512079):0.005934537,(Bothrochilus_boa:0.06148062,Liasis_albertisii:0.04125302):0.03211257):0.003801559,((((Antaresia_stimsoni:0.01236615,Antaresia_childreni:0.01700049):0.04247379,Antaresia_perthensis:0.07 [...]
+tree 9 = ((((((((Antaresia_ramsayi:0.03265491,Antaresia_melanocephalus:0.04476174):0.04144292,((Liasis_fuscus:0.02188248,Liasis_mackloti:0.006821209):0.04403209,(Apodora_papuana:0.07247948,Liasis_olivaceus:0.02853124):0.01877607):0.02169384):0.00470111,((Morelia_carinata:0.07416548,(Morelia_viridisN:0.02739861,Morelia_viridisS:0.0555129):0.01684767):0.01573996,(Antaresia_maculosa:0.06232283,((Antaresia_stimsoni:0.01291429,Antaresia_childreni:0.02308267):0.02532498,Antaresia_perthensis:0. [...]
+tree 10 = (Candoia_aspera:0.3561303,(Loxocemus_bicolor:0.2416931,(((Python_reticulatus:0.07008454,Python_timoriensis:0.06291992):0.06767802,(((((Morelia_carinata:0.07114217,(Morelia_viridisN:0.03252794,Morelia_viridisS:0.04486335):0.02321627):0.005808629,(((Antaresia_childreni:0.02245576,Antaresia_stimsoni:0.01635012):0.03085948,Antaresia_perthensis:0.05037886):0.01504499,Antaresia_maculosa:0.06258223):0.003222958):0.0105925,((Antaresia_ramsayi:0.01932455,Antaresia_melanocephalus:0.03296 [...]
+tree 11 = (Candoia_aspera:0.4514987,((((Python_reticulatus:0.05002665,Python_timoriensis:0.07676918):0.07413768,((Liasis_albertisii:0.0645974,Bothrochilus_boa:0.0641873):0.03771287,((Morelia_boeleni:0.09023818,(((Morelia_carinata:0.05665086,(Morelia_viridisN:0.02441905,Morelia_viridisS:0.05618434):0.03360802):0.01153914,(Antaresia_maculosa:0.06928336,(Antaresia_perthensis:0.06768544,(Antaresia_childreni:0.0232448,Antaresia_stimsoni:0.01980798):0.0361576):0.01361446):0.007641934):0.013007 [...]
+tree 12 = (((Python_regius:0.1195811,(Python_curtus:0.101114,(Python_molurus:0.03377527,Python_sebae:0.08745063):0.01391006):0.01872767):0.0423752,((((((Liasis_olivaceus:0.04405596,Apodora_papuana:0.05899017):0.01893467,(Liasis_mackloti:0.01092328,Liasis_fuscus:0.01664283):0.0462348):0.0195773,(Antaresia_melanocephalus:0.03674829,Antaresia_ramsayi:0.02147645):0.04341939):0.005780893,((Morelia_carinata:0.07176092,(Morelia_viridisN:0.03358656,Morelia_viridisS:0.05472175):0.02865343):0.0160 [...]
+tree 13 = ((Loxocemus_bicolor:0.255402,(((Morelia_boeleni:0.07990584,((((((Antaresia_childreni:0.02146837,Antaresia_stimsoni:0.01322464):0.03931901,Antaresia_perthensis:0.06801511):0.01225223,Antaresia_maculosa:0.08151301):0.008640029,(Morelia_carinata:0.05972546,(Morelia_viridisN:0.04594044,Morelia_viridisS:0.05674085):0.0189179):0.007972871):0.01533096,((Morelia_bredli:0.02451975,Morelia_spilota:0.0358741):0.03381143,(Morelia_oenpelliensis:0.05865898,(Morelia_tracyae:0.03683725,((Morel [...]
+tree 14 = ((Candoia_aspera:0.4473626,Loxocemus_bicolor:0.2344362):0.02036165,((((Python_molurus:0.03949217,Python_sebae:0.08887615):0.0135172,Python_regius:0.1395213):0.01295109,Python_curtus:0.09972717):0.05815295,(((((Apodora_papuana:0.05956359,(Liasis_olivaceus:0.04120433,(Liasis_mackloti:0.01339387,Liasis_fuscus:0.01811019):0.05259457):0.01505299):0.02662488,(Antaresia_melanocephalus:0.03133705,Antaresia_ramsayi:0.0411973):0.05179539):0.01417946,((Antaresia_maculosa:0.05978383,(Antar [...]
+tree 15 = (((((Python_curtus:0.1133995,(Python_molurus:0.03883978,Python_sebae:0.08963129):0.02452025):0.02667311,Python_regius:0.09942004):0.04684523,((Python_timoriensis:0.05561288,Python_reticulatus:0.05469003):0.0510733,((Morelia_boeleni:0.08459562,((Morelia_oenpelliensis:0.07649581,(Morelia_tracyae:0.06857963,(Morelia_amethistina:0.03090716,((Morelia_kinghorni:0.01143323,Morelia_clastolepis:0.009882759):0.005533701,Morelia_nauta:0.005203006):0.01649407):0.006763233):0.03048232):0.00 [...]
+tree 16 = (Candoia_aspera:0.454411,((((Python_curtus:0.1047953,(Python_molurus:0.04792883,Python_sebae:0.06590967):0.01822905):0.02141573,Python_regius:0.1076625):0.04547916,((Python_timoriensis:0.0652374,Python_reticulatus:0.05757644):0.06733267,(((Bothrochilus_boa:0.06078097,Liasis_albertisii:0.05895738):0.03564124,((Morelia_spilota:0.03153276,Morelia_bredli:0.03022422):0.03004519,(Morelia_oenpelliensis:0.05195458,(Morelia_tracyae:0.04952684,(Morelia_amethistina:0.0167668,(Morelia_clas [...]
+tree 17 = ((Loxocemus_bicolor:0.2950447,Candoia_aspera:0.4956194):0.03409912,(((Python_reticulatus:0.06170708,Python_timoriensis:0.07679534):0.06662456,((((Liasis_fuscus:0.02247032,Liasis_mackloti:0.01165581):0.05333089,(Liasis_olivaceus:0.03201475,Apodora_papuana:0.07040275):0.01349114):0.01793611,(Antaresia_ramsayi:0.02707999,Antaresia_melanocephalus:0.04144063):0.07271472):0.0143494,((Morelia_boeleni:0.07941718,((Morelia_oenpelliensis:0.07000013,((Morelia_tracyae:0.04297898,(Morelia_c [...]
+tree 18 = (Candoia_aspera:0.4206473,((((((Bothrochilus_boa:0.05545258,Liasis_albertisii:0.05086388):0.02245116,(((((Morelia_tracyae:0.04497596,(Morelia_clastolepis:0.002300433,(Morelia_kinghorni:0.007580557,Morelia_nauta:0.01209723):0.007905989):0.009573244):0.003758006,Morelia_amethistina:0.02453728):0.04117076,Morelia_oenpelliensis:0.05460769):0.009776291,(Morelia_spilota:0.02406363,Morelia_bredli:0.02809877):0.02882019):0.03137162,Morelia_boeleni:0.08935267):0.00152077):0.01108653,((( [...]
+tree 19 = ((Loxocemus_bicolor:0.2758306,(((Morelia_boeleni:0.09712222,((((Apodora_papuana:0.06032311,(Liasis_olivaceus:0.03718835,(Liasis_fuscus:0.01776308,Liasis_mackloti:0.02078969):0.0453022):0.01128282):0.01605221,(Antaresia_ramsayi:0.02804757,Antaresia_melanocephalus:0.03257871):0.06131908):0.008631585,(Liasis_albertisii:0.06289567,Bothrochilus_boa:0.05847094):0.02659356):0.001774679,(((((Antaresia_childreni:0.0293294,Antaresia_stimsoni:0.01419086):0.03654786,Antaresia_perthensis:0. [...]
+tree 20 = (((Python_regius:0.09707175,(Python_curtus:0.105097,(Python_molurus:0.03923706,Python_sebae:0.06965362):0.02029509):0.02375806):0.04989682,((Python_reticulatus:0.06519806,Python_timoriensis:0.06124822):0.07887286,((((((Antaresia_stimsoni:0.01019489,Antaresia_childreni:0.02732365):0.03463165,Antaresia_perthensis:0.06931207):0.0109482,Antaresia_maculosa:0.07265653):0.007259249,((Morelia_viridisS:0.0544687,Morelia_viridisN:0.03351613):0.02469302,Morelia_carinata:0.05918813):0.0079 [...]
+tree 21 = (Candoia_aspera:0.5100953,(((((Python_sebae:0.07591037,Python_molurus:0.04685888):0.0159693,Python_curtus:0.1194869):0.03190542,Python_regius:0.1219057):0.03764082,((Python_reticulatus:0.06955304,Python_timoriensis:0.06829867):0.07106028,(((((Morelia_viridisN:0.04546841,Morelia_viridisS:0.03973028):0.03274359,Morelia_carinata:0.07279532):0.01029072,(Antaresia_maculosa:0.06514227,(Antaresia_perthensis:0.04889998,(Antaresia_childreni:0.02577757,Antaresia_stimsoni:0.01387249):0.03 [...]
+tree 22 = ((Loxocemus_bicolor:0.2604766,Candoia_aspera:0.477939):0.04974364,((Python_regius:0.1139038,(Python_curtus:0.07771172,(Python_molurus:0.0329663,Python_sebae:0.07713101):0.01611444):0.02797897):0.03766492,((Python_reticulatus:0.07004576,Python_timoriensis:0.0529521):0.07873452,(((Liasis_albertisii:0.06200187,Bothrochilus_boa:0.04445638):0.04020958,(Morelia_boeleni:0.09025494,((Antaresia_ramsayi:0.03416303,Antaresia_melanocephalus:0.04850508):0.05644415,(Apodora_papuana:0.0549481 [...]
+tree 23 = ((Loxocemus_bicolor:0.262835,Candoia_aspera:0.4233526):0.05244274,((Python_regius:0.1431143,(Python_curtus:0.1154717,(Python_molurus:0.03285879,Python_sebae:0.08925105):0.0224638):0.03287098):0.04580999,((((Morelia_spilota:0.03001221,Morelia_bredli:0.02447762):0.02814649,(((((Morelia_kinghorni:0.006512097,Morelia_nauta:0.005269942):0.004088566,Morelia_clastolepis:0.006666917):0.01101066,Morelia_tracyae:0.03173798):0.003501345,Morelia_amethistina:0.02439264):0.04355536,Morelia_o [...]
+tree 24 = (Candoia_aspera:0.4612549,(((Python_regius:0.1091584,(Python_curtus:0.09906034,(Python_molurus:0.04003386,Python_sebae:0.08731364):0.007798586):0.02269536):0.0371614,(((((((Antaresia_stimsoni:0.01576134,Antaresia_childreni:0.01874793):0.0397746,Antaresia_perthensis:0.06988391):0.0103778,Antaresia_maculosa:0.07115922):0.01047385,((Morelia_viridisN:0.03623554,Morelia_viridisS:0.05393487):0.02483759,Morelia_carinata:0.06020019):0.01557238):0.01448435,(((Morelia_spilota:0.03095601, [...]
+tree 25 = (Candoia_aspera:0.5061518,(((Python_regius:0.1146303,(Python_curtus:0.09799544,(Python_sebae:0.07231277,Python_molurus:0.03281137):0.02135392):0.02387459):0.0490128,((((((((Morelia_kinghorni:0.008090824,Morelia_nauta:0.00677865):0.004636801,Morelia_clastolepis:0.006176168):0.02159217,(Morelia_amethistina:0.02774497,Morelia_tracyae:0.06031651):0.005094471):0.0378395,Morelia_oenpelliensis:0.06740112):0.002512368,(Morelia_bredli:0.02727586,Morelia_spilota:0.02655475):0.03457449):0 [...]
+tree 26 = (Candoia_aspera:0.4117214,(((((((Morelia_spilota:0.02966092,Morelia_bredli:0.02248443):0.03277605,(((((Morelia_kinghorni:0.008210078,Morelia_nauta:0.007966455):0.004548794,Morelia_clastolepis:0.006720723):0.009598362,Morelia_amethistina:0.02268765):0.007693427,Morelia_tracyae:0.03961947):0.03682493,Morelia_oenpelliensis:0.05115676):0.008050052):0.03203021,((((Antaresia_stimsoni:0.02444624,Antaresia_childreni:0.0252333):0.0318095,Antaresia_perthensis:0.0763034):0.01333969,Antare [...]
+tree 27 = (((((Morelia_boeleni:0.08136462,(((((Liasis_mackloti:0.01469392,Liasis_fuscus:0.02004376):0.04217133,(Liasis_olivaceus:0.04287108,Apodora_papuana:0.04818141):0.01302773):0.02332967,(Antaresia_ramsayi:0.02924354,Antaresia_melanocephalus:0.03579885):0.03955978):0.01125768,(Liasis_albertisii:0.0485576,Bothrochilus_boa:0.05695902):0.02309585):0.001063857,(((Morelia_spilota:0.02560891,Morelia_bredli:0.03012455):0.02098186,((Morelia_tracyae:0.03688196,((Morelia_clastolepis:0.00313740 [...]
+tree 28 = (Candoia_aspera:0.4449872,(((((Python_molurus:0.03453011,Python_sebae:0.08373404):0.008449763,Python_curtus:0.08456065):0.03067617,Python_regius:0.1044486):0.03640394,(((((Antaresia_melanocephalus:0.03645403,Antaresia_ramsayi:0.0313611):0.05991626,((Liasis_olivaceus:0.03968883,Apodora_papuana:0.0610017):0.007050161,(Liasis_mackloti:0.006764323,Liasis_fuscus:0.02324538):0.05418435):0.01189356):0.003175782,(((Morelia_viridisN:0.03366704,Morelia_viridisS:0.04775397):0.01973658,Mor [...]
+tree 29 = ((((Python_regius:0.08981924,((Python_sebae:0.07555498,Python_molurus:0.03563937):0.02936048,Python_curtus:0.08984143):0.009248537):0.03159094,((Python_reticulatus:0.05470638,Python_timoriensis:0.0862496):0.06055097,(((Liasis_albertisii:0.04162999,Bothrochilus_boa:0.0473893):0.0379235,((Antaresia_ramsayi:0.02152948,Antaresia_melanocephalus:0.03427138):0.05178105,((Apodora_papuana:0.05024696,Liasis_olivaceus:0.04916695):0.01541814,(Liasis_fuscus:0.02006362,Liasis_mackloti:0.0109 [...]
+tree 30 = ((((Python_reticulatus:0.05656672,Python_timoriensis:0.0615847):0.06869705,(Morelia_boeleni:0.08434733,(((((Morelia_viridisN:0.02822289,Morelia_viridisS:0.04455322):0.0211784,Morelia_carinata:0.06338099):0.01717543,(Antaresia_maculosa:0.06683547,((Antaresia_childreni:0.01892756,Antaresia_stimsoni:0.01717541):0.03864759,Antaresia_perthensis:0.07570149):0.006255439):0.01023573):0.01303267,((Morelia_oenpelliensis:0.05786269,(Morelia_tracyae:0.03557059,(Morelia_amethistina:0.013780 [...]
+tree 31 = (((((Python_molurus:0.04063373,Python_sebae:0.07514814):0.01706889,Python_curtus:0.08816209):0.02794301,Python_regius:0.126422):0.04901962,((Python_timoriensis:0.07078427,Python_reticulatus:0.06491265):0.06423406,((Bothrochilus_boa:0.05380913,Liasis_albertisii:0.0547089):0.03656184,((((Morelia_spilota:0.02872934,Morelia_bredli:0.02312623):0.03137145,((Morelia_tracyae:0.03510673,((Morelia_clastolepis:0.006385572,(Morelia_nauta:0.007006281,Morelia_kinghorni:0.00643803):0.00337046 [...]
+tree 32 = ((Candoia_aspera:0.455204,Loxocemus_bicolor:0.2917998):0.04406212,(((Python_timoriensis:0.08183563,Python_reticulatus:0.04512444):0.06887124,((Morelia_boeleni:0.07321775,((Bothrochilus_boa:0.05326229,Liasis_albertisii:0.05245776):0.02197908,((Morelia_spilota:0.02674636,Morelia_bredli:0.03649268):0.0215502,(((Morelia_amethistina:0.03120252,((Morelia_kinghorni:0.005435696,Morelia_nauta:0.007088191):0.01118478,Morelia_clastolepis:0.01091913):0.01733689):0.01347604,Morelia_tracyae: [...]
+tree 33 = ((Loxocemus_bicolor:0.2844377,(((Python_curtus:0.1019715,(Python_molurus:0.0429412,Python_sebae:0.08466812):0.02263815):0.02075651,Python_regius:0.1237921):0.03677393,((((Morelia_bredli:0.03119378,Morelia_spilota:0.0233967):0.03652172,(Morelia_oenpelliensis:0.06481902,(Morelia_tracyae:0.04483205,(Morelia_amethistina:0.02436113,((Morelia_kinghorni:0.01747546,Morelia_clastolepis:0.00549594):0.01011681,Morelia_nauta:0.002591119):0.0210983):0.01280651):0.0297356):0.01493592):0.0316 [...]
+tree 34 = (Candoia_aspera:0.4195906,((((Python_timoriensis:0.04978334,Python_reticulatus:0.06443191):0.05665736,(((((Antaresia_perthensis:0.06844294,(Antaresia_stimsoni:0.01290088,Antaresia_childreni:0.02636597):0.03241306):0.01075944,Antaresia_maculosa:0.07323313):0.006654512,(Morelia_carinata:0.0807921,(Morelia_viridisN:0.03638272,Morelia_viridisS:0.04498738):0.02539871):0.00670575):0.02185193,((Antaresia_melanocephalus:0.03991443,Antaresia_ramsayi:0.03267628):0.04173048,((Apodora_papu [...]
+tree 35 = (Candoia_aspera:0.4486796,((((Python_timoriensis:0.06160357,Python_reticulatus:0.05899547):0.06210065,((((Morelia_carinata:0.07039268,(Morelia_viridisN:0.02717392,Morelia_viridisS:0.05285822):0.01629177):0.01622682,((Antaresia_perthensis:0.06306544,(Antaresia_stimsoni:0.01439114,Antaresia_childreni:0.02655157):0.0412621):0.01020385,Antaresia_maculosa:0.06175795):0.004266058):0.01947729,(((Morelia_spilota:0.02145622,Morelia_bredli:0.02229659):0.03046973,Morelia_oenpelliensis:0.0 [...]
+tree 36 = (Candoia_aspera:0.4677103,(((((Python_sebae:0.06512575,Python_molurus:0.03856891):0.02502744,Python_curtus:0.09008242):0.01898717,Python_regius:0.1106015):0.03137335,((Python_reticulatus:0.04563797,Python_timoriensis:0.09058164):0.08538711,((((Liasis_albertisii:0.06089568,Bothrochilus_boa:0.05524926):0.04386574,((Apodora_papuana:0.06280342,(Liasis_olivaceus:0.04780041,(Liasis_mackloti:0.008650393,Liasis_fuscus:0.02488376):0.04392434):0.008986177):0.02061411,(Antaresia_ramsayi:0 [...]
+tree 37 = (Candoia_aspera:0.4769489,((((Python_curtus:0.09808627,(Python_sebae:0.08711612,Python_molurus:0.03361382):0.01601466):0.0235876,Python_regius:0.1375817):0.04256167,((((Morelia_boeleni:0.07606632,((Morelia_spilota:0.03282064,Morelia_bredli:0.02378048):0.02364717,((Morelia_tracyae:0.02877539,((Morelia_nauta:0.00251251,(Morelia_kinghorni:0.008524486,Morelia_clastolepis:0.004699519):0.006302931):0.01200433,Morelia_amethistina:0.02906535):0.004316516):0.02884597,Morelia_oenpelliens [...]
+tree 38 = (Candoia_aspera:0.3773662,(Loxocemus_bicolor:0.2249788,((((Python_sebae:0.06651892,Python_molurus:0.03498734):0.02464912,Python_curtus:0.07673166):0.01052472,Python_regius:0.1129523):0.04464117,((Python_reticulatus:0.05448691,Python_timoriensis:0.06477214):0.07192476,((((Antaresia_ramsayi:0.03292673,Antaresia_melanocephalus:0.04036128):0.04562737,(Apodora_papuana:0.06416247,((Liasis_mackloti:0.01160595,Liasis_fuscus:0.02231178):0.04907739,Liasis_olivaceus:0.03522126):0.00946260 [...]
+tree 39 = (((((Python_timoriensis:0.08188618,Python_reticulatus:0.06378417):0.08001537,(((((Morelia_spilota:0.02781278,Morelia_bredli:0.03197371):0.03154039,((Morelia_tracyae:0.02607517,((Morelia_clastolepis:0.002039059,(Morelia_nauta:0.008565151,Morelia_kinghorni:0.011925):0.001514738):0.02744645,Morelia_amethistina:0.01836739):0.02055508):0.03028903,Morelia_oenpelliensis:0.04405405):0.008923189):0.0305229,(Bothrochilus_boa:0.06679369,Liasis_albertisii:0.06716246):0.02623561):0.00413182 [...]
+tree 40 = (Candoia_aspera:0.4208013,(Loxocemus_bicolor:0.2355042,(((Python_curtus:0.1002398,(Python_molurus:0.04971005,Python_sebae:0.07534479):0.01490631):0.02483788,Python_regius:0.13122):0.04845283,(((((((Antaresia_childreni:0.02476396,Antaresia_stimsoni:0.01173612):0.03504246,Antaresia_perthensis:0.06446316):0.007543391,Antaresia_maculosa:0.05640942):0.007865894,(Morelia_carinata:0.04620256,(Morelia_viridisN:0.03105739,Morelia_viridisS:0.06619639):0.02895136):0.01983984):0.01412999,( [...]
+tree 41 = ((((Python_timoriensis:0.06996509,Python_reticulatus:0.06574926):0.06676801,((((((Antaresia_stimsoni:0.02009407,Antaresia_childreni:0.02232226):0.04431973,Antaresia_perthensis:0.06572423):0.01161341,Antaresia_maculosa:0.07414321):0.006251875,(Morelia_carinata:0.05239441,(Morelia_viridisN:0.03753331,Morelia_viridisS:0.06370906):0.02254906):0.01009813):0.02270443,((Bothrochilus_boa:0.0655155,Liasis_albertisii:0.04513337):0.0402801,((Antaresia_melanocephalus:0.0488194,Antaresia_ra [...]
+tree 42 = ((Loxocemus_bicolor:0.2558653,(((Python_reticulatus:0.06041035,Python_timoriensis:0.0495409):0.05712625,(((((Morelia_oenpelliensis:0.06264648,(Morelia_tracyae:0.03295818,((Morelia_nauta:0.004162228,(Morelia_kinghorni:0.007212826,Morelia_clastolepis:0.005783804):0.004869042):0.007822257,Morelia_amethistina:0.02845611):0.008115491):0.02341889):0.004693379,(Morelia_bredli:0.0245021,Morelia_spilota:0.03264814):0.03222283):0.03051242,Morelia_boeleni:0.08301486):0.002222387,(((Antare [...]
+tree 43 = (Candoia_aspera:0.4912831,(Loxocemus_bicolor:0.3305818,(((Python_timoriensis:0.08964648,Python_reticulatus:0.05531334):0.06829904,((((Antaresia_ramsayi:0.02711878,Antaresia_melanocephalus:0.04111096):0.04432045,((Liasis_olivaceus:0.03420107,Apodora_papuana:0.06162698):0.01501303,(Liasis_fuscus:0.01828714,Liasis_mackloti:0.01248727):0.04523117):0.02292171):0.01276623,(Liasis_albertisii:0.06893302,Bothrochilus_boa:0.06296997):0.0452054):0.006619568,((((Antaresia_perthensis:0.0694 [...]
+tree 44 = ((Candoia_aspera:0.4282801,Loxocemus_bicolor:0.2557565):0.07118735,((((((Morelia_spilota:0.028086,Morelia_bredli:0.03424453):0.04515606,(Morelia_oenpelliensis:0.04825777,(Morelia_tracyae:0.04446028,(Morelia_amethistina:0.02748901,((Morelia_kinghorni:0.007643079,Morelia_clastolepis:0.008964228):0.003093159,Morelia_nauta:0.009113297):0.0223688):0.01055831):0.02851381):0.01327409):0.02105642,Morelia_boeleni:0.1081844):0.0002377406,(((Antaresia_maculosa:0.06232342,((Antaresia_stims [...]
+tree 45 = ((Loxocemus_bicolor:0.2120497,Candoia_aspera:0.4029712):0.01332453,(((Python_reticulatus:0.05061441,Python_timoriensis:0.06134717):0.05990154,(((((Morelia_spilota:0.02971883,Morelia_bredli:0.02865454):0.02874998,(((Morelia_amethistina:0.02414642,((Morelia_kinghorni:0.005704897,Morelia_nauta:0.002618733):0.008513417,Morelia_clastolepis:0.008472477):0.01483608):0.01170621,Morelia_tracyae:0.03162107):0.02879083,Morelia_oenpelliensis:0.05522263):0.001620195):0.01422465,Morelia_boel [...]
+tree 46 = (((((((((Morelia_spilota:0.02655895,Morelia_bredli:0.03511595):0.03519584,(((Morelia_nauta:0.001228679,(Morelia_clastolepis:0.007831828,Morelia_kinghorni:0.004732577):0.009794926):0.0277251,(Morelia_tracyae:0.05416658,Morelia_amethistina:0.02597948):0.00171186):0.04197238,Morelia_oenpelliensis:0.06708394):0.004278351):0.02486795,Morelia_boeleni:0.09303988):0.006333662,(((Apodora_papuana:0.06684645,((Liasis_mackloti:0.01200195,Liasis_fuscus:0.02916414):0.03911876,Liasis_olivaceu [...]
+tree 47 = ((Loxocemus_bicolor:0.2318016,Candoia_aspera:0.4164175):0.03116619,(((Python_timoriensis:0.08144411,Python_reticulatus:0.04430293):0.05610403,((((((Morelia_viridisN:0.03542506,Morelia_viridisS:0.0572556):0.01990815,Morelia_carinata:0.06651258):0.01302045,((Antaresia_perthensis:0.06573364,(Antaresia_stimsoni:0.01771713,Antaresia_childreni:0.0257728):0.04164447):0.02274534,Antaresia_maculosa:0.05726758):0.007223601):0.01653506,((Morelia_bredli:0.03162414,Morelia_spilota:0.0244059 [...]
+tree 48 = ((Loxocemus_bicolor:0.2613632,Candoia_aspera:0.4005454):0.03571461,(((Python_timoriensis:0.06195748,Python_reticulatus:0.0623511):0.05797901,((((Morelia_bredli:0.02536408,Morelia_spilota:0.02897979):0.02486894,(Morelia_oenpelliensis:0.05232564,(Morelia_tracyae:0.0343304,(Morelia_amethistina:0.02615847,((Morelia_kinghorni:0.01046447,Morelia_nauta:0.005524241):0.004390938,Morelia_clastolepis:0.001738627):0.01218575):0.005278279):0.03427753):0.002326722):0.01558027,Morelia_boeleni [...]
+tree 49 = (Candoia_aspera:0.4611198,(((((Python_molurus:0.03619845,Python_sebae:0.0821524):0.01438549,Python_curtus:0.1332483):0.02854173,Python_regius:0.1243103):0.02839463,((Python_reticulatus:0.06396125,Python_timoriensis:0.07307362):0.07136561,(((((Morelia_oenpelliensis:0.06325029,(Morelia_tracyae:0.06238511,((Morelia_nauta:0.004572369,(Morelia_kinghorni:0.005569767,Morelia_clastolepis:0.005787334):0.006707192):0.0199103,Morelia_amethistina:0.02755908):0.004445603):0.04539337):0.0024 [...]
+tree 50 = ((Loxocemus_bicolor:0.2767771,((Python_regius:0.1271252,(Python_curtus:0.09296392,(Python_sebae:0.09383303,Python_molurus:0.03726204):0.01252763):0.01701399):0.03783387,((((((Liasis_fuscus:0.01831367,Liasis_mackloti:0.01081487):0.04196987,(Liasis_olivaceus:0.03974469,Apodora_papuana:0.05023767):0.005300062):0.01275833,(Antaresia_ramsayi:0.02844938,Antaresia_melanocephalus:0.03307184):0.046393):0.004489448,(Liasis_albertisii:0.05207137,Bothrochilus_boa:0.05848669):0.03651395):0. [...]
+tree 51 = ((Loxocemus_bicolor:0.1953644,Candoia_aspera:0.4090651):0.07135976,((Python_regius:0.08788631,(Python_curtus:0.09299281,(Python_sebae:0.08835472,Python_molurus:0.04266492):0.01617591):0.02337683):0.03766133,(((((((Morelia_tracyae:0.02987496,((Morelia_nauta:0.005035972,(Morelia_clastolepis:0.009349248,Morelia_kinghorni:0.011742):0.006892148):0.007373184,Morelia_amethistina:0.02331562):0.01523232):0.03311424,Morelia_oenpelliensis:0.06063279):0.01006941,(Morelia_bredli:0.03232284, [...]
+tree 52 = (Candoia_aspera:0.4675925,(Loxocemus_bicolor:0.3794991,((((((((Liasis_fuscus:0.01909646,Liasis_mackloti:0.0121694):0.04627307,Liasis_olivaceus:0.03963934):0.01307675,Apodora_papuana:0.06285513):0.009469208,(Antaresia_ramsayi:0.03031447,Antaresia_melanocephalus:0.04619856):0.05525247):0.01141626,((Morelia_boeleni:0.07503417,((Morelia_bredli:0.02530584,Morelia_spilota:0.02564753):0.0288385,(((Morelia_amethistina:0.03204628,((Morelia_kinghorni:0.008460891,Morelia_nauta:0.0062635): [...]
+tree 53 = ((Loxocemus_bicolor:0.2333575,(((((Antaresia_maculosa:0.06466482,(Antaresia_perthensis:0.06424014,(Antaresia_stimsoni:0.02296361,Antaresia_childreni:0.01801302):0.03375228):0.01303995):0.007496688,(Morelia_carinata:0.06781954,(Morelia_viridisN:0.03859534,Morelia_viridisS:0.05089484):0.02204771):0.01143718):0.02112735,(((Apodora_papuana:0.06130652,(Liasis_olivaceus:0.0411087,(Liasis_fuscus:0.01402491,Liasis_mackloti:0.01771778):0.05455264):0.006364489):0.02208133,(Antaresia_rams [...]
+tree 54 = ((Loxocemus_bicolor:0.3201657,(((Python_timoriensis:0.0581893,Python_reticulatus:0.05907456):0.06282391,(((Bothrochilus_boa:0.06679214,Liasis_albertisii:0.06431971):0.03525423,((Morelia_spilota:0.02382529,Morelia_bredli:0.03023749):0.04178667,((((Morelia_clastolepis:0.005759701,(Morelia_nauta:0.009415851,Morelia_kinghorni:0.005412919):0.01556562):0.01821965,Morelia_tracyae:0.05033836):0.013102,Morelia_amethistina:0.01994143):0.03911183,Morelia_oenpelliensis:0.06652114):0.004536 [...]
+tree 55 = (Candoia_aspera:0.4363239,(Loxocemus_bicolor:0.274237,(((Python_reticulatus:0.04577835,Python_timoriensis:0.1033163):0.06046119,((((Antaresia_maculosa:0.07537991,(Antaresia_perthensis:0.06182352,(Antaresia_childreni:0.02228823,Antaresia_stimsoni:0.02032132):0.04573505):0.009092116):0.008593031,((Morelia_viridisN:0.02746149,Morelia_viridisS:0.05577087):0.02431754,Morelia_carinata:0.05783515):0.01411612):0.01847424,(Morelia_boeleni:0.08157485,((Apodora_papuana:0.06043711,((Liasis [...]
+tree 56 = ((Loxocemus_bicolor:0.2326946,(((Python_reticulatus:0.04186498,Python_timoriensis:0.08553525):0.0631943,((((Morelia_oenpelliensis:0.06840764,((Morelia_amethistina:0.02496099,(((Morelia_kinghorni:0.008738223,Morelia_clastolepis:0.006100998):0.009642316,Morelia_nauta:0.005706378):0.02156259,Morelia_tracyae:0.05879199):0.002015818):0.03385588,(Morelia_bredli:0.02514599,Morelia_spilota:0.02152035):0.04255435):0.003652759):0.0287093,Morelia_boeleni:0.08673253):0.004519537,(((Antares [...]
+tree 57 = ((((((((Antaresia_maculosa:0.06753452,((Antaresia_stimsoni:0.0174042,Antaresia_childreni:0.02406376):0.03572501,Antaresia_perthensis:0.0853869):0.009132408):0.007193958,((Morelia_viridisS:0.04378142,Morelia_viridisN:0.0372949):0.02842223,Morelia_carinata:0.06825358):0.01252652):0.02117735,((((Morelia_amethistina:0.02015902,Morelia_tracyae:0.05018288):0.006975072,(Morelia_clastolepis:0.008999008,(Morelia_nauta:0.01056224,Morelia_kinghorni:0.006781781):0.004545883):0.0169317):0.0 [...]
+tree 58 = (Candoia_aspera:0.4610956,(((Python_regius:0.1232013,(Python_curtus:0.1051752,(Python_sebae:0.07803735,Python_molurus:0.04395386):0.02047164):0.02053317):0.02733087,((Python_timoriensis:0.0786524,Python_reticulatus:0.05985872):0.05331236,((Morelia_boeleni:0.08349698,(((Bothrochilus_boa:0.0507127,Liasis_albertisii:0.05617265):0.03450997,((Antaresia_melanocephalus:0.03825353,Antaresia_ramsayi:0.02731536):0.05230166,(Apodora_papuana:0.05528345,((Liasis_mackloti:0.01561896,Liasis_f [...]
+tree 59 = ((Loxocemus_bicolor:0.2573866,(((Python_curtus:0.09793072,(Python_sebae:0.08371425,Python_molurus:0.04299976):0.01409514):0.02153569,Python_regius:0.09829285):0.05479509,((Python_timoriensis:0.07710499,Python_reticulatus:0.05454431):0.07863418,(((Morelia_boeleni:0.08457115,((Morelia_spilota:0.03016478,Morelia_bredli:0.02802035):0.03625794,(Morelia_oenpelliensis:0.05700879,((((Morelia_nauta:0.01178942,Morelia_kinghorni:0.008054588):0.002399088,Morelia_clastolepis:0.001528491):0. [...]
+tree 60 = (((((Python_timoriensis:0.07926416,Python_reticulatus:0.07137747):0.07251304,((((Morelia_carinata:0.06855047,(Morelia_viridisN:0.04314394,Morelia_viridisS:0.04266962):0.01684549):0.01320995,(Antaresia_maculosa:0.09354209,((Antaresia_stimsoni:0.01078989,Antaresia_childreni:0.02589992):0.04603335,Antaresia_perthensis:0.05790963):0.006737519):0.009808671):0.02509982,(((Antaresia_melanocephalus:0.04321766,Antaresia_ramsayi:0.03739094):0.05413265,(Bothrochilus_boa:0.06058307,Liasis_ [...]
+tree 61 = (Candoia_aspera:0.4378292,(Loxocemus_bicolor:0.2662484,(((Python_timoriensis:0.06384791,Python_reticulatus:0.07183585):0.07502361,((((Morelia_carinata:0.06462666,(Morelia_viridisN:0.03903769,Morelia_viridisS:0.0577421):0.02035461):0.01936315,(Antaresia_maculosa:0.05436798,((Antaresia_stimsoni:0.01573073,Antaresia_childreni:0.0211433):0.03806001,Antaresia_perthensis:0.0699794):0.004109115):0.005422296):0.01634019,((Antaresia_melanocephalus:0.03891313,Antaresia_ramsayi:0.02936461 [...]
+tree 62 = ((((((((((Antaresia_childreni:0.01558902,Antaresia_stimsoni:0.01595246):0.04430847,Antaresia_perthensis:0.07562575):0.007701339,Antaresia_maculosa:0.08918545):0.00634405,((Morelia_viridisN:0.03328426,Morelia_viridisS:0.05089523):0.01773241,Morelia_carinata:0.06823346):0.009729754):0.0283175,((Liasis_albertisii:0.03628829,Bothrochilus_boa:0.06273807):0.05446115,((Antaresia_ramsayi:0.02195563,Antaresia_melanocephalus:0.04103237):0.04937809,(Apodora_papuana:0.06748454,((Liasis_fus [...]
+tree 63 = (Candoia_aspera:0.3903282,(Loxocemus_bicolor:0.248947,(((Python_timoriensis:0.05996622,Python_reticulatus:0.04079137):0.05661379,((Bothrochilus_boa:0.06544201,Liasis_albertisii:0.05598568):0.03264463,((((Antaresia_melanocephalus:0.04274463,Antaresia_ramsayi:0.0229638):0.05227536,(Apodora_papuana:0.05124347,(Liasis_olivaceus:0.032322,(Liasis_mackloti:0.01322112,Liasis_fuscus:0.02499189):0.04084609):0.01081107):0.01437584):0.006629841,(((Morelia_viridisN:0.04032599,Morelia_viridi [...]
+tree 64 = ((((Python_regius:0.1027857,(Python_curtus:0.09600793,(Python_molurus:0.0279114,Python_sebae:0.06603829):0.02608479):0.02383835):0.05037635,((((Apodora_papuana:0.07130196,((Liasis_mackloti:0.01891925,Liasis_fuscus:0.01624055):0.04795536,Liasis_olivaceus:0.04357267):0.0212919):0.01519329,((Liasis_albertisii:0.05178711,Bothrochilus_boa:0.06173864):0.02414452,(Antaresia_ramsayi:0.04078327,Antaresia_melanocephalus:0.03471582):0.04901001):0.002867909):0.003315471,(Morelia_boeleni:0. [...]
+tree 65 = (Candoia_aspera:0.4484958,(Loxocemus_bicolor:0.2674019,((((Python_molurus:0.03746116,Python_sebae:0.07876257):0.02255983,Python_curtus:0.09736194):0.009465829,Python_regius:0.1122148):0.04183403,((Python_reticulatus:0.05254427,Python_timoriensis:0.06316221):0.04804532,((((Morelia_oenpelliensis:0.06223055,(Morelia_tracyae:0.03993813,(Morelia_amethistina:0.02235027,((Morelia_clastolepis:0.006880411,Morelia_kinghorni:0.005690263):0.0017908,Morelia_nauta:0.005001931):0.0110807):0.0 [...]
+tree 66 = (Candoia_aspera:0.4387456,(Loxocemus_bicolor:0.3701941,((((Python_molurus:0.02845826,Python_sebae:0.08506522):0.01248504,Python_curtus:0.1079607):0.02967151,Python_regius:0.09794066):0.04692648,((((((Morelia_oenpelliensis:0.07125908,((((Morelia_kinghorni:0.01128121,Morelia_nauta:0.009803465):0.006153012,Morelia_clastolepis:0.008016785):0.004922659,Morelia_amethistina:0.02570226):0.01632472,Morelia_tracyae:0.03681089):0.02162234):0.004091767,(Morelia_spilota:0.0288998,Morelia_br [...]
+tree 67 = ((Candoia_aspera:0.4404567,Loxocemus_bicolor:0.2623151):0.03678611,((Python_regius:0.1300392,(Python_curtus:0.0849365,(Python_sebae:0.06865021,Python_molurus:0.04067085):0.01983921):0.02301141):0.0359684,((Python_reticulatus:0.05548591,Python_timoriensis:0.07583138):0.06659153,(((Morelia_carinata:0.05407921,(Morelia_viridisN:0.03238534,Morelia_viridisS:0.03823864):0.01875875):0.01477069,(((Antaresia_childreni:0.01916999,Antaresia_stimsoni:0.02019813):0.0427897,Antaresia_perthen [...]
+tree 68 = ((Loxocemus_bicolor:0.275201,(((Python_timoriensis:0.07933547,Python_reticulatus:0.05530097):0.08859932,((Morelia_boeleni:0.0768628,((Morelia_bredli:0.02613436,Morelia_spilota:0.02952909):0.03868299,(Morelia_oenpelliensis:0.05920573,(Morelia_amethistina:0.0122179,((Morelia_nauta:0.005477438,(Morelia_clastolepis:0.01002306,Morelia_kinghorni:0.002926079):0.005503214):0.02905945,Morelia_tracyae:0.02562446):0.01299805):0.05995565):0.002851122):0.02890678):0.01651045,(((((Antaresia_ [...]
+tree 69 = (Candoia_aspera:0.5069986,((((((((Antaresia_maculosa:0.08576571,((Antaresia_childreni:0.03038096,Antaresia_stimsoni:0.01692226):0.04191162,Antaresia_perthensis:0.06766591):0.01159188):0.00361208,((Morelia_viridisN:0.03718157,Morelia_viridisS:0.05590611):0.01486255,Morelia_carinata:0.08684218):0.01085831):0.01500122,(((Antaresia_ramsayi:0.02557342,Antaresia_melanocephalus:0.04322508):0.05597149,(Apodora_papuana:0.06308859,(Liasis_olivaceus:0.03261355,(Liasis_fuscus:0.0270116,Lia [...]
+tree 70 = (Candoia_aspera:0.4122645,(((Python_regius:0.110234,((Python_molurus:0.03231937,Python_sebae:0.07999153):0.006036296,Python_curtus:0.09954285):0.02136783):0.03842027,(((((Liasis_albertisii:0.04708427,Bothrochilus_boa:0.0570808):0.04067184,((Antaresia_ramsayi:0.02902459,Antaresia_melanocephalus:0.03726802):0.04574105,(Apodora_papuana:0.07554033,(Liasis_olivaceus:0.03919017,(Liasis_mackloti:0.01169475,Liasis_fuscus:0.01905053):0.03694819):0.01354151):0.01916572):0.001572765):0.00 [...]
+tree 71 = (Candoia_aspera:0.4589375,(Loxocemus_bicolor:0.2861174,(((((Antaresia_maculosa:0.07010981,((Antaresia_childreni:0.01826358,Antaresia_stimsoni:0.02200477):0.03110523,Antaresia_perthensis:0.06846857):0.01000272):0.004299263,((Morelia_viridisN:0.03728736,Morelia_viridisS:0.03357402):0.02663172,Morelia_carinata:0.06458395):0.009897064):0.01429599,((Morelia_boeleni:0.07654923,(((Morelia_amethistina:0.02884541,(Morelia_tracyae:0.05910984,((Morelia_clastolepis:0.007296296,Morelia_king [...]
+tree 72 = (Candoia_aspera:0.4576079,(((((((((Morelia_amethistina:0.01521725,(Morelia_tracyae:0.05461905,(Morelia_clastolepis:0.003219944,(Morelia_nauta:0.006485679,Morelia_kinghorni:0.007425166):0.001335271):0.03162993):0.009930456):0.04123097,Morelia_oenpelliensis:0.07386276):0.008972008,(Morelia_spilota:0.023071,Morelia_bredli:0.03727896):0.03126727):0.03469699,((Antaresia_maculosa:0.06361313,(Antaresia_perthensis:0.07265412,(Antaresia_stimsoni:0.0111831,Antaresia_childreni:0.02761491) [...]
+tree 73 = ((Loxocemus_bicolor:0.3119393,((((Python_molurus:0.04448889,Python_sebae:0.06188512):0.0142073,Python_curtus:0.1025871):0.03482844,Python_regius:0.1297599):0.02807402,((Python_reticulatus:0.0707405,Python_timoriensis:0.08769258):0.0480519,((((((Morelia_clastolepis:0.01089226,(Morelia_kinghorni:0.01052386,Morelia_nauta:0.009789005):0.002879246):0.0157598,(Morelia_tracyae:0.02688318,Morelia_amethistina:0.02338759):0.008894687):0.02362956,Morelia_oenpelliensis:0.0695819):0.0063823 [...]
+tree 74 = ((Loxocemus_bicolor:0.2083269,Candoia_aspera:0.5152461):0.04062122,((((((Morelia_bredli:0.02280977,Morelia_spilota:0.02376029):0.03352429,(Morelia_oenpelliensis:0.05724071,((Morelia_tracyae:0.03030344,(Morelia_clastolepis:0.003406181,(Morelia_kinghorni:0.003670556,Morelia_nauta:0.01034161):0.0009901308):0.02356278):0.002815858,Morelia_amethistina:0.0307196):0.04274883):0.00406888):0.01093207,Morelia_boeleni:0.06876127):0.008428241,((Liasis_albertisii:0.03787623,Bothrochilus_boa [...]
+tree 75 = ((Loxocemus_bicolor:0.3300245,((((((Morelia_spilota:0.01654569,Morelia_bredli:0.03397926):0.03641578,(Morelia_oenpelliensis:0.0720801,(Morelia_tracyae:0.05217545,(((Morelia_clastolepis:0.005005123,Morelia_kinghorni:0.01336545):0.002474397,Morelia_nauta:0.005135748):0.008917268,Morelia_amethistina:0.02211238):0.0103868):0.03809202):0.005851688):0.01685244,(((Morelia_viridisN:0.03925044,Morelia_viridisS:0.03750531):0.01639746,Morelia_carinata:0.07054688):0.008838584,(((Antaresia_ [...]
+tree 76 = (((((((((Antaresia_melanocephalus:0.02772176,Antaresia_ramsayi:0.04226773):0.0504747,(Apodora_papuana:0.07093863,((Liasis_mackloti:0.01709142,Liasis_fuscus:0.02330178):0.04618391,Liasis_olivaceus:0.0352755):0.01342014):0.02167455):0.0100165,(Bothrochilus_boa:0.07219773,Liasis_albertisii:0.04785805):0.04234023):0.004860061,((((Morelia_tracyae:0.04624559,(Morelia_amethistina:0.02472913,((Morelia_kinghorni:0.01642213,Morelia_clastolepis:0.009078821):0.003514855,Morelia_nauta:0.001 [...]
+tree 77 = ((Candoia_aspera:0.3502765,Loxocemus_bicolor:0.2724483):0.02964649,((((Python_molurus:0.04580597,Python_sebae:0.0612617):0.01270166,Python_curtus:0.07769046):0.02310792,Python_regius:0.09778553):0.0505763,((Python_reticulatus:0.0510522,Python_timoriensis:0.07707769):0.05952654,((((((Apodora_papuana:0.04280117,Liasis_olivaceus:0.03264188):0.01300365,(Liasis_mackloti:0.008264549,Liasis_fuscus:0.01835877):0.04882774):0.01341318,(Antaresia_ramsayi:0.03582116,Antaresia_melanocephalu [...]
+tree 78 = (Candoia_aspera:0.4818862,(((Python_regius:0.1173105,(Python_curtus:0.09787109,(Python_molurus:0.03613981,Python_sebae:0.08034014):0.02579503):0.01255788):0.03521315,((Python_timoriensis:0.06462919,Python_reticulatus:0.04927181):0.0781527,(((((Morelia_bredli:0.03127313,Morelia_spilota:0.03540732):0.03493716,(Morelia_oenpelliensis:0.05428268,((Morelia_tracyae:0.03980916,(Morelia_nauta:0.002858831,(Morelia_kinghorni:0.01143028,Morelia_clastolepis:0.01019444):0.009460836):0.020065 [...]
+tree 79 = ((((Python_timoriensis:0.07252546,Python_reticulatus:0.06270089):0.06874345,((((Apodora_papuana:0.08250042,(Liasis_olivaceus:0.04256184,(Liasis_mackloti:0.01543068,Liasis_fuscus:0.01570251):0.05041162):0.01150162):0.01891647,((Antaresia_melanocephalus:0.03158756,Antaresia_ramsayi:0.03251311):0.05107385,(Bothrochilus_boa:0.06030661,Liasis_albertisii:0.04458544):0.03289097):0.003724523):0.007746936,(Morelia_boeleni:0.07578827,((Morelia_spilota:0.02666204,Morelia_bredli:0.02909298 [...]
+tree 80 = (((((Python_timoriensis:0.07942641,Python_reticulatus:0.05318356):0.0542813,((((((Liasis_mackloti:0.01510409,Liasis_fuscus:0.01776043):0.05075845,(Liasis_olivaceus:0.03951128,Apodora_papuana:0.07083407):0.01755491):0.01728235,(Antaresia_melanocephalus:0.03002391,Antaresia_ramsayi:0.02149549):0.04485914):0.01052883,(Bothrochilus_boa:0.05519856,Liasis_albertisii:0.04763622):0.02797339):0.004472592,Morelia_boeleni:0.08374513):0.005074553,((((Antaresia_perthensis:0.07022127,(Antare [...]
+tree 81 = (Candoia_aspera:0.4667571,(Loxocemus_bicolor:0.3367887,((Python_regius:0.1180484,((Python_molurus:0.03959197,Python_sebae:0.1142895):0.01503196,Python_curtus:0.09833922):0.03611823):0.04569427,(((Morelia_boeleni:0.09325398,(((Antaresia_ramsayi:0.02456999,Antaresia_melanocephalus:0.03700992):0.05498125,(Liasis_albertisii:0.06510315,Bothrochilus_boa:0.06030748):0.02629961):0.007964493,((Liasis_olivaceus:0.03164813,(Liasis_mackloti:0.007046992,Liasis_fuscus:0.01983901):0.05932195) [...]
+tree 82 = ((((((Python_sebae:0.0645436,Python_molurus:0.03466129):0.01099752,Python_curtus:0.1213218):0.01569256,Python_regius:0.1288854):0.02512009,((((Liasis_albertisii:0.04768349,Bothrochilus_boa:0.06732252):0.02577025,((Apodora_papuana:0.0573615,(Liasis_olivaceus:0.03143039,(Liasis_fuscus:0.01956832,Liasis_mackloti:0.01151569):0.04049188):0.01153908):0.02375811,(Antaresia_ramsayi:0.02867471,Antaresia_melanocephalus:0.03825474):0.0532607):0.007452147):0.005339881,((Morelia_boeleni:0.0 [...]
+tree 83 = (((((Python_timoriensis:0.06385903,Python_reticulatus:0.05887241):0.06039356,(((((Antaresia_ramsayi:0.03291234,Antaresia_melanocephalus:0.04406639):0.0551582,(Apodora_papuana:0.06219194,(Liasis_olivaceus:0.04618287,(Liasis_fuscus:0.01928215,Liasis_mackloti:0.01361912):0.04523828):0.01769705):0.01373734):0.005296261,(Liasis_albertisii:0.04618613,Bothrochilus_boa:0.07393764):0.02720086):0.007912564,((Morelia_carinata:0.07341501,(Morelia_viridisN:0.03939504,Morelia_viridisS:0.0484 [...]
+tree 84 = ((Candoia_aspera:0.4302856,Loxocemus_bicolor:0.2106506):0.03081074,(((Python_reticulatus:0.039706,Python_timoriensis:0.06318284):0.06737832,((((Antaresia_ramsayi:0.02993718,Antaresia_melanocephalus:0.0516321):0.03462159,((Liasis_fuscus:0.01909033,Liasis_mackloti:0.01920083):0.03884619,(Apodora_papuana:0.07970396,Liasis_olivaceus:0.0318336):0.0159565):0.02141131):0.01086279,(((Antaresia_perthensis:0.05877012,(Antaresia_childreni:0.02373958,Antaresia_stimsoni:0.01345116):0.031549 [...]
+tree 85 = ((Loxocemus_bicolor:0.2534505,(((((Morelia_carinata:0.07389122,(Morelia_viridisN:0.03609381,Morelia_viridisS:0.05675261):0.01821106):0.01076806,(((Antaresia_stimsoni:0.01256993,Antaresia_childreni:0.02450923):0.02974669,Antaresia_perthensis:0.05764076):0.02295225,Antaresia_maculosa:0.07677902):0.008637588):0.01786285,((Morelia_boeleni:0.07562694,(((Bothrochilus_boa:0.06376279,Liasis_albertisii:0.04877791):0.03468829,(Antaresia_melanocephalus:0.04365131,Antaresia_ramsayi:0.02560 [...]
+tree 86 = (Candoia_aspera:0.4432525,(((Python_regius:0.1388916,((Python_sebae:0.08554284,Python_molurus:0.05720035):0.009684637,Python_curtus:0.08433645):0.01174155):0.02783744,((((((Morelia_bredli:0.02386123,Morelia_spilota:0.02840604):0.03818062,(Morelia_oenpelliensis:0.05193934,(Morelia_amethistina:0.01753699,(((Morelia_nauta:0.006926759,Morelia_kinghorni:0.005577224):0.001173386,Morelia_clastolepis:0.004074834):0.02082514,Morelia_tracyae:0.05079507):0.004276661):0.04497917):0.0089538 [...]
+tree 87 = ((((((Python_molurus:0.03328051,Python_sebae:0.08260708):0.02277176,Python_curtus:0.09369442):0.01362622,Python_regius:0.1144538):0.04157068,((Python_reticulatus:0.0659659,Python_timoriensis:0.05748917):0.05766686,((((Morelia_spilota:0.02944967,Morelia_bredli:0.02243377):0.03036736,(Morelia_oenpelliensis:0.05862991,((Morelia_amethistina:0.02044603,(Morelia_clastolepis:0.00667788,(Morelia_kinghorni:0.002341999,Morelia_nauta:0.004724123):0.001102001):0.01914452):0.006570517,Morel [...]
+tree 88 = ((Candoia_aspera:0.4541513,Loxocemus_bicolor:0.2958392):0.07081872,(((Python_timoriensis:0.05908032,Python_reticulatus:0.06230935):0.05923604,(((Morelia_carinata:0.059557,(Morelia_viridisN:0.0430535,Morelia_viridisS:0.04451561):0.02637773):0.01378714,(Antaresia_maculosa:0.06826855,((Antaresia_stimsoni:0.01615772,Antaresia_childreni:0.02120395):0.03420147,Antaresia_perthensis:0.06973437):0.02609569):0.003154777):0.02419757,((Morelia_boeleni:0.0936062,((Morelia_oenpelliensis:0.05 [...]
+tree 89 = ((Loxocemus_bicolor:0.2715632,((Python_regius:0.105666,(Python_curtus:0.08820886,(Python_molurus:0.0289014,Python_sebae:0.08313008):0.01341111):0.02700934):0.0537633,((Python_timoriensis:0.06336319,Python_reticulatus:0.06893482):0.0668129,(((Morelia_carinata:0.05800344,(Morelia_viridisN:0.03450113,Morelia_viridisS:0.04273127):0.02064897):0.01309781,(Antaresia_maculosa:0.07388802,(Antaresia_perthensis:0.06515081,(Antaresia_stimsoni:0.01508704,Antaresia_childreni:0.02555118):0.02 [...]
+tree 90 = (Candoia_aspera:0.4139069,(((Python_regius:0.1113714,(Python_curtus:0.1046913,(Python_molurus:0.03657293,Python_sebae:0.06993362):0.02487479):0.0164626):0.04311928,((Python_reticulatus:0.06430189,Python_timoriensis:0.06051875):0.0693176,((Morelia_boeleni:0.08663333,((Morelia_spilota:0.03083786,Morelia_bredli:0.0240764):0.03537649,(Morelia_oenpelliensis:0.06986482,((((Morelia_nauta:0.01074089,Morelia_kinghorni:0.006973603):0.003692685,Morelia_clastolepis:0.002062225):0.01547749, [...]
+tree 91 = ((Loxocemus_bicolor:0.3083324,((((Morelia_boeleni:0.09668887,((Liasis_albertisii:0.05250439,Bothrochilus_boa:0.06016687):0.03512285,((Morelia_oenpelliensis:0.05748413,(Morelia_tracyae:0.0477756,((Morelia_nauta:0.007311925,(Morelia_kinghorni:0.005988491,Morelia_clastolepis:0.003411513):0.004753291):0.02241144,Morelia_amethistina:0.02106282):0.006562451):0.03868782):0.01022867,(Morelia_spilota:0.02908629,Morelia_bredli:0.02807888):0.02736293):0.02072108):0.007550236):0.004824623, [...]
+tree 92 = (Candoia_aspera:0.4281057,((((Python_timoriensis:0.08823408,Python_reticulatus:0.05737884):0.06100876,(((Bothrochilus_boa:0.07126329,Liasis_albertisii:0.04201895):0.03675228,((Antaresia_melanocephalus:0.04236584,Antaresia_ramsayi:0.03108991):0.04881623,((Liasis_mackloti:0.01787838,Liasis_fuscus:0.01928909):0.04546609,(Liasis_olivaceus:0.05017097,Apodora_papuana:0.06242772):0.00707014):0.01689589):0.01717237):0.003562895,((Morelia_boeleni:0.09789998,((Morelia_oenpelliensis:0.067 [...]
+tree 93 = ((((Python_regius:0.1131232,((Python_molurus:0.05168396,Python_sebae:0.08173496):0.02146037,Python_curtus:0.1083859):0.01259181):0.0382401,(((((Morelia_carinata:0.07151188,(Morelia_viridisN:0.03574948,Morelia_viridisS:0.06082342):0.02895885):0.01261191,((Antaresia_perthensis:0.06404546,(Antaresia_stimsoni:0.01594511,Antaresia_childreni:0.02247536):0.02246347):0.006851028,Antaresia_maculosa:0.0730325):0.01006378):0.02929077,(((Apodora_papuana:0.05937471,Liasis_olivaceus:0.030790 [...]
+tree 94 = (((Python_regius:0.1239454,((Python_molurus:0.03482248,Python_sebae:0.07534156):0.00995777,Python_curtus:0.09774541):0.04014603):0.01570374,(((Morelia_boeleni:0.08858978,((((Antaresia_ramsayi:0.0206568,Antaresia_melanocephalus:0.04666388):0.04498903,(Apodora_papuana:0.05813629,(Liasis_olivaceus:0.05170553,(Liasis_fuscus:0.02074332,Liasis_mackloti:0.01562806):0.04108401):0.00829329):0.0165318):0.005684696,(Liasis_albertisii:0.05068824,Bothrochilus_boa:0.06321412):0.03784581):0.0 [...]
+tree 95 = ((((((((Antaresia_ramsayi:0.02667422,Antaresia_melanocephalus:0.04534988):0.05253592,(Apodora_papuana:0.07194987,(Liasis_olivaceus:0.04129446,(Liasis_mackloti:0.02126345,Liasis_fuscus:0.02281094):0.05439975):0.01019772):0.007377441):0.0114808,(((Morelia_viridisS:0.03858762,Morelia_viridisN:0.04247917):0.02657865,Morelia_carinata:0.08634983):0.01532563,((Antaresia_perthensis:0.07156371,(Antaresia_stimsoni:0.02213176,Antaresia_childreni:0.01944844):0.03966154):0.008415598,Antares [...]
+tree 96 = ((Loxocemus_bicolor:0.2807967,(((Python_timoriensis:0.06910729,Python_reticulatus:0.05372697):0.07638634,((((Morelia_viridisN:0.02707026,Morelia_viridisS:0.04347542):0.02229195,Morelia_carinata:0.05926867):0.01329909,(Antaresia_maculosa:0.04804866,((Antaresia_stimsoni:0.0148235,Antaresia_childreni:0.02618698):0.03229745,Antaresia_perthensis:0.06685734):0.003997152):0.01381649):0.01124514,((((((Morelia_tracyae:0.04246949,(((Morelia_nauta:0.005484603,Morelia_kinghorni:0.0104469): [...]
+tree 97 = (Candoia_aspera:0.5039896,(Loxocemus_bicolor:0.2850881,((((Python_sebae:0.08938354,Python_molurus:0.03446374):0.01632282,Python_curtus:0.1258863):0.02531511,Python_regius:0.1176712):0.04925833,((((((Antaresia_ramsayi:0.03828439,Antaresia_melanocephalus:0.03645482):0.04680986,(((Liasis_fuscus:0.02699014,Liasis_mackloti:0.008925802):0.04733224,Liasis_olivaceus:0.03373177):0.01716661,Apodora_papuana:0.05578779):0.01661917):0.001369567,(Liasis_albertisii:0.05880192,Bothrochilus_boa [...]
+tree 98 = (Candoia_aspera:0.3960502,(((Python_regius:0.1248549,(Python_curtus:0.09506814,(Python_sebae:0.07827634,Python_molurus:0.03632028):0.01662299):0.02602335):0.05087225,((Python_reticulatus:0.0549807,Python_timoriensis:0.06575509):0.04986447,((((Antaresia_perthensis:0.05946804,(Antaresia_childreni:0.02254218,Antaresia_stimsoni:0.02131714):0.03895009):0.01473793,Antaresia_maculosa:0.07138852):0.004917744,(Morelia_carinata:0.06986168,(Morelia_viridisN:0.03186693,Morelia_viridisS:0.0 [...]
+tree 99 = (((((Morelia_boeleni:0.09666952,((Morelia_bredli:0.02487562,Morelia_spilota:0.01972983):0.02472607,(Morelia_oenpelliensis:0.05818019,(Morelia_tracyae:0.04529842,((Morelia_nauta:0.001550672,(Morelia_clastolepis:0.00699391,Morelia_kinghorni:0.008062495):0.007728304):0.01843411,Morelia_amethistina:0.02151424):0.006970344):0.04507299):0.008188663):0.02156805):0.002725827,((((((Liasis_fuscus:0.02915797,Liasis_mackloti:0.01101938):0.05756546,Liasis_olivaceus:0.04796678):0.01695819,Ap [...]
+tree 100 = ((Loxocemus_bicolor:0.2671658,(((Python_curtus:0.1129476,(Python_molurus:0.04232295,Python_sebae:0.06584234):0.02744495):0.01460987,Python_regius:0.1067747):0.04485947,((((((Morelia_spilota:0.03627458,Morelia_bredli:0.02800939):0.03362723,((Morelia_tracyae:0.05576232,(((Morelia_kinghorni:0.00559587,Morelia_nauta:0.009946386):0.003195788,Morelia_clastolepis:0.003350075):0.01567562,Morelia_amethistina:0.02654552):0.005885569):0.02131979,Morelia_oenpelliensis:0.06271031):0.007420 [...]
+tree 101 = ((Loxocemus_bicolor:0.3169261,((Python_regius:0.1401556,(Python_curtus:0.08796553,(Python_molurus:0.04991819,Python_sebae:0.08324849):0.02384237):0.01601799):0.04009144,((((((Morelia_tracyae:0.04670118,((Morelia_clastolepis:0.003713301,(Morelia_kinghorni:0.01206751,Morelia_nauta:0.01282358):0.01047532):0.0141348,Morelia_amethistina:0.01978401):0.006927226):0.02565125,Morelia_oenpelliensis:0.05165965):0.007390497,(Morelia_spilota:0.02417998,Morelia_bredli:0.03266626):0.03605997 [...]
+End;
+
+[Total of 101 trees sourced from:]
+[ ../mb/pythonidae.mb.run3.t: 1001 trees in file, sampling 1 tree of every 10 trees after 0 tree burn-in: 101 trees added (current total = 101 trees) ]
diff --git a/doc/source/examples/pythonidae.mb.run4.t b/doc/source/examples/pythonidae.mb.run4.t
new file mode 100644
index 0000000..bea3e6b
--- /dev/null
+++ b/doc/source/examples/pythonidae.mb.run4.t
@@ -0,0 +1,108 @@
+#NEXUS
+
+Begin Trees;
+tree 1 = (Morelia_viridisS:0.1,(Python_reticulatus:0.1,(Liasis_albertisii:0.1,((Python_sebae:0.1,(Morelia_kinghorni:0.1,((Morelia_tracyae:0.1,Antaresia_ramsayi:0.1):0.1,(Morelia_clastolepis:0.1,Liasis_olivaceus:0.1):0.1):0.1):0.1):0.1,((Liasis_fuscus:0.1,(Antaresia_melanocephalus:0.1,((Morelia_boeleni:0.1,Morelia_oenpelliensis:0.1):0.1,(Morelia_nauta:0.1,(Apodora_papuana:0.1,((Python_regius:0.1,(Antaresia_stimsoni:0.1,(Antaresia_childreni:0.1,(Python_curtus:0.1,(Python_molurus:0.1,(Morel [...]
+tree 2 = ((Loxocemus_bicolor:0.29998,((Python_regius:0.08968457,(Python_curtus:0.1029562,(Python_sebae:0.09757662,Python_molurus:0.0259894):0.01456706):0.02502923):0.04690754,(((((Liasis_albertisii:0.04723311,Bothrochilus_boa:0.05733938):0.03385905,((((Morelia_amethistina:0.02284752,(Morelia_nauta:0.007179308,(Morelia_kinghorni:0.005976172,Morelia_clastolepis:0.01150655):0.004736825):0.01950885):0.008573832,Morelia_tracyae:0.04217456):0.04538893,Morelia_oenpelliensis:0.07600545):0.002258 [...]
+tree 3 = ((Loxocemus_bicolor:0.2209193,Candoia_aspera:0.5175202):0.05981584,((Python_regius:0.131341,((Python_sebae:0.08197324,Python_molurus:0.02629174):0.004903717,Python_curtus:0.1151623):0.01926628):0.03974369,(((((((Morelia_tracyae:0.0621596,(Morelia_amethistina:0.01752052,((Morelia_clastolepis:0.009737327,Morelia_kinghorni:0.00774926):0.0004675142,Morelia_nauta:0.004862369):0.02153621):0.0107371):0.04272645,Morelia_oenpelliensis:0.08233729):0.004388594,(Morelia_bredli:0.02426521,Mo [...]
+tree 4 = (Candoia_aspera:0.4448422,(((Python_regius:0.1280475,(Python_curtus:0.09500802,(Python_molurus:0.03410363,Python_sebae:0.09097051):0.01147731):0.0116351):0.04962919,((Python_timoriensis:0.07265631,Python_reticulatus:0.06311041):0.06279975,(((Morelia_carinata:0.06853881,(Morelia_viridisS:0.05068178,Morelia_viridisN:0.04196028):0.02447451):0.01809677,(((Antaresia_childreni:0.02673943,Antaresia_stimsoni:0.0133698):0.04104044,Antaresia_perthensis:0.08526301):0.007445487,Antaresia_ma [...]
+tree 5 = (Candoia_aspera:0.4015654,(Loxocemus_bicolor:0.288799,(((Python_curtus:0.09116556,(Python_molurus:0.03555662,Python_sebae:0.07814927):0.0160167):0.0205932,Python_regius:0.123048):0.04098075,((Python_timoriensis:0.05968186,Python_reticulatus:0.06968151):0.05547378,((Morelia_boeleni:0.08409658,(((Antaresia_ramsayi:0.03074081,Antaresia_melanocephalus:0.0346257):0.05305423,(Apodora_papuana:0.06106267,((Liasis_fuscus:0.01690269,Liasis_mackloti:0.01689524):0.03609024,Liasis_olivaceus: [...]
+tree 6 = ((((Python_curtus:0.09646189,(Python_sebae:0.08002913,Python_molurus:0.03568761):0.008634297):0.02200725,Python_regius:0.1317527):0.03633037,(((((Antaresia_perthensis:0.05307065,(Antaresia_stimsoni:0.009090358,Antaresia_childreni:0.0254213):0.04220135):0.01816758,Antaresia_maculosa:0.06828908):0.01002845,((Morelia_viridisN:0.0248399,Morelia_viridisS:0.05401829):0.02268186,Morelia_carinata:0.07445212):0.009722884):0.01743538,(((((Antaresia_melanocephalus:0.03836372,Antaresia_rams [...]
+tree 7 = ((Candoia_aspera:0.4619861,((((Python_molurus:0.03721065,Python_sebae:0.09614368):0.0162926,Python_curtus:0.1000736):0.01924715,Python_regius:0.119749):0.04487645,((Python_timoriensis:0.06912482,Python_reticulatus:0.05068688):0.06164897,(((Liasis_albertisii:0.04401247,Bothrochilus_boa:0.05268417):0.03726093,((Apodora_papuana:0.06119071,(Liasis_olivaceus:0.04297874,(Liasis_fuscus:0.0283551,Liasis_mackloti:0.006027042):0.04708239):0.00937156):0.009677338,(Antaresia_ramsayi:0.03275 [...]
+tree 8 = (((((Python_curtus:0.08970388,(Python_sebae:0.09383888,Python_molurus:0.03118811):0.02230383):0.0318401,Python_regius:0.1112805):0.03880972,((Python_timoriensis:0.05606978,Python_reticulatus:0.06112931):0.06149968,(((((((Liasis_fuscus:0.02430779,Liasis_mackloti:0.01298445):0.05819194,Liasis_olivaceus:0.04340126):0.0157641,Apodora_papuana:0.05457405):0.01652161,(Antaresia_ramsayi:0.02531317,Antaresia_melanocephalus:0.04808955):0.06518523):0.003581067,(Liasis_albertisii:0.04956958 [...]
+tree 9 = ((Loxocemus_bicolor:0.2792383,((Python_regius:0.1106281,((Python_molurus:0.03239843,Python_sebae:0.08617904):0.005639289,Python_curtus:0.1382793):0.03422022):0.03756419,((Python_timoriensis:0.07247158,Python_reticulatus:0.05732143):0.06344207,(((((Apodora_papuana:0.05626137,Liasis_olivaceus:0.02901987):0.008936626,(Liasis_fuscus:0.02781678,Liasis_mackloti:0.008644321):0.06617561):0.01733756,(Antaresia_melanocephalus:0.04252032,Antaresia_ramsayi:0.02837204):0.04600447):0.01135502 [...]
+tree 10 = (((((Python_curtus:0.1072085,(Python_molurus:0.03871235,Python_sebae:0.0678876):0.01192949):0.01668069,Python_regius:0.1224135):0.03929044,((Python_timoriensis:0.06715769,Python_reticulatus:0.06970045):0.0697207,(((((Antaresia_perthensis:0.06640792,(Antaresia_stimsoni:0.01099803,Antaresia_childreni:0.03139541):0.03590912):0.02305846,Antaresia_maculosa:0.06796156):0.008023627,((Morelia_viridisN:0.03763553,Morelia_viridisS:0.05044016):0.0219675,Morelia_carinata:0.07447572):0.0181 [...]
+tree 11 = ((((Python_regius:0.1027942,(Python_curtus:0.1093195,(Python_molurus:0.03616785,Python_sebae:0.09714906):0.01900329):0.008746833):0.03480031,((Python_timoriensis:0.05888493,Python_reticulatus:0.05249643):0.06859351,((Liasis_albertisii:0.04248073,Bothrochilus_boa:0.08031133):0.03398111,((Morelia_boeleni:0.07999314,((((Liasis_fuscus:0.01725487,Liasis_mackloti:0.01089339):0.05703001,(Liasis_olivaceus:0.0302746,Apodora_papuana:0.05585387):0.01099982):0.01251347,(Antaresia_melanocep [...]
+tree 12 = ((Loxocemus_bicolor:0.200797,Candoia_aspera:0.4791858):0.02924977,(((Python_timoriensis:0.07453055,Python_reticulatus:0.05007415):0.04164017,((Morelia_boeleni:0.08686082,(((Morelia_tracyae:0.02808619,(Morelia_amethistina:0.02303181,((Morelia_kinghorni:0.01274665,Morelia_nauta:0.009048873):0.001590683,Morelia_clastolepis:0.006054381):0.01882585):0.01332885):0.03223371,Morelia_oenpelliensis:0.05694572):0.002731064,(Morelia_bredli:0.02672501,Morelia_spilota:0.0225727):0.02678919): [...]
+tree 13 = (Candoia_aspera:0.4273618,(((((((Bothrochilus_boa:0.05415261,Liasis_albertisii:0.06038956):0.0360089,((Antaresia_melanocephalus:0.03041821,Antaresia_ramsayi:0.0331126):0.05873344,((Liasis_fuscus:0.0184059,Liasis_mackloti:0.01465728):0.04310984,(Apodora_papuana:0.05418494,Liasis_olivaceus:0.03883886):0.016399):0.01873951):0.009908615):0.005950868,(Morelia_boeleni:0.09917271,((Morelia_bredli:0.02846252,Morelia_spilota:0.02458112):0.0365903,(((Morelia_tracyae:0.03813979,(Morelia_c [...]
+tree 14 = ((Loxocemus_bicolor:0.2478028,((((((Antaresia_ramsayi:0.03367224,Antaresia_melanocephalus:0.03609129):0.04704844,((Apodora_papuana:0.05697895,Liasis_olivaceus:0.03303652):0.003571652,(Liasis_fuscus:0.02007346,Liasis_mackloti:0.008729213):0.04390847):0.02024927):0.005893421,((Liasis_albertisii:0.05713446,Bothrochilus_boa:0.06403746):0.02788034,(Morelia_boeleni:0.07313066,((Morelia_spilota:0.03214009,Morelia_bredli:0.02508033):0.02692491,((((Morelia_clastolepis:0.001813014,(Morel [...]
+tree 15 = (Candoia_aspera:0.4192863,(Loxocemus_bicolor:0.2798413,(((((((Antaresia_stimsoni:0.01329395,Antaresia_childreni:0.02283497):0.03790398,Antaresia_perthensis:0.07216747):0.008864959,Antaresia_maculosa:0.06182634):0.009885569,((Morelia_viridisN:0.034578,Morelia_viridisS:0.04693376):0.01679416,Morelia_carinata:0.04904747):0.01559752):0.01005754,((Morelia_boeleni:0.07412517,(((Morelia_tracyae:0.02896581,(Morelia_amethistina:0.03479837,((Morelia_nauta:0.0063588,Morelia_kinghorni:0.00 [...]
+tree 16 = (Candoia_aspera:0.5004037,(Loxocemus_bicolor:0.2686312,((Python_regius:0.1227537,((Python_molurus:0.02929629,Python_sebae:0.07680908):0.01567275,Python_curtus:0.1201353):0.02038343):0.03700993,((Python_timoriensis:0.05402337,Python_reticulatus:0.06266955):0.0723655,(((Antaresia_maculosa:0.0824284,((Antaresia_childreni:0.02716977,Antaresia_stimsoni:0.01773016):0.03547345,Antaresia_perthensis:0.081437):0.0118044):0.01428121,(Morelia_carinata:0.06047333,(Morelia_viridisS:0.0537466 [...]
+tree 17 = (Candoia_aspera:0.3860517,(Loxocemus_bicolor:0.2262186,((Python_regius:0.108871,(Python_curtus:0.07047174,(Python_molurus:0.03462239,Python_sebae:0.07665672):0.009287629):0.01072433):0.04514998,((Python_timoriensis:0.0838981,Python_reticulatus:0.05669912):0.05609795,((((((Morelia_tracyae:0.03987745,((Morelia_clastolepis:0.007140712,(Morelia_nauta:0.01100948,Morelia_kinghorni:0.007874271):0.002130305):0.00610072,Morelia_amethistina:0.02963331):0.01019719):0.0334844,Morelia_oenpe [...]
+tree 18 = ((Loxocemus_bicolor:0.2877769,((Python_regius:0.1397491,((Python_sebae:0.08341701,Python_molurus:0.031548):0.02842307,Python_curtus:0.1021217):0.0152687):0.05564439,((Python_timoriensis:0.06025919,Python_reticulatus:0.06209771):0.06208062,((Morelia_boeleni:0.07254053,((Morelia_spilota:0.0292054,Morelia_bredli:0.02617089):0.04941909,((Morelia_tracyae:0.04522253,(((Morelia_kinghorni:0.01142549,Morelia_clastolepis:0.01216984):0.007288991,Morelia_nauta:0.0008988171):0.01149753,More [...]
+tree 19 = (Candoia_aspera:0.4315535,(((Python_regius:0.1083479,(Python_curtus:0.1078002,(Python_molurus:0.06315571,Python_sebae:0.07304212):0.03146347):0.02541859):0.05393959,((Python_timoriensis:0.0555151,Python_reticulatus:0.07256859):0.06332415,((((((Antaresia_childreni:0.02090179,Antaresia_stimsoni:0.02001766):0.03533676,Antaresia_perthensis:0.0606145):0.01254612,Antaresia_maculosa:0.06593747):0.007542348,(Morelia_carinata:0.06172531,(Morelia_viridisS:0.04635308,Morelia_viridisN:0.04 [...]
+tree 20 = ((Loxocemus_bicolor:0.3155324,((Python_regius:0.1228927,(Python_curtus:0.1097628,(Python_molurus:0.03835939,Python_sebae:0.07668466):0.008325477):0.01460641):0.06564612,((Python_timoriensis:0.07856463,Python_reticulatus:0.05958525):0.05230699,(Morelia_boeleni:0.06775803,(((Liasis_albertisii:0.0645956,Bothrochilus_boa:0.06127537):0.02999912,((Antaresia_ramsayi:0.0288452,Antaresia_melanocephalus:0.04084328):0.04550624,(((Liasis_fuscus:0.01444571,Liasis_mackloti:0.01290266):0.0390 [...]
+tree 21 = (Candoia_aspera:0.4216067,(Loxocemus_bicolor:0.2956747,(((Python_curtus:0.08918839,(Python_molurus:0.05216853,Python_sebae:0.07150718):0.01801873):0.01726334,Python_regius:0.1668874):0.04387736,((Python_timoriensis:0.07135985,Python_reticulatus:0.05553729):0.06714049,(((Morelia_carinata:0.05402521,(Morelia_viridisS:0.04267584,Morelia_viridisN:0.03901156):0.01898352):0.01712506,(((Antaresia_childreni:0.02684644,Antaresia_stimsoni:0.01368649):0.04100616,Antaresia_perthensis:0.061 [...]
+tree 22 = ((Loxocemus_bicolor:0.2416069,((Python_regius:0.1243658,((Python_molurus:0.02672784,Python_sebae:0.07747051):0.01978814,Python_curtus:0.09906979):0.01446203):0.03923037,((Python_timoriensis:0.07259966,Python_reticulatus:0.05987173):0.06120053,(((Liasis_albertisii:0.0463971,Bothrochilus_boa:0.06180745):0.03566932,((Apodora_papuana:0.0726456,(Liasis_olivaceus:0.04192667,(Liasis_fuscus:0.02242715,Liasis_mackloti:0.01043229):0.04170356):0.01561047):0.01366414,(Antaresia_ramsayi:0.0 [...]
+tree 23 = (((((Python_timoriensis:0.08258052,Python_reticulatus:0.05853735):0.08288238,(((((Morelia_viridisN:0.03066907,Morelia_viridisS:0.05095432):0.01767479,Morelia_carinata:0.07342125):0.01474768,(((Antaresia_stimsoni:0.01833412,Antaresia_childreni:0.02854834):0.0462008,Antaresia_perthensis:0.0652752):0.01291676,Antaresia_maculosa:0.08806668):0.007344264):0.02173363,((Antaresia_ramsayi:0.03082787,Antaresia_melanocephalus:0.04706824):0.05208642,((Liasis_olivaceus:0.04073541,(Liasis_fu [...]
+tree 24 = (((Python_regius:0.1417174,(Python_curtus:0.110082,(Python_molurus:0.02637725,Python_sebae:0.07899893):0.01286143):0.01916527):0.06151694,((((((Liasis_fuscus:0.0178239,Liasis_mackloti:0.01868649):0.05704714,(Apodora_papuana:0.05741915,Liasis_olivaceus:0.0337298):0.01104941):0.01689001,(Antaresia_melanocephalus:0.03924603,Antaresia_ramsayi:0.03206718):0.06916624):0.004846253,(Liasis_albertisii:0.0753159,Bothrochilus_boa:0.07176204):0.03006251):0.00922749,(((Antaresia_maculosa:0. [...]
+tree 25 = (Candoia_aspera:0.3524799,((((Python_timoriensis:0.0740242,Python_reticulatus:0.05492826):0.06151397,(((Antaresia_maculosa:0.05970249,((Antaresia_stimsoni:0.01580222,Antaresia_childreni:0.03054988):0.03668299,Antaresia_perthensis:0.07063788):0.0183855):0.00864595,((Morelia_viridisN:0.03323697,Morelia_viridisS:0.05418915):0.02468022,Morelia_carinata:0.06302397):0.01568301):0.03371229,(((Liasis_albertisii:0.059584,Bothrochilus_boa:0.05887819):0.03012165,((Antaresia_ramsayi:0.0271 [...]
+tree 26 = (((((Python_curtus:0.08556007,(Python_sebae:0.06193187,Python_molurus:0.04968326):0.005435923):0.01851413,Python_regius:0.1277156):0.04573327,((((Bothrochilus_boa:0.06112235,Liasis_albertisii:0.0591258):0.03619068,(Morelia_boeleni:0.06807541,((Morelia_oenpelliensis:0.0523391,(Morelia_tracyae:0.03849492,(Morelia_amethistina:0.01932389,(Morelia_clastolepis:0.006375849,(Morelia_nauta:0.005909729,Morelia_kinghorni:0.008158058):0.003842392):0.01774768):0.002145456):0.0269855):0.0073 [...]
+tree 27 = ((Loxocemus_bicolor:0.2745395,((((Python_molurus:0.05626972,Python_sebae:0.07311616):0.02361951,Python_curtus:0.07978493):0.01955288,Python_regius:0.119671):0.0317135,(((((((Morelia_viridisN:0.03676345,Morelia_viridisS:0.03966575):0.03629429,Morelia_carinata:0.04791429):0.01092544,((Antaresia_perthensis:0.06241723,(Antaresia_stimsoni:0.01352217,Antaresia_childreni:0.02220556):0.03658676):0.01352575,Antaresia_maculosa:0.06451497):0.01398111):0.01464195,(((Liasis_olivaceus:0.0352 [...]
+tree 28 = ((((Python_curtus:0.1009424,(Python_sebae:0.07073727,Python_molurus:0.0458842):0.01611277):0.02137165,Python_regius:0.1032613):0.05541138,(((((Morelia_bredli:0.02912984,Morelia_spilota:0.02397348):0.02903979,(Morelia_oenpelliensis:0.04869417,(Morelia_tracyae:0.02929244,((Morelia_clastolepis:0.006308689,(Morelia_kinghorni:0.007352282,Morelia_nauta:0.003787058):0.004816204):0.01431019,Morelia_amethistina:0.02075164):0.009746208):0.03313488):0.007625648):0.02159451,Morelia_boeleni [...]
+tree 29 = (Candoia_aspera:0.3987203,((((Python_timoriensis:0.07231611,Python_reticulatus:0.06693656):0.07092691,((((Antaresia_ramsayi:0.02601053,Antaresia_melanocephalus:0.03847052):0.04431386,((Liasis_olivaceus:0.03460486,Apodora_papuana:0.05290077):0.009100054,(Liasis_fuscus:0.01768649,Liasis_mackloti:0.01600562):0.04353961):0.01585652):0.01486235,(Liasis_albertisii:0.04911192,Bothrochilus_boa:0.05631675):0.03876376):0.0004149582,((((Morelia_oenpelliensis:0.04973058,(Morelia_tracyae:0. [...]
+tree 30 = (Candoia_aspera:0.4868755,(((Python_regius:0.1221145,((Python_molurus:0.05019593,Python_sebae:0.07748772):0.01368855,Python_curtus:0.1141123):0.01987758):0.04155347,((Python_timoriensis:0.07494973,Python_reticulatus:0.05862679):0.06559235,(((Morelia_oenpelliensis:0.05175119,(Morelia_tracyae:0.04466695,(Morelia_amethistina:0.0192166,(Morelia_clastolepis:0.005236865,(Morelia_kinghorni:0.009909338,Morelia_nauta:0.01748069):0.002925145):0.01592512):0.01615313):0.03149762):0.0058539 [...]
+tree 31 = ((((Python_regius:0.127571,((Python_sebae:0.08733474,Python_molurus:0.02636416):0.01988984,Python_curtus:0.08227164):0.0179738):0.04350246,((((((Morelia_carinata:0.06880104,(Morelia_viridisN:0.03915972,Morelia_viridisS:0.05202187):0.02713937):0.00535652,(Antaresia_maculosa:0.07387812,(Antaresia_perthensis:0.06075918,(Antaresia_stimsoni:0.01861873,Antaresia_childreni:0.01600216):0.03189661):0.01853196):0.006632877):0.01217124,((Antaresia_melanocephalus:0.0414989,Antaresia_ramsay [...]
+tree 32 = ((Loxocemus_bicolor:0.2606271,((((Python_sebae:0.0818305,Python_molurus:0.02727312):0.01367413,Python_curtus:0.08714736):0.02175664,Python_regius:0.1020426):0.07583032,(((((Morelia_carinata:0.05255298,(Morelia_viridisN:0.02892482,Morelia_viridisS:0.05407292):0.03711387):0.01244001,((Antaresia_perthensis:0.06543078,(Antaresia_stimsoni:0.02282742,Antaresia_childreni:0.02338983):0.0390911):0.008210593,Antaresia_maculosa:0.07095299):0.007290989):0.01250111,((Bothrochilus_boa:0.0607 [...]
+tree 33 = (Candoia_aspera:0.3709596,(Loxocemus_bicolor:0.3007952,((Python_regius:0.1199573,(Python_curtus:0.1072403,(Python_molurus:0.03916451,Python_sebae:0.07200862):0.01487179):0.02108596):0.02065243,((Python_timoriensis:0.0774226,Python_reticulatus:0.05885978):0.06575871,((((Liasis_albertisii:0.06312153,Bothrochilus_boa:0.0504203):0.03711298,((((Liasis_fuscus:0.02797756,Liasis_mackloti:0.01218298):0.0538399,Liasis_olivaceus:0.04082057):0.01243284,Apodora_papuana:0.05917395):0.0123968 [...]
+tree 34 = ((Loxocemus_bicolor:0.3328977,(((Python_timoriensis:0.05424232,Python_reticulatus:0.06088091):0.0443408,(((((Morelia_bredli:0.03127133,Morelia_spilota:0.01963772):0.02802323,(Morelia_oenpelliensis:0.06068657,(((Morelia_clastolepis:0.003184588,(Morelia_nauta:0.008600695,Morelia_kinghorni:0.005238384):0.00852199):0.00685194,Morelia_amethistina:0.01906498):0.01556873,Morelia_tracyae:0.01951623):0.03165511):0.007153062):0.0306723,Morelia_boeleni:0.08086277):0.00606816,(((Antaresia_ [...]
+tree 35 = (((((((((Antaresia_perthensis:0.076839,(Antaresia_stimsoni:0.01678085,Antaresia_childreni:0.02946878):0.03759226):0.01176085,Antaresia_maculosa:0.06572966):0.005717706,(Morelia_carinata:0.05542747,(Morelia_viridisN:0.03474877,Morelia_viridisS:0.04205535):0.03438249):0.01795039):0.02082926,((Apodora_papuana:0.05639444,(Liasis_olivaceus:0.02437619,(Liasis_fuscus:0.02117262,Liasis_mackloti:0.007070453):0.04439346):0.0134516):0.01207259,((Antaresia_ramsayi:0.02854236,Antaresia_mela [...]
+tree 36 = ((Loxocemus_bicolor:0.2483509,Candoia_aspera:0.4492172):0.06859066,((Python_regius:0.1255389,((Python_molurus:0.03130493,Python_sebae:0.0745412):0.01865452,Python_curtus:0.1036943):0.01279912):0.01985159,((((((Morelia_carinata:0.08370114,(Morelia_viridisS:0.04985579,Morelia_viridisN:0.03327157):0.03830219):0.008672443,(Antaresia_maculosa:0.0623934,((Antaresia_childreni:0.02799178,Antaresia_stimsoni:0.02148532):0.03666041,Antaresia_perthensis:0.07446277):0.01086222):0.02094396): [...]
+tree 37 = (Candoia_aspera:0.4166711,((((Python_curtus:0.09816914,(Python_sebae:0.08240094,Python_molurus:0.05226293):0.01456777):0.01641748,Python_regius:0.137197):0.05957615,((Morelia_boeleni:0.08765504,((((Morelia_bredli:0.03444947,Morelia_spilota:0.02775664):0.02601056,(Morelia_amethistina:0.02924364,(Morelia_tracyae:0.03584065,((Morelia_nauta:0.01115885,Morelia_kinghorni:0.01021469):0.003210438,Morelia_clastolepis:0.006594632):0.03294197):0.001463494):0.03494781):0.002309134,Morelia_ [...]
+tree 38 = (Candoia_aspera:0.4315115,(Loxocemus_bicolor:0.2596803,((((((Morelia_bredli:0.02230078,Morelia_spilota:0.02205087):0.03955585,(Morelia_oenpelliensis:0.05115782,(((Morelia_clastolepis:0.008149148,(Morelia_nauta:0.002970811,Morelia_kinghorni:0.006429062):0.002749328):0.00566445,Morelia_amethistina:0.01508516):0.01228169,Morelia_tracyae:0.03545761):0.03108932):0.005038192):0.02099316,((((Antaresia_melanocephalus:0.04737015,Antaresia_ramsayi:0.02937167):0.04575993,(Apodora_papuana: [...]
+tree 39 = ((Loxocemus_bicolor:0.2923877,((Python_regius:0.1073602,(Python_curtus:0.08588581,(Python_molurus:0.04365642,Python_sebae:0.07138998):0.01973334):0.03022542):0.04769119,((Python_timoriensis:0.05422595,Python_reticulatus:0.05878151):0.05748568,((Morelia_boeleni:0.07541993,((Morelia_spilota:0.02019428,Morelia_bredli:0.04109098):0.0378947,(Morelia_oenpelliensis:0.06183072,(Morelia_tracyae:0.03545849,(((Morelia_clastolepis:0.004731276,Morelia_kinghorni:0.005092597):0.0005185752,Mor [...]
+tree 40 = ((Candoia_aspera:0.4156428,Loxocemus_bicolor:0.2269911):0.04826334,(((((((Antaresia_maculosa:0.07407624,(Antaresia_perthensis:0.05331123,(Antaresia_stimsoni:0.01391683,Antaresia_childreni:0.02353525):0.03288148):0.01385114):0.004249709,(Morelia_carinata:0.06564797,(Morelia_viridisN:0.04468362,Morelia_viridisS:0.0520571):0.02200155):0.008594821):0.01366193,((Antaresia_melanocephalus:0.04277621,Antaresia_ramsayi:0.02779418):0.04118139,((Liasis_olivaceus:0.04348896,Apodora_papuana [...]
+tree 41 = (Candoia_aspera:0.3566623,(((((Python_sebae:0.07282374,Python_molurus:0.02476629):0.02705769,Python_curtus:0.09737761):0.01517889,Python_regius:0.1214553):0.02036528,((Python_timoriensis:0.06640685,Python_reticulatus:0.04522264):0.07405751,(((Morelia_boeleni:0.08328095,((Morelia_bredli:0.03283461,Morelia_spilota:0.01994478):0.03109293,(((Morelia_tracyae:0.03936545,Morelia_amethistina:0.03400436):0.004093362,((Morelia_kinghorni:0.004637601,Morelia_nauta:0.008610755):0.005974469, [...]
+tree 42 = ((Loxocemus_bicolor:0.3158634,(((Python_timoriensis:0.0693274,Python_reticulatus:0.04754045):0.05546853,(((Bothrochilus_boa:0.06050538,Liasis_albertisii:0.0566936):0.03382026,((Antaresia_melanocephalus:0.04823754,Antaresia_ramsayi:0.03184894):0.04372982,((Liasis_olivaceus:0.04662664,Apodora_papuana:0.07828025):0.003462852,(Liasis_fuscus:0.02185453,Liasis_mackloti:0.009381108):0.04740364):0.02318003):0.004541615):0.004595739,((((Morelia_viridisN:0.03471745,Morelia_viridisS:0.045 [...]
+tree 43 = (Candoia_aspera:0.4779116,(Loxocemus_bicolor:0.3169837,(((((Liasis_albertisii:0.0474626,Bothrochilus_boa:0.0594194):0.03660165,(((Antaresia_melanocephalus:0.04410004,Antaresia_ramsayi:0.02017723):0.05126644,(Apodora_papuana:0.05914869,(Liasis_olivaceus:0.05195717,(Liasis_fuscus:0.02399651,Liasis_mackloti:0.007262315):0.04950535):0.01042366):0.01214327):0.01340623,(((Morelia_bredli:0.03687476,Morelia_spilota:0.03293356):0.02977614,((Morelia_tracyae:0.04761735,(Morelia_amethistin [...]
+tree 44 = (Candoia_aspera:0.3818913,(Loxocemus_bicolor:0.2384072,(((((Morelia_carinata:0.05838946,(Morelia_viridisN:0.02674,Morelia_viridisS:0.04088508):0.02908822):0.01411169,(Antaresia_maculosa:0.0580162,((Antaresia_stimsoni:0.01965443,Antaresia_childreni:0.02601986):0.04652883,Antaresia_perthensis:0.07484478):0.01135406):0.008163954):0.01560239,((((Apodora_papuana:0.05828467,(Liasis_olivaceus:0.0320515,(Liasis_fuscus:0.0194024,Liasis_mackloti:0.009507815):0.04213784):0.01011161):0.009 [...]
+tree 45 = (Candoia_aspera:0.4749727,(Loxocemus_bicolor:0.2909081,(((((Liasis_albertisii:0.05014584,Bothrochilus_boa:0.05870229):0.03693661,((Antaresia_melanocephalus:0.04437313,Antaresia_ramsayi:0.02662717):0.04252003,(Apodora_papuana:0.05208564,(Liasis_olivaceus:0.04891133,(Liasis_fuscus:0.02040686,Liasis_mackloti:0.01262133):0.05273751):0.01025606):0.02029362):0.007923826):0.0124865,(((Morelia_carinata:0.0634373,(Morelia_viridisN:0.03721233,Morelia_viridisS:0.0516408):0.02993211):0.013 [...]
+tree 46 = (Candoia_aspera:0.5167322,((((Python_timoriensis:0.06709443,Python_reticulatus:0.06793708):0.07640251,(((Bothrochilus_boa:0.0713485,Liasis_albertisii:0.04180947):0.03760496,(((Antaresia_maculosa:0.08028006,((Antaresia_stimsoni:0.01299851,Antaresia_childreni:0.02807306):0.04246189,Antaresia_perthensis:0.07026985):0.01391976):0.0195585,((Morelia_viridisN:0.03546905,Morelia_viridisS:0.0645878):0.04086846,Morelia_carinata:0.06635054):0.009228577):0.01671697,((Antaresia_melanocephal [...]
+tree 47 = ((Loxocemus_bicolor:0.243761,((Python_regius:0.1256326,((Python_sebae:0.07759863,Python_molurus:0.03319403):0.02745888,Python_curtus:0.1036193):0.01886196):0.04526608,((Python_timoriensis:0.07885125,Python_reticulatus:0.04078674):0.06705376,(((Morelia_boeleni:0.07244095,((((Morelia_amethistina:0.02612452,((Morelia_nauta:0.005952378,Morelia_kinghorni:0.01197636):0.003538517,Morelia_clastolepis:0.01001071):0.01796709):0.009481718,Morelia_tracyae:0.03859101):0.03004094,Morelia_oen [...]
+tree 48 = (Candoia_aspera:0.4623149,(Loxocemus_bicolor:0.2848758,(((Python_timoriensis:0.08529002,Python_reticulatus:0.06117518):0.05230377,((((Liasis_albertisii:0.05560586,Bothrochilus_boa:0.05213018):0.03215789,(Antaresia_ramsayi:0.03804862,Antaresia_melanocephalus:0.04693488):0.04454437):0.007747695,(Apodora_papuana:0.05693809,(Liasis_olivaceus:0.03222194,(Liasis_fuscus:0.0141549,Liasis_mackloti:0.01803922):0.06031693):0.008449162):0.01240927):0.008461036,((Morelia_boeleni:0.07705831, [...]
+tree 49 = ((((((Python_molurus:0.02966201,Python_sebae:0.07929907):0.02351445,Python_curtus:0.089452):0.02699706,Python_regius:0.1383436):0.04680606,((((((Bothrochilus_boa:0.05036822,Liasis_albertisii:0.05982621):0.03438025,(((Liasis_olivaceus:0.03713293,(Liasis_fuscus:0.01918723,Liasis_mackloti:0.01381359):0.04989393):0.01383069,Apodora_papuana:0.06764718):0.01789452,(Antaresia_melanocephalus:0.03643972,Antaresia_ramsayi:0.02577372):0.04709483):0.006821191):0.00657582,Morelia_boeleni:0. [...]
+tree 50 = (Candoia_aspera:0.4379332,(((((Python_sebae:0.07359605,Python_molurus:0.04254693):0.00898198,Python_curtus:0.1110208):0.02649152,Python_regius:0.09660345):0.05497974,(((Morelia_boeleni:0.07553051,((((Morelia_tracyae:0.04456018,Morelia_amethistina:0.02108639):0.002624646,(Morelia_nauta:0.008275892,(Morelia_kinghorni:0.01202487,Morelia_clastolepis:0.006690178):0.0003475322):0.0154572):0.03874607,Morelia_oenpelliensis:0.05392181):0.002551398,(Morelia_bredli:0.02786552,Morelia_spil [...]
+tree 51 = ((Loxocemus_bicolor:0.3498798,(((Python_curtus:0.1199984,(Python_molurus:0.03343227,Python_sebae:0.06214762):0.01505318):0.01582637,Python_regius:0.1361952):0.06415399,(((Morelia_boeleni:0.07756513,((Morelia_oenpelliensis:0.07415,(Morelia_tracyae:0.04061802,(Morelia_amethistina:0.04029614,(Morelia_clastolepis:0.008050705,(Morelia_kinghorni:0.008623384,Morelia_nauta:0.01475328):0.003627272):0.01071592):0.01336837):0.02026353):0.002976652,(Morelia_bredli:0.02779951,Morelia_spilot [...]
+tree 52 = (Candoia_aspera:0.4393582,(Loxocemus_bicolor:0.3036332,(((Python_timoriensis:0.07685404,Python_reticulatus:0.0588312):0.06606706,(((((Morelia_bredli:0.02885476,Morelia_spilota:0.02530417):0.03299627,((((Morelia_clastolepis:0.002007727,(Morelia_nauta:0.01156896,Morelia_kinghorni:0.002030209):0.007676394):0.01607767,Morelia_tracyae:0.04849488):0.01065517,Morelia_amethistina:0.02323289):0.04031674,Morelia_oenpelliensis:0.07842425):0.006012383):0.01645917,Morelia_boeleni:0.08212263 [...]
+tree 53 = (Candoia_aspera:0.4354567,((((((((((Morelia_amethistina:0.01880946,Morelia_tracyae:0.03228031):0.007355948,(Morelia_nauta:0.002509787,(Morelia_kinghorni:0.008547859,Morelia_clastolepis:0.006177976):0.01163141):0.006092898):0.0437007,Morelia_oenpelliensis:0.05868744):0.01058369,(Morelia_bredli:0.02021243,Morelia_spilota:0.0236931):0.04113045):0.02420083,Morelia_boeleni:0.08697624):0.001693959,(((Apodora_papuana:0.05754786,((Liasis_fuscus:0.01519766,Liasis_mackloti:0.00888373):0. [...]
+tree 54 = ((((((((((Morelia_kinghorni:0.008665215,Morelia_nauta:0.0109783):0.001026878,Morelia_clastolepis:0.003541599):0.02206708,Morelia_amethistina:0.01969782):0.004866081,Morelia_tracyae:0.03401599):0.04312542,Morelia_oenpelliensis:0.05542273):0.004424566,(Morelia_bredli:0.02587295,Morelia_spilota:0.02686584):0.03220125):0.02335586,(Morelia_boeleni:0.07466805,((((Morelia_viridisN:0.03284982,Morelia_viridisS:0.05396462):0.02402509,Morelia_carinata:0.0733903):0.00859578,(Antaresia_macu [...]
+tree 55 = (Candoia_aspera:0.4096436,(Loxocemus_bicolor:0.2825922,((Python_regius:0.143649,((Python_molurus:0.03263311,Python_sebae:0.09044284):0.03325118,Python_curtus:0.09589007):0.01728509):0.04095609,((Python_timoriensis:0.07044698,Python_reticulatus:0.06046401):0.0612632,((((Morelia_bredli:0.0283581,Morelia_spilota:0.01987454):0.02565555,(((Morelia_tracyae:0.04287551,(Morelia_clastolepis:0.006354092,(Morelia_kinghorni:0.007341021,Morelia_nauta:0.01217569):1.422462e-05):0.02275691):0. [...]
+tree 56 = ((((Python_timoriensis:0.0821696,Python_reticulatus:0.0374911):0.06419834,(((Morelia_boeleni:0.08052849,((Morelia_bredli:0.03064723,Morelia_spilota:0.02530753):0.03036284,(((Morelia_amethistina:0.02839346,((Morelia_kinghorni:0.005676871,Morelia_nauta:0.006679796):0.006206046,Morelia_clastolepis:0.002117769):0.02479271):0.007808779,Morelia_tracyae:0.056348):0.03795807,Morelia_oenpelliensis:0.06488113):0.002177624):0.01503186):0.006123575,((Bothrochilus_boa:0.07207754,Liasis_albe [...]
+tree 57 = ((Loxocemus_bicolor:0.2325396,((((Python_molurus:0.03786264,Python_sebae:0.06502635):0.007936275,Python_curtus:0.09277477):0.01503368,Python_regius:0.1075102):0.03876495,((Python_timoriensis:0.06244723,Python_reticulatus:0.06023936):0.05824688,(((((Morelia_carinata:0.06145063,(Morelia_viridisN:0.03435088,Morelia_viridisS:0.04947603):0.02912258):0.01761857,(Antaresia_maculosa:0.07065326,(Antaresia_perthensis:0.07828664,(Antaresia_stimsoni:0.0170134,Antaresia_childreni:0.02668196 [...]
+tree 58 = ((Loxocemus_bicolor:0.2468242,(((Python_curtus:0.1170279,(Python_sebae:0.09284164,Python_molurus:0.03687876):0.02243126):0.01334561,Python_regius:0.1148749):0.04587945,((Python_timoriensis:0.07785041,Python_reticulatus:0.06751723):0.08746517,(((Morelia_boeleni:0.08939881,(Bothrochilus_boa:0.07597912,Liasis_albertisii:0.05399311):0.0351783):0.0007660284,((Morelia_bredli:0.02840847,Morelia_spilota:0.02214527):0.03226572,(Morelia_oenpelliensis:0.07709114,(((Morelia_nauta:0.0063821 [...]
+tree 59 = (((Python_regius:0.1522807,(Python_curtus:0.1117174,(Python_sebae:0.1053864,Python_molurus:0.04630373):0.02259866):0.01967949):0.0319183,((Python_timoriensis:0.08010659,Python_reticulatus:0.03796243):0.06844831,(((Morelia_boeleni:0.09501911,((Morelia_oenpelliensis:0.06511799,(((Morelia_clastolepis:0.006418486,(Morelia_nauta:0.01409049,Morelia_kinghorni:0.01242694):0.009480745):0.01750396,Morelia_amethistina:0.02468036):0.005713716,Morelia_tracyae:0.05530899):0.02743302):0.00888 [...]
+tree 60 = (Candoia_aspera:0.4340778,(((((Python_molurus:0.05527572,Python_sebae:0.08815507):0.01067865,Python_curtus:0.09143945):0.006439727,Python_regius:0.1452298):0.03931163,((Python_timoriensis:0.07775332,Python_reticulatus:0.056339):0.06948392,((((Antaresia_melanocephalus:0.0428833,Antaresia_ramsayi:0.02298154):0.05497927,(((Liasis_fuscus:0.0206774,Liasis_mackloti:0.01631532):0.04937458,Liasis_olivaceus:0.05034072):0.0149876,Apodora_papuana:0.06038298):0.02319475):0.008116068,(Liasi [...]
+tree 61 = ((((((((Antaresia_perthensis:0.07638852,(Antaresia_stimsoni:0.02121778,Antaresia_childreni:0.02889215):0.0354173):0.01981606,Antaresia_maculosa:0.0693481):0.01032929,(Morelia_carinata:0.06492437,(Morelia_viridisN:0.02927527,Morelia_viridisS:0.04859942):0.01695186):0.01177804):0.01795348,(((Liasis_albertisii:0.0494817,Bothrochilus_boa:0.0678623):0.03585962,((Antaresia_ramsayi:0.02585155,Antaresia_melanocephalus:0.04108934):0.04877222,(Apodora_papuana:0.06419168,(Liasis_olivaceus [...]
+tree 62 = (((((Python_curtus:0.1007437,(Python_sebae:0.07847175,Python_molurus:0.03661869):0.01177114):0.01913868,Python_regius:0.1148672):0.044093,((Morelia_boeleni:0.08594061,(((((((Morelia_kinghorni:0.004810639,Morelia_nauta:0.01061287):0.01048466,Morelia_clastolepis:0.009231243):0.00945426,Morelia_amethistina:0.02563356):0.004799697,Morelia_tracyae:0.04114297):0.03006124,Morelia_oenpelliensis:0.07771829):0.009323358,(Morelia_bredli:0.03632089,Morelia_spilota:0.02523437):0.04209958):0 [...]
+tree 63 = (Candoia_aspera:0.4533356,((((((Liasis_albertisii:0.04457579,Bothrochilus_boa:0.05094706):0.0327761,(Morelia_boeleni:0.07913116,((Morelia_oenpelliensis:0.07029218,((Morelia_clastolepis:0.0019791,(Morelia_kinghorni:0.0075455,Morelia_nauta:0.009120248):0.009370934):0.01231059,(Morelia_amethistina:0.02332951,Morelia_tracyae:0.03878409):0.003046213):0.02605981):0.002390341,(Morelia_bredli:0.03384283,Morelia_spilota:0.02554936):0.03919047):0.02307381):0.001715514):0.0006266203,(((An [...]
+tree 64 = (Candoia_aspera:0.4760166,(Loxocemus_bicolor:0.2757868,(((Python_timoriensis:0.07564501,Python_reticulatus:0.05995008):0.08176805,(((Liasis_albertisii:0.05854234,Bothrochilus_boa:0.06265786):0.03611681,(((Morelia_carinata:0.06077572,(Morelia_viridisN:0.03375266,Morelia_viridisS:0.05060965):0.01678711):0.01210923,(Antaresia_maculosa:0.06233509,(Antaresia_perthensis:0.0815234,(Antaresia_stimsoni:0.01417981,Antaresia_childreni:0.02374216):0.02519215):0.01449892):0.00686185):0.0165 [...]
+tree 65 = (Candoia_aspera:0.4138643,(((((Python_molurus:0.03606197,Python_sebae:0.08431371):0.0151031,Python_curtus:0.09509211):0.009259102,Python_regius:0.1134973):0.06223495,(((((Liasis_albertisii:0.06055545,Bothrochilus_boa:0.07139759):0.01471734,((Antaresia_ramsayi:0.03518096,Antaresia_melanocephalus:0.02931779):0.04659165,(Apodora_papuana:0.06222481,(Liasis_olivaceus:0.04596883,(Liasis_fuscus:0.02048555,Liasis_mackloti:0.01178189):0.0473872):0.005137335):0.01289113):0.01185418):0.00 [...]
+tree 66 = (Candoia_aspera:0.4089105,(Loxocemus_bicolor:0.2593586,(((Python_timoriensis:0.07988133,Python_reticulatus:0.05016129):0.07450671,(((((Morelia_carinata:0.07236462,(Morelia_viridisN:0.02473521,Morelia_viridisS:0.05530351):0.02301765):0.009019592,(Antaresia_maculosa:0.08353675,(Antaresia_perthensis:0.09658293,(Antaresia_stimsoni:0.013754,Antaresia_childreni:0.03040858):0.0405517):0.01371936):0.01787184):0.02750648,(Morelia_boeleni:0.08164632,((Morelia_bredli:0.03820559,Morelia_sp [...]
+tree 67 = (Candoia_aspera:0.4449339,(((((((Antaresia_melanocephalus:0.0453505,Antaresia_ramsayi:0.03185906):0.04791288,(Apodora_papuana:0.06816093,((Liasis_fuscus:0.013558,Liasis_mackloti:0.0222779):0.04901131,Liasis_olivaceus:0.04632228):0.008134278):0.01378831):0.009530067,((((Morelia_bredli:0.03261452,Morelia_spilota:0.02358782):0.02347192,(((((Morelia_clastolepis:0.008472611,Morelia_kinghorni:0.005468857):0.003942563,Morelia_nauta:0.006005992):0.01842269,Morelia_tracyae:0.03686506):0 [...]
+tree 68 = (Candoia_aspera:0.5362472,((((Python_curtus:0.1037307,(Python_sebae:0.1005292,Python_molurus:0.05540015):0.02078274):0.01943558,Python_regius:0.1120188):0.03340796,((Python_timoriensis:0.07649895,Python_reticulatus:0.06999589):0.05984266,((((Bothrochilus_boa:0.07809498,Liasis_albertisii:0.04177523):0.03933876,((Antaresia_melanocephalus:0.04510809,Antaresia_ramsayi:0.0235075):0.05109955,(Apodora_papuana:0.07140937,((Liasis_fuscus:0.01855687,Liasis_mackloti:0.01844191):0.04468211 [...]
+tree 69 = ((Loxocemus_bicolor:0.342135,((((((Liasis_albertisii:0.04830736,Bothrochilus_boa:0.07607338):0.03296,(((Liasis_fuscus:0.01679569,Liasis_mackloti:0.01195317):0.04018365,(Apodora_papuana:0.06570416,Liasis_olivaceus:0.04346911):0.01052556):0.01745802,(Antaresia_melanocephalus:0.03843291,Antaresia_ramsayi:0.03509593):0.0400436):0.01547011):0.002890318,(((((Morelia_tracyae:0.0552147,(Morelia_nauta:0.01088679,(Morelia_kinghorni:0.007242071,Morelia_clastolepis:0.01214149):0.004459173) [...]
+tree 70 = ((Loxocemus_bicolor:0.2690822,(((Python_curtus:0.0925561,(Python_sebae:0.0880517,Python_molurus:0.04310565):0.03291147):0.02703655,Python_regius:0.1015576):0.03443013,((Python_timoriensis:0.07286464,Python_reticulatus:0.0484841):0.05235894,(((((Morelia_viridisN:0.04018959,Morelia_viridisS:0.04779184):0.01664751,Morelia_carinata:0.05738444):0.01658997,(Antaresia_maculosa:0.05536493,(Antaresia_perthensis:0.07150627,(Antaresia_stimsoni:0.01522095,Antaresia_childreni:0.02336449):0. [...]
+tree 71 = ((((((Morelia_carinata:0.06155951,(Morelia_viridisS:0.04464672,Morelia_viridisN:0.02762765):0.02463339):0.01333783,(((Antaresia_childreni:0.01969403,Antaresia_stimsoni:0.01686892):0.02815768,Antaresia_perthensis:0.07071792):0.01990291,Antaresia_maculosa:0.08918901):0.01022217):0.01366177,(((Liasis_albertisii:0.05270908,Bothrochilus_boa:0.064011):0.03191483,((Antaresia_ramsayi:0.02613638,Antaresia_melanocephalus:0.04321827):0.04677747,(Apodora_papuana:0.05451725,((Liasis_fuscus: [...]
+tree 72 = (Candoia_aspera:0.453306,(Loxocemus_bicolor:0.3887322,((((Python_sebae:0.08365485,Python_molurus:0.03665528):0.0154688,Python_curtus:0.1050893):0.007793195,Python_regius:0.1534954):0.0613676,((Python_timoriensis:0.0680481,Python_reticulatus:0.0621895):0.06921195,(((Morelia_boeleni:0.07784198,((Morelia_bredli:0.02851329,Morelia_spilota:0.02151493):0.03156458,(Morelia_oenpelliensis:0.06409916,((Morelia_tracyae:0.03399367,Morelia_amethistina:0.03818573):0.008414893,(Morelia_nauta: [...]
+tree 73 = (Candoia_aspera:0.5520749,(Loxocemus_bicolor:0.2377251,(((((((Antaresia_stimsoni:0.0125949,Antaresia_childreni:0.02313392):0.02206282,Antaresia_perthensis:0.07820299):0.01191254,Antaresia_maculosa:0.06207283):0.01709952,((Morelia_viridisN:0.02766603,Morelia_viridisS:0.05194837):0.02040493,Morelia_carinata:0.06265164):0.005846406):0.03017284,(((((((Morelia_clastolepis:0.002789684,(Morelia_nauta:0.008007311,Morelia_kinghorni:0.004015526):0.004611768):0.007731586,Morelia_amethisti [...]
+tree 74 = (((((Python_curtus:0.1006856,(Python_sebae:0.08316927,Python_molurus:0.03516886):0.01796021):0.02440497,Python_regius:0.1332322):0.02834445,((Python_timoriensis:0.06957957,Python_reticulatus:0.05636223):0.05746936,((((Morelia_bredli:0.02128809,Morelia_spilota:0.02450162):0.03612316,((((Morelia_clastolepis:0.005198126,(Morelia_nauta:0.01442816,Morelia_kinghorni:0.01178255):0.002347382):0.02317242,Morelia_amethistina:0.01246185):0.01386049,Morelia_tracyae:0.03155355):0.02492261,M [...]
+tree 75 = (((((Python_timoriensis:0.07658318,Python_reticulatus:0.05110824):0.05329578,((((Morelia_bredli:0.03524291,Morelia_spilota:0.0308012):0.02868422,(Morelia_oenpelliensis:0.0754284,((Morelia_amethistina:0.02502096,(Morelia_clastolepis:0.007619496,(Morelia_kinghorni:0.00782635,Morelia_nauta:0.006335583):0.002912933):0.02746534):0.01734855,Morelia_tracyae:0.02524737):0.02578698):0.003275429):0.02849694,Morelia_boeleni:0.07492803):0.005232946,((((Morelia_viridisN:0.03856291,Morelia_v [...]
+tree 76 = (Candoia_aspera:0.4393047,(Loxocemus_bicolor:0.2542101,(((((((Antaresia_maculosa:0.06741466,((Antaresia_childreni:0.0330332,Antaresia_stimsoni:0.01454498):0.05188499,Antaresia_perthensis:0.0586203):0.01222515):0.008918908,(Morelia_carinata:0.06125037,(Morelia_viridisS:0.06262673,Morelia_viridisN:0.03160418):0.01678727):0.01154299):0.01559785,((Antaresia_ramsayi:0.02670876,Antaresia_melanocephalus:0.0420795):0.05122368,((Liasis_fuscus:0.02065444,Liasis_mackloti:0.009335938):0.04 [...]
+tree 77 = (((((Python_timoriensis:0.08782096,Python_reticulatus:0.07040678):0.05833417,(((((Morelia_viridisN:0.03720383,Morelia_viridisS:0.05117086):0.02595,Morelia_carinata:0.07158621):0.01271671,((Antaresia_perthensis:0.07707314,(Antaresia_stimsoni:0.01530133,Antaresia_childreni:0.02824286):0.03871629):0.008834431,Antaresia_maculosa:0.08010591):0.009361026):0.01831402,((Morelia_bredli:0.03099292,Morelia_spilota:0.02404462):0.03387775,((((Morelia_nauta:0.005260173,(Morelia_kinghorni:0.0 [...]
+tree 78 = (Candoia_aspera:0.4009131,((((Python_curtus:0.0869089,(Python_sebae:0.06311244,Python_molurus:0.04755661):0.01261219):0.02842917,Python_regius:0.09876378):0.03739969,(((((Liasis_albertisii:0.05324242,Bothrochilus_boa:0.06904211):0.02380102,(Antaresia_ramsayi:0.01610698,Antaresia_melanocephalus:0.04963503):0.06369576):0.001914608,(Apodora_papuana:0.05942414,((Liasis_fuscus:0.02549272,Liasis_mackloti:0.01196099):0.04490971,Liasis_olivaceus:0.03668288):0.01814909):0.0184324):0.001 [...]
+tree 79 = (((((Python_timoriensis:0.07553944,Python_reticulatus:0.07115621):0.07381626,(((((Morelia_oenpelliensis:0.06551793,(Morelia_amethistina:0.03619238,(Morelia_tracyae:0.05018406,(Morelia_clastolepis:0.002661674,(Morelia_kinghorni:0.01704692,Morelia_nauta:0.009792482):0.004593366):0.01308924):0.00478429):0.02824213):0.006003933,(Morelia_bredli:0.02129453,Morelia_spilota:0.03007546):0.03782753):0.02679029,Morelia_boeleni:0.06715453):0.0118746,(((Antaresia_melanocephalus:0.03496582,A [...]
+tree 80 = (((((Python_timoriensis:0.05253829,Python_reticulatus:0.08561018):0.06208115,(((Morelia_boeleni:0.09228959,((((Morelia_amethistina:0.02455933,(Morelia_nauta:0.002681559,(Morelia_kinghorni:0.008246165,Morelia_clastolepis:0.009838868):0.004629327):0.00972198):0.001301648,Morelia_tracyae:0.03946081):0.03981529,Morelia_oenpelliensis:0.05633571):0.0032127,(Morelia_bredli:0.02654698,Morelia_spilota:0.02108595):0.03328877):0.01067315):0.01415516,((((Liasis_olivaceus:0.04525111,Apodora [...]
+tree 81 = (Loxocemus_bicolor:0.2566853,(Candoia_aspera:0.4693582,(((((((Morelia_viridisN:0.03146441,Morelia_viridisS:0.05593448):0.0249236,Morelia_carinata:0.06224873):0.01552312,(Antaresia_maculosa:0.05616948,((Antaresia_stimsoni:0.01007809,Antaresia_childreni:0.03079328):0.03638529,Antaresia_perthensis:0.06970695):0.01042068):0.008149887):0.009665657,((Morelia_bredli:0.02544855,Morelia_spilota:0.02456336):0.02973036,(((Morelia_tracyae:0.03685517,(Morelia_clastolepis:0.002197272,(Moreli [...]
+tree 82 = ((((Python_regius:0.09520516,(Python_curtus:0.09522774,(Python_molurus:0.03950332,Python_sebae:0.05703805):0.02247388):0.02119926):0.03441932,((Python_timoriensis:0.06061827,Python_reticulatus:0.07481421):0.04666084,(((Morelia_boeleni:0.07246276,(((((Morelia_clastolepis:0.002509624,(Morelia_nauta:0.01384186,Morelia_kinghorni:0.009644053):0.007542425):0.009695616,Morelia_tracyae:0.02873899):0.008218219,Morelia_amethistina:0.02877):0.03316924,Morelia_oenpelliensis:0.05804569):0.0 [...]
+tree 83 = ((Loxocemus_bicolor:0.2952234,(((Python_curtus:0.09552038,(Python_molurus:0.03862383,Python_sebae:0.07267771):0.01718111):0.008183769,Python_regius:0.1364789):0.04738912,((Python_timoriensis:0.05761065,Python_reticulatus:0.0516746):0.06871581,((((((Liasis_fuscus:0.01480832,Liasis_mackloti:0.01183352):0.04883964,Liasis_olivaceus:0.0401242):0.0145517,Apodora_papuana:0.05118669):0.0195619,((Antaresia_melanocephalus:0.03374798,Antaresia_ramsayi:0.0376499):0.07148785,(Bothrochilus_b [...]
+tree 84 = (Candoia_aspera:0.5016068,((((Python_curtus:0.09427624,(Python_molurus:0.0227545,Python_sebae:0.09191553):0.01452091):0.0283426,Python_regius:0.107775):0.0194763,((((Morelia_carinata:0.07375593,(Morelia_viridisS:0.0482057,Morelia_viridisN:0.03770947):0.01584058):0.01271865,(Antaresia_maculosa:0.0710502,((Antaresia_childreni:0.01593182,Antaresia_stimsoni:0.01507543):0.03282155,Antaresia_perthensis:0.06500106):0.01194462):0.002767007):0.01885032,((((((Liasis_fuscus:0.02328425,Lia [...]
+tree 85 = ((Loxocemus_bicolor:0.2794339,((((Python_molurus:0.03312409,Python_sebae:0.07822667):0.02901692,Python_curtus:0.1018022):0.022905,Python_regius:0.1415919):0.05044797,((Python_timoriensis:0.06076838,Python_reticulatus:0.06819505):0.05838198,((((Morelia_bredli:0.03212745,Morelia_spilota:0.0331548):0.02415561,(((((Morelia_kinghorni:0.0102482,Morelia_nauta:0.008844634):0.002827055,Morelia_clastolepis:0.005156116):0.01969235,Morelia_amethistina:0.02046229):0.01034341,Morelia_tracyae [...]
+tree 86 = ((Loxocemus_bicolor:0.2683567,((Python_regius:0.1091685,((Python_molurus:0.03421631,Python_sebae:0.07909729):0.02669281,Python_curtus:0.1242822):0.01552606):0.03635674,(((((Antaresia_maculosa:0.05295493,(Antaresia_perthensis:0.07248801,(Antaresia_childreni:0.02284777,Antaresia_stimsoni:0.01413853):0.0379826):0.01342258):0.01068672,(Morelia_carinata:0.06262578,(Morelia_viridisS:0.04071067,Morelia_viridisN:0.03447906):0.01312935):0.01015363):0.01178044,((Antaresia_ramsayi:0.02418 [...]
+tree 87 = (Candoia_aspera:0.4835051,((((((Antaresia_maculosa:0.0667961,(Antaresia_perthensis:0.07352979,(Antaresia_childreni:0.01595269,Antaresia_stimsoni:0.01602515):0.0377149):0.01316477):0.007044655,(Morelia_carinata:0.06471191,(Morelia_viridisS:0.05813323,Morelia_viridisN:0.02882801):0.01819743):0.01237297):0.01222415,((Morelia_boeleni:0.1043827,((Antaresia_ramsayi:0.03185868,Antaresia_melanocephalus:0.05093386):0.06070819,((Liasis_albertisii:0.04981429,Bothrochilus_boa:0.07101304):0 [...]
+tree 88 = ((Candoia_aspera:0.3563804,Loxocemus_bicolor:0.2285549):0.05516094,((Python_regius:0.1174086,((Python_sebae:0.0852972,Python_molurus:0.04013307):0.02585051,Python_curtus:0.1126192):0.01456538):0.03825918,((((((Morelia_bredli:0.02760348,Morelia_spilota:0.02851919):0.03478335,((Morelia_amethistina:0.02163415,((Morelia_clastolepis:0.003876799,(Morelia_nauta:0.01234636,Morelia_kinghorni:0.00726198):0.00929985):0.02923753,Morelia_tracyae:0.04439396):0.004379334):0.0374095,Morelia_oe [...]
+tree 89 = (Candoia_aspera:0.4520071,(((Python_regius:0.1009007,(Python_curtus:0.09834139,(Python_sebae:0.0801233,Python_molurus:0.0414197):0.01531084):0.02265769):0.06058729,(((((Liasis_albertisii:0.04496162,Bothrochilus_boa:0.07014187):0.0258081,((Antaresia_melanocephalus:0.03625373,Antaresia_ramsayi:0.0349284):0.04496869,((Liasis_olivaceus:0.03381452,Apodora_papuana:0.06317617):0.01193837,(Liasis_fuscus:0.01998534,Liasis_mackloti:0.01399873):0.04608922):0.02671637):0.00873879):0.004700 [...]
+tree 90 = (Candoia_aspera:0.4645928,((((Python_curtus:0.1195643,(Python_molurus:0.03740045,Python_sebae:0.08232443):0.01678619):0.02534988,Python_regius:0.1532762):0.02461821,(((((((Antaresia_childreni:0.02600772,Antaresia_stimsoni:0.01458769):0.02653477,Antaresia_perthensis:0.06571443):0.01466257,Antaresia_maculosa:0.05789742):0.01238719,((Morelia_viridisS:0.04478665,Morelia_viridisN:0.03251355):0.02382011,Morelia_carinata:0.08176551):0.01515166):0.01923722,(((Antaresia_ramsayi:0.015595 [...]
+tree 91 = ((Candoia_aspera:0.4253582,Loxocemus_bicolor:0.287425):0.06440288,(((Python_curtus:0.09233986,(Python_molurus:0.03912399,Python_sebae:0.07176809):0.01470935):0.01220318,Python_regius:0.1215375):0.04595127,((Python_timoriensis:0.07923207,Python_reticulatus:0.07217266):0.06028461,(((((Morelia_oenpelliensis:0.05400092,(Morelia_tracyae:0.03579002,(Morelia_amethistina:0.02089989,((Morelia_clastolepis:0.004037098,Morelia_kinghorni:0.006879557):0.00439477,Morelia_nauta:0.009191494):0. [...]
+tree 92 = (Loxocemus_bicolor:0.3061242,((((((((Morelia_viridisS:0.0488484,Morelia_viridisN:0.02718428):0.03040546,Morelia_carinata:0.05363968):0.01366625,(((Antaresia_childreni:0.02372216,Antaresia_stimsoni:0.01250969):0.03749508,Antaresia_perthensis:0.05944827):0.009214165,Antaresia_maculosa:0.07487972):0.005791328):0.01428844,((((Liasis_olivaceus:0.03355395,Apodora_papuana:0.08493765):0.01773312,(Liasis_fuscus:0.01973323,Liasis_mackloti:0.01224162):0.04450705):0.02705612,(Antaresia_ram [...]
+tree 93 = (((((Python_curtus:0.1052608,(Python_molurus:0.03656474,Python_sebae:0.0804119):0.01736489):0.01981846,Python_regius:0.118569):0.04305073,((((((((Antaresia_stimsoni:0.01799286,Antaresia_childreni:0.02440719):0.03664232,Antaresia_perthensis:0.06835721):0.01030149,Antaresia_maculosa:0.07669343):0.008762603,(Morelia_carinata:0.05903126,(Morelia_viridisN:0.02314959,Morelia_viridisS:0.05217384):0.01633361):0.01580508):0.02360512,((Antaresia_melanocephalus:0.04446293,Antaresia_ramsay [...]
+tree 94 = ((((((((Antaresia_stimsoni:0.01394605,Antaresia_childreni:0.02307618):0.03469821,Antaresia_perthensis:0.07348681):0.006527661,Antaresia_maculosa:0.07308458):0.009157082,((Morelia_viridisN:0.03283968,Morelia_viridisS:0.04931701):0.03098315,Morelia_carinata:0.06247037):0.01365183):0.01322039,((((Apodora_papuana:0.06792187,((Liasis_fuscus:0.02949712,Liasis_mackloti:0.006633742):0.04612603,Liasis_olivaceus:0.04939289):0.0173581):0.01385641,(Antaresia_melanocephalus:0.0383219,Antare [...]
+tree 95 = ((((Python_regius:0.1106696,((Python_molurus:0.03080011,Python_sebae:0.08768923):0.01799399,Python_curtus:0.09582502):0.0237615):0.0327879,((Python_timoriensis:0.05730162,Python_reticulatus:0.05610117):0.05178376,(((Morelia_carinata:0.05950242,(Morelia_viridisN:0.03009795,Morelia_viridisS:0.04999991):0.02362724):0.02350348,(((Antaresia_stimsoni:0.01536701,Antaresia_childreni:0.02915634):0.02933886,Antaresia_perthensis:0.08195631):0.01110259,Antaresia_maculosa:0.05806133):0.0123 [...]
+tree 96 = (Candoia_aspera:0.4501354,(((Python_regius:0.1048399,(Python_curtus:0.106706,(Python_molurus:0.03471225,Python_sebae:0.07358986):0.02425296):0.02633883):0.04694233,(((((Morelia_bredli:0.03593233,Morelia_spilota:0.02438969):0.03173767,(((Morelia_tracyae:0.04654885,Morelia_amethistina:0.03659602):0.008259002,(Morelia_clastolepis:0.004036055,(Morelia_kinghorni:0.006595114,Morelia_nauta:0.006000134):0.007697233):0.007601145):0.03532755,Morelia_oenpelliensis:0.05754269):0.007221801) [...]
+tree 97 = (Candoia_aspera:0.4334181,(Loxocemus_bicolor:0.2483078,(((Python_curtus:0.09120633,(Python_sebae:0.06811592,Python_molurus:0.02124031):0.01523931):0.01271599,Python_regius:0.1235802):0.04444922,(((((Morelia_bredli:0.02647463,Morelia_spilota:0.02569939):0.03189336,((Morelia_tracyae:0.02969979,(Morelia_amethistina:0.03375546,(Morelia_clastolepis:0.00246764,(Morelia_nauta:0.004177599,Morelia_kinghorni:0.01005074):0.006947956):0.01479339):0.009535147):0.01780938,Morelia_oenpelliens [...]
+tree 98 = ((Candoia_aspera:0.5016151,Loxocemus_bicolor:0.2444724):0.03221846,((((((Morelia_viridisS:0.04729769,Morelia_viridisN:0.03434786):0.01883497,Morelia_carinata:0.08120721):0.01275832,(Antaresia_maculosa:0.07250472,((Antaresia_childreni:0.0364534,Antaresia_stimsoni:0.01815363):0.04449293,Antaresia_perthensis:0.06613409):0.01531379):0.007934725):0.01554437,(((Liasis_albertisii:0.05705392,Bothrochilus_boa:0.07101109):0.03851546,((Antaresia_ramsayi:0.03344109,Antaresia_melanocephalus [...]
+tree 99 = (((((Python_timoriensis:0.08109342,Python_reticulatus:0.0630196):0.0759962,(((((Morelia_viridisS:0.05289974,Morelia_viridisN:0.03362567):0.01717123,Morelia_carinata:0.06720496):0.01746322,(Antaresia_maculosa:0.07133522,(Antaresia_perthensis:0.0738161,(Antaresia_childreni:0.02466137,Antaresia_stimsoni:0.01100988):0.03619607):0.0128205):0.005775329):0.01917408,((Morelia_oenpelliensis:0.0642247,(Morelia_tracyae:0.03121744,(Morelia_amethistina:0.02118763,((Morelia_clastolepis:0.006 [...]
+tree 100 = ((Loxocemus_bicolor:0.3021923,(((((((((Morelia_tracyae:0.04026419,((Morelia_clastolepis:0.003650239,Morelia_kinghorni:0.008478112):0.007771618,Morelia_nauta:0.008646885):0.01688588):0.006949875,Morelia_amethistina:0.02684855):0.03323158,Morelia_oenpelliensis:0.06536625):0.004677463,(Morelia_bredli:0.03398565,Morelia_spilota:0.02089452):0.02877487):0.02051626,Morelia_boeleni:0.08799992):0.001944661,((Antaresia_maculosa:0.07478773,(Antaresia_perthensis:0.08661767,(Antaresia_stim [...]
+tree 101 = (Candoia_aspera:0.5519415,(((Python_regius:0.1273714,(Python_curtus:0.1089187,(Python_molurus:0.03695303,Python_sebae:0.07848258):0.02253384):0.02297084):0.06241567,((((((Liasis_fuscus:0.01357072,Liasis_mackloti:0.01388168):0.04841025,Liasis_olivaceus:0.04581289):0.003137857,Apodora_papuana:0.06201379):0.0147125,(Antaresia_ramsayi:0.02819799,Antaresia_melanocephalus:0.03866448):0.05030802):0.006126414,(((((Morelia_clastolepis:0.01366347,(Morelia_kinghorni:0.004410157,Morelia_n [...]
+End;
+
+[Total of 101 trees sourced from:]
+[ ../mb/pythonidae.mb.run4.t: 1001 trees in file, sampling 1 tree of every 10 trees after 0 tree burn-in: 101 trees added (current total = 101 trees) ]
diff --git a/doc/source/examples/pythonidae.mcmc-con.nex b/doc/source/examples/pythonidae.mcmc-con.nex
new file mode 100644
index 0000000..b680669
--- /dev/null
+++ b/doc/source/examples/pythonidae.mcmc-con.nex
@@ -0,0 +1,13 @@
+#NEXUS
+
+[ID: 0344228063]
+begin trees;
+   [Note: This tree contains information on the topology, 
+          branch lengths (if present), and the probability
+          of the partition indicated by the branch.]
+   tree con_50_majrule = (Python_regius:0.212275,(Python_sebae:0.176816,((((((Antaresia_maculosa:0.127351,(Antaresia_perthensis:0.108378,(Antaresia_stimsoni:0.021372,Antaresia_childreni:0.038155)1.00:0.046446)1.00:0.025262)0.94:0.012957,(Morelia_carinata:0.101145,(Morelia_bredli:0.038563,Morelia_spilota:0.025643)1.00:0.050967)0.52:0.010472,Morelia_viridis:0.098541)0.98:0.023291,(Bothrochilus_boa:0.091928,Leiopython_albertisii:0.080986)1.00:0.031583)0.54:0.008347,(((Liasis_fuscus:0.026601 [...]
+
+   [Note: This tree contains information only on the topology
+          and branch lengths (mean of the posterior probability density).]
+   tree con_50_majrule = (Python_regius:0.212275,(Python_sebae:0.176816,((((((Antaresia_maculosa:0.127351,(Antaresia_perthensis:0.108378,(Antaresia_stimsoni:0.021372,Antaresia_childreni:0.038155):0.046446):0.025262):0.012957,(Morelia_carinata:0.101145,(Morelia_bredli:0.038563,Morelia_spilota:0.025643):0.050967):0.010472,Morelia_viridis:0.098541):0.023291,(Bothrochilus_boa:0.091928,Leiopython_albertisii:0.080986):0.031583):0.008347,(((Liasis_fuscus:0.026601,Liasis_mackloti:0.034524):0.069 [...]
+end;
diff --git a/doc/source/examples/pythonidae.mcmc.nex b/doc/source/examples/pythonidae.mcmc.nex
new file mode 100644
index 0000000..5e0911c
--- /dev/null
+++ b/doc/source/examples/pythonidae.mcmc.nex
@@ -0,0 +1,1035 @@
+#NEXUS
+[ID: 0344228063]
+begin trees;
+   translate
+       1 Python_regius,
+       2 Python_sebae,
+       3 Python_brongersmai,
+       4 Antaresia_maculosa,
+       5 Python_timoriensis,
+       6 Python_molurus,
+       7 Morelia_carinata,
+       8 Morelia_boeleni,
+       9 Antaresia_perthensis,
+      10 Morelia_viridis,
+      11 Aspidites_ramsayi,
+      12 Aspidites_melanocephalus,
+      13 Morelia_oenpelliensis,
+      14 Bothrochilus_boa,
+      15 Morelia_bredli,
+      16 Morelia_spilota,
+      17 Antaresia_stimsoni,
+      18 Antaresia_childreni,
+      19 Leiopython_albertisii,
+      20 Python_reticulatus,
+      21 Morelia_tracyae,
+      22 Morelia_amethistina,
+      23 Morelia_nauta,
+      24 Morelia_kinghorni,
+      25 Morelia_clastolepis,
+      26 Liasis_fuscus,
+      27 Liasis_mackloti,
+      28 Liasis_olivaceus,
+      29 Apodora_papuana;
+   tree rep.1 = (((((((((((14:0.100000,((22:0.100000,6:0.100000):0.100000,9:0.100000):0.100000):0.100000,5:0.100000):0.100000,29:0.100000):0.100000,(13:0.100000,4:0.100000):0.100000):0.100000,3:0.100000):0.100000,(19:0.100000,(((28:0.100000,8:0.100000):0.100000,12:0.100000):0.100000,10:0.100000):0.100000):0.100000):0.100000,(24:0.100000,7:0.100000):0.100000):0.100000,26:0.100000):0.100000,(((2:0.100000,20:0.100000):0.100000,18:0.100000):0.100000,23:0.100000):0.100000):0.100000,(21:0.1000 [...]
+   tree rep.20000 = ((6:0.066242,3:0.164950):0.011796,((((((((23:0.024624,24:0.012317):0.016090,25:0.003155):0.027805,22:0.032444):0.015881,21:0.042692):0.038357,8:0.093709):0.020549,((((10:0.092033,(((9:0.082520,(18:0.021861,17:0.030731):0.048695):0.016093,4:0.132174):0.016746,((16:0.025021,15:0.035369):0.048370,7:0.099389):0.019761):0.004791):0.029715,(14:0.077826,19:0.106430):0.027724):0.000845,((29:0.094915,(28:0.040298,(26:0.013435,27:0.026653):0.071847):0.024954):0.006012,(5:0.1106 [...]
+   tree rep.40000 = ((2:0.156766,(((((((25:0.007356,(23:0.006374,24:0.009580):0.009185):0.053165,22:0.035049):0.008131,21:0.043555):0.028292,8:0.077070):0.022144,13:0.074105):0.021492,((5:0.103388,20:0.100425):0.056701,(((((4:0.141057,(9:0.087752,(18:0.039802,17:0.022170):0.051033):0.014415):0.012186,((16:0.023133,15:0.034100):0.062244,7:0.093082):0.001932):0.002272,10:0.093117):0.008672,(14:0.092998,19:0.070189):0.029011):0.012604,(29:0.077475,(28:0.039202,(26:0.019990,27:0.032452):0.08 [...]
+   tree rep.60000 = (6:0.087544,((((11:0.025398,12:0.067735):0.051889,(((13:0.120279,(20:0.083649,5:0.126364):0.080529):0.007576,(8:0.081935,(21:0.046429,(22:0.021868,((24:0.016309,23:0.006618):0.005431,25:0.010977):0.023079):0.021088):0.013159):0.022275):0.011375,(((19:0.096361,14:0.073823):0.047752,(7:0.096241,(10:0.106092,((15:0.046037,16:0.016857):0.067951,(((17:0.016344,18:0.038834):0.033284,9:0.162219):0.031977,4:0.107652):0.004203):0.002284):0.007753):0.008682):0.021166,(29:0.0878 [...]
+   tree rep.80000 = ((((11:0.038378,12:0.058712):0.052677,((((20:0.146964,5:0.104805):0.080444,((21:0.035776,(22:0.032316,(25:0.005197,(23:0.006689,24:0.009212):0.009990):0.023886):0.024226):0.031773,8:0.101339):0.003138):0.000195,13:0.096892):0.019476,((((27:0.058796,26:0.010477):0.055104,28:0.042831):0.027888,29:0.075590):0.020668,((19:0.077888,14:0.087141):0.007524,(7:0.100940,(((15:0.036428,16:0.015059):0.065667,((9:0.091994,(17:0.018873,18:0.040516):0.040931):0.030215,4:0.143200):0. [...]
+   tree rep.100000 = (((((11:0.032553,12:0.059714):0.059121,(((((27:0.045437,26:0.014253):0.059063,28:0.053902):0.023804,29:0.084789):0.019326,((7:0.119620,(((4:0.122284,(9:0.094358,(17:0.026884,18:0.028133):0.062666):0.023835):0.004876,10:0.109429):0.001965,(15:0.032295,16:0.025799):0.041995):0.005608):0.029921,(19:0.056328,14:0.098134):0.035393):0.011682):0.013442,((((((23:0.014767,24:0.009067):0.003148,25:0.010787):0.028381,22:0.023380):0.016466,21:0.045603):0.021974,8:0.098244):0.019 [...]
+   tree rep.120000 = (3:0.133378,(6:0.103237,(2:0.156479,(((((20:0.071555,5:0.107144):0.058197,13:0.066838):0.014620,(8:0.109436,(((25:0.009829,(24:0.009454,23:0.009614):0.009466):0.012624,22:0.036312):0.019345,21:0.068710):0.028961):0.033051):0.012899,(((14:0.094880,19:0.066591):0.053403,((((18:0.030974,17:0.033525):0.039525,9:0.088811):0.012548,4:0.125078):0.019271,(((15:0.037198,16:0.030946):0.038329,7:0.101189):0.001550,10:0.086028):0.003755):0.025105):0.003342,(29:0.118019,((27:0.03 [...]
+   tree rep.140000 = (3:0.115251,(6:0.093083,(2:0.199799,((12:0.070041,11:0.031675):0.041867,(((8:0.087981,(21:0.039451,((25:0.014768,(24:0.017794,23:0.013078):0.005106):0.025318,22:0.034334):0.016130):0.040811):0.038971,((29:0.102852,(28:0.038674,(26:0.042713,27:0.040491):0.067797):0.027906):0.005400,(((7:0.066430,(5:0.110463,20:0.080290):0.070874):0.035758,((10:0.087483,(16:0.025993,15:0.037533):0.037780):0.000779,((9:0.107437,(18:0.046426,17:0.024178):0.045079):0.043162,4:0.112734):0. [...]
+   tree rep.160000 = ((2:0.162445,((11:0.023406,12:0.078201):0.047812,((13:0.078235,((21:0.035882,(22:0.024192,((24:0.008695,23:0.011262):0.006407,25:0.011246):0.021368):0.017300):0.020042,8:0.101499):0.012961):0.001593,(((19:0.063372,14:0.094855):0.033147,((5:0.139547,20:0.070146):0.086217,((((16:0.031473,15:0.027645):0.043013,7:0.093368):0.009385,10:0.069786):0.007080,(4:0.105749,((18:0.031231,17:0.014916):0.048466,9:0.097602):0.023718):0.013501):0.030685):0.003705):0.005511,(29:0.1048 [...]
+   tree rep.180000 = (((((11:0.036306,12:0.125277):0.040802,((((19:0.076161,14:0.085402):0.041938,(((7:0.104681,(15:0.046343,16:0.020058):0.032550):0.014708,(((17:0.033986,18:0.038097):0.052182,9:0.099732):0.024399,4:0.120885):0.007330):0.008928,10:0.106584):0.019722):0.001206,(29:0.119861,((27:0.055971,26:0.024069):0.062695,28:0.056427):0.031985):0.011853):0.003903,(((((25:0.004093,(24:0.010450,23:0.019540):0.008672):0.009136,22:0.046464):0.043936,21:0.028645):0.034011,8:0.133715):0.037 [...]
+   tree rep.200000 = ((((12:0.091882,11:0.018945):0.064978,(((29:0.070282,((26:0.020934,27:0.033646):0.060574,28:0.068032):0.024759):0.030816,((7:0.096897,(10:0.083818,((16:0.015991,15:0.044456):0.082663,(4:0.107987,(9:0.132207,(18:0.052303,17:0.020907):0.043901):0.019891):0.012545):0.001877):0.010896):0.018368,(14:0.113230,19:0.068727):0.023463):0.003625):0.023145,((8:0.089425,(((25:0.006184,(24:0.011516,23:0.005965):0.004166):0.033560,22:0.034933):0.015651,21:0.030684):0.057262):0.0142 [...]
+   tree rep.220000 = (((((20:0.092398,5:0.096801):0.099477,((((21:0.054423,(22:0.034713,((23:0.016325,24:0.011948):0.020165,25:0.001339):0.024767):0.019288):0.040695,8:0.118017):0.024524,(((((15:0.050016,16:0.026010):0.052242,(10:0.100138,7:0.123147):0.013631):0.002664,(4:0.106282,((18:0.049794,17:0.026242):0.059687,9:0.089050):0.017469):0.003170):0.023720,(14:0.086932,19:0.100777):0.031222):0.001476,(29:0.094961,((27:0.031210,26:0.039721):0.065917,28:0.041315):0.024683):0.010741):0.0149 [...]
+   tree rep.240000 = (3:0.127963,(6:0.114169,((((20:0.081608,5:0.102154):0.086161,((13:0.079038,((29:0.106681,(28:0.038765,(27:0.037469,26:0.033306):0.072767):0.021642):0.003397,((14:0.093724,19:0.098293):0.044330,((((15:0.034152,16:0.018713):0.050381,7:0.086029):0.006483,10:0.099186):0.003022,(4:0.143670,((18:0.052550,17:0.030845):0.064398,9:0.105283):0.023843):0.003299):0.023680):0.004262):0.010121):0.003192,((21:0.031183,(((24:0.016591,23:0.006491):0.005068,25:0.007603):0.017110,22:0. [...]
+   tree rep.260000 = (((((12:0.079467,11:0.031992):0.059879,((((14:0.090943,19:0.082250):0.028405,(7:0.097020,((((9:0.134867,(18:0.035038,17:0.021354):0.057394):0.023856,4:0.138457):0.012068,(16:0.035331,15:0.024601):0.057995):0.001822,10:0.092626):0.027787):0.018629):0.000089,((28:0.047609,(26:0.033069,27:0.024366):0.088182):0.015294,29:0.121131):0.033635):0.005770,((5:0.159578,20:0.075151):0.089908,(13:0.097538,(8:0.098020,(21:0.039361,(22:0.035769,((23:0.005923,24:0.009694):0.002434,2 [...]
+   tree rep.280000 = (6:0.064674,((2:0.140332,((12:0.061720,11:0.029913):0.069023,((((5:0.088341,20:0.070670):0.087732,(((22:0.033887,(25:0.006936,(23:0.005719,24:0.011723):0.003023):0.018374):0.006656,21:0.031491):0.024020,8:0.087635):0.017082):0.000346,13:0.070500):0.003575,((((26:0.022377,27:0.045309):0.055706,28:0.043330):0.024093,29:0.087036):0.000732,((14:0.083428,19:0.067606):0.011356,(((((18:0.039845,17:0.023187):0.051378,9:0.082422):0.043093,4:0.136748):0.014051,((16:0.016019,15 [...]
+   tree rep.300000 = (((6:0.107793,3:0.122173):0.008786,((12:0.057621,11:0.026143):0.033211,(((((((23:0.011336,25:0.010016):0.003296,24:0.015145):0.023620,22:0.049514):0.032798,21:0.045835):0.030294,8:0.084696):0.033141,((20:0.102934,5:0.079042):0.106620,13:0.088779):0.006746):0.001026,((((((15:0.039735,16:0.023041):0.049682,(((18:0.036827,17:0.030360):0.032671,9:0.095226):0.032777,4:0.117850):0.011220):0.016476,10:0.098037):0.016351,7:0.079656):0.009369,(14:0.086953,19:0.086886):0.04372 [...]
+   tree rep.320000 = ((6:0.109370,3:0.132079):0.001809,(2:0.207706,((((20:0.117807,5:0.080901):0.080200,13:0.076291):0.002647,((((14:0.085517,19:0.076941):0.034881,(7:0.100367,(((15:0.049910,16:0.007830):0.072859,10:0.110245):0.001829,(((17:0.025151,18:0.033563):0.061119,9:0.096304):0.035663,4:0.196790):0.008530):0.005568):0.027995):0.010697,((28:0.033861,(27:0.029351,26:0.028996):0.085989):0.021810,29:0.103881):0.026547):0.015077,((21:0.036502,(((23:0.006268,24:0.014905):0.007028,25:0.0 [...]
+   tree rep.340000 = (6:0.076820,((2:0.153922,(((((21:0.041467,((25:0.006382,(23:0.014490,24:0.005123):0.006154):0.016617,22:0.031018):0.033249):0.022096,8:0.080763):0.013595,((20:0.084480,5:0.093029):0.110405,13:0.079587):0.010755):0.003470,((7:0.115861,(((15:0.032429,16:0.025279):0.046534,(((17:0.038631,18:0.017890):0.062821,9:0.159500):0.006277,4:0.142955):0.017415):0.010805,10:0.096951):0.012902):0.015563,((29:0.108854,(28:0.069891,(27:0.040901,26:0.029900):0.068192):0.018426):0.0300 [...]
+   tree rep.360000 = ((3:0.119524,6:0.098720):0.006230,(((12:0.066245,11:0.052762):0.042546,((((28:0.037276,(26:0.015716,27:0.045728):0.077072):0.009253,29:0.092000):0.022479,((((4:0.110653,(9:0.089569,(18:0.024773,17:0.021932):0.051323):0.040936):0.018071,10:0.118262):0.000977,((16:0.034831,15:0.026489):0.027292,7:0.123760):0.004208):0.027011,(14:0.094550,19:0.088755):0.045072):0.000752):0.012716,((5:0.107881,20:0.089122):0.147768,(13:0.097705,(8:0.111097,((22:0.030309,(25:0.005611,(24: [...]
+   tree rep.380000 = ((2:0.191276,((((4:0.134543,((17:0.019189,18:0.038759):0.038405,9:0.140808):0.029777):0.013784,((7:0.138724,(15:0.034745,16:0.028682):0.059342):0.009800,10:0.115521):0.000272):0.034364,(((14:0.089944,19:0.116504):0.050643,(29:0.083687,((27:0.054076,26:0.040497):0.075881,28:0.031886):0.033779):0.002907):0.017705,(13:0.125572,((20:0.079378,5:0.131943):0.118471,((21:0.049810,(22:0.054498,(25:0.008190,(23:0.005479,24:0.015176):0.002958):0.024893):0.009581):0.069042,8:0.0 [...]
+   tree rep.400000 = (((((12:0.067995,11:0.032097):0.053773,((13:0.111964,(5:0.091170,20:0.087863):0.102534):0.004292,(((21:0.084798,(22:0.031903,((24:0.003218,23:0.017324):0.004630,25:0.004104):0.023852):0.024036):0.035037,8:0.106754):0.037366,(((19:0.077909,14:0.088587):0.012959,(7:0.104451,(((9:0.134971,(18:0.029413,17:0.016605):0.045141):0.014394,4:0.109603):0.010890,((16:0.016521,15:0.040052):0.042016,10:0.110036):0.004623):0.018270):0.019924):0.016024,((28:0.040569,(26:0.029721,27: [...]
+   tree rep.420000 = (6:0.066255,(3:0.145331,(2:0.135355,((((8:0.088270,(21:0.032605,(22:0.031297,(25:0.005536,(24:0.005562,23:0.014683):0.011198):0.023731):0.013853):0.017935):0.019848,((20:0.106515,5:0.088855):0.112322,13:0.059966):0.019449):0.004700,(((14:0.078988,19:0.085650):0.035162,((((17:0.017906,18:0.035317):0.056050,9:0.107504):0.035603,4:0.098536):0.008222,(10:0.102519,(7:0.082231,(15:0.048974,16:0.028733):0.061030):0.003854):0.006633):0.028426):0.011727,(29:0.106892,((27:0.02 [...]
+   tree rep.440000 = ((3:0.119786,6:0.109221):0.000111,(2:0.150612,((((((27:0.038765,26:0.013340):0.080796,28:0.033309):0.024991,29:0.083229):0.010282,(((7:0.099201,(15:0.035848,16:0.023751):0.048478):0.006029,((((17:0.022685,18:0.031675):0.046747,9:0.105321):0.013481,4:0.110149):0.004093,10:0.097749):0.000063):0.027416,(14:0.108567,19:0.082453):0.044440):0.001738):0.010393,(((20:0.121238,5:0.097226):0.062195,13:0.091685):0.025217,(((22:0.044918,((23:0.005094,24:0.008362):0.007445,25:0.0 [...]
+   tree rep.460000 = (6:0.093595,((2:0.187087,((((((19:0.089933,14:0.088488):0.038830,((((9:0.101365,(18:0.038876,17:0.016522):0.050813):0.025138,10:0.121651):0.002983,(4:0.125660,(16:0.017566,15:0.051198):0.036176):0.002404):0.001240,7:0.089465):0.020295):0.006713,(29:0.092679,(28:0.053557,(26:0.027330,27:0.039222):0.075719):0.020540):0.013570):0.007720,(8:0.105113,((25:0.006341,(23:0.013619,24:0.005021):0.007755):0.052184,(21:0.042194,22:0.027831):0.003340):0.016426):0.052125):0.002686 [...]
+   tree rep.480000 = ((3:0.126218,(2:0.174180,((11:0.023077,12:0.089611):0.071332,((((14:0.085322,19:0.069454):0.032306,((((4:0.114065,(9:0.077260,(17:0.022449,18:0.036862):0.052723):0.030863):0.005682,(15:0.021107,16:0.025233):0.068568):0.004595,10:0.073245):0.018960,7:0.116912):0.008560):0.018768,((28:0.043698,(27:0.042325,26:0.020070):0.047866):0.022116,29:0.090278):0.006365):0.005193,((20:0.055091,5:0.097617):0.097119,(13:0.083543,((21:0.041052,(22:0.033022,((24:0.005352,23:0.014195) [...]
+   tree rep.500000 = (((2:0.184519,((11:0.015618,12:0.074203):0.045708,(((((14:0.131671,19:0.081796):0.022834,((28:0.029643,(27:0.060136,26:0.011817):0.085127):0.008146,29:0.078425):0.023608):0.000530,((10:0.089542,((15:0.032912,16:0.030956):0.038066,7:0.094766):0.004567):0.013856,((9:0.080039,(17:0.024119,18:0.032440):0.040563):0.017506,4:0.129181):0.024291):0.052140):0.014456,((((22:0.047893,((23:0.003739,24:0.012612):0.006557,25:0.012786):0.008972):0.032419,21:0.042256):0.011100,8:0.0 [...]
+   tree rep.520000 = ((((12:0.072167,11:0.028015):0.036335,((((5:0.075035,20:0.121305):0.140509,(8:0.095196,((22:0.036789,(25:0.019076,(23:0.012057,24:0.007781):0.006432):0.039741):0.010489,21:0.036157):0.028105):0.035141):0.006673,13:0.092414):0.026057,(((19:0.066412,14:0.080688):0.029921,(((4:0.116620,(9:0.094084,(18:0.033201,17:0.020318):0.069434):0.043931):0.007478,((16:0.017397,15:0.057939):0.065047,10:0.109745):0.004474):0.015277,7:0.101029):0.011178):0.027656,(29:0.092704,(28:0.04 [...]
+   tree rep.540000 = ((6:0.085912,(2:0.218475,(((((4:0.108417,((18:0.031647,17:0.023475):0.037196,9:0.128013):0.020437):0.006807,(10:0.063205,((16:0.020942,15:0.037521):0.050094,7:0.095031):0.006794):0.011806):0.014709,((29:0.089903,((26:0.022100,27:0.031617):0.072881,28:0.051269):0.028856):0.005664,(19:0.085257,14:0.109354):0.039405):0.002797):0.011916,(13:0.086817,((5:0.087513,20:0.091199):0.110736,(8:0.082743,(21:0.062968,((25:0.009119,(24:0.009762,23:0.006528):0.005923):0.025119,22:0 [...]
+   tree rep.560000 = ((3:0.118195,6:0.102949):0.007385,(((((21:0.028763,(22:0.025181,(25:0.007616,(24:0.010449,23:0.003081):0.013673):0.017942):0.019570):0.032968,8:0.095324):0.021291,(((20:0.073436,5:0.096795):0.097916,13:0.090163):0.005336,((29:0.085398,(28:0.050384,(27:0.038996,26:0.019305):0.057289):0.020915):0.015679,((14:0.105687,19:0.046054):0.054246,(10:0.086456,(((15:0.039772,16:0.030514):0.053672,7:0.107885):0.010185,((9:0.128680,(17:0.028333,18:0.042981):0.024993):0.023723,4:0 [...]
+   tree rep.580000 = (3:0.128064,((((12:0.059931,11:0.024556):0.053313,((((21:0.035968,((25:0.010531,(23:0.013752,24:0.014772):0.009194):0.028686,22:0.031485):0.017983):0.046246,8:0.095083):0.038491,13:0.108833):0.006873,(((14:0.109715,19:0.068349):0.048503,(((7:0.102903,(16:0.010275,15:0.055893):0.053192):0.000608,10:0.068682):0.005754,(((18:0.030669,17:0.024797):0.040691,9:0.140314):0.033296,4:0.129000):0.004638):0.035078):0.008286,((((26:0.026587,27:0.029049):0.082789,28:0.039350):0.0 [...]
+   tree rep.600000 = ((6:0.061092,3:0.169896):0.011645,(2:0.181783,((11:0.018080,12:0.086661):0.043769,(((20:0.103330,5:0.104583):0.091467,((((9:0.115002,(17:0.041995,18:0.054883):0.043128):0.039414,4:0.138915):0.012388,((7:0.116387,(15:0.043266,16:0.028769):0.055826):0.019675,10:0.110559):0.004465):0.014870,((((27:0.027046,26:0.035224):0.066791,28:0.059201):0.023672,(14:0.096968,19:0.086170):0.021957):0.000905,29:0.109745):0.005201):0.004943):0.000795,((8:0.122979,((22:0.033693,(25:0.00 [...]
+   tree rep.620000 = (3:0.141553,((((12:0.072751,11:0.020441):0.050791,((((((10:0.093387,((9:0.126717,(18:0.033109,17:0.031741):0.052431):0.011638,4:0.117468):0.015478):0.002366,(16:0.011219,15:0.049856):0.061717):0.010744,7:0.123527):0.020269,(14:0.099376,19:0.070737):0.029096):0.007200,(29:0.102276,(28:0.048486,(26:0.021617,27:0.029996):0.066796):0.012667):0.021023):0.017374,((5:0.058965,20:0.134657):0.075881,(13:0.061269,((21:0.059790,(22:0.029337,(25:0.006944,(23:0.013428,24:0.012310 [...]
+   tree rep.640000 = ((((((13:0.092996,(20:0.113759,5:0.145262):0.108433):0.017001,(((7:0.111712,(15:0.034124,16:0.022092):0.044629):0.022932,((4:0.138224,((18:0.036929,17:0.019209):0.031179,9:0.149545):0.034894):0.010031,10:0.120826):0.001343):0.020349,((((27:0.036571,26:0.030546):0.071955,28:0.051839):0.021497,29:0.144680):0.012331,(19:0.123045,14:0.087199):0.064437):0.008111):0.018831):0.000395,((((25:0.002279,(23:0.012470,24:0.005875):0.009469):0.030213,22:0.031235):0.028347,21:0.050 [...]
+   tree rep.660000 = (3:0.126194,(6:0.087865,(((12:0.036953,11:0.053672):0.043858,(((19:0.098481,14:0.096527):0.027062,(((((18:0.046783,17:0.012531):0.048381,9:0.092267):0.016664,4:0.099830):0.020015,(((16:0.017905,15:0.030749):0.050006,7:0.087921):0.004897,10:0.128316):0.004855):0.024365,(29:0.115536,(28:0.048690,(26:0.023913,27:0.036504):0.066783):0.021090):0.006851):0.008006):0.009870,((13:0.074047,(5:0.092204,20:0.078556):0.076239):0.000016,(8:0.108429,((22:0.029571,((24:0.012163,23: [...]
+   tree rep.680000 = ((2:0.208646,(((8:0.106020,((22:0.040315,((23:0.011083,24:0.025639):0.000579,25:0.007592):0.027793):0.022492,21:0.040421):0.033838):0.040138,((((((27:0.036759,26:0.047320):0.078432,28:0.053763):0.028758,29:0.094463):0.012131,((7:0.144794,(10:0.125664,((15:0.033811,16:0.024468):0.064863,((9:0.147831,(18:0.043527,17:0.025483):0.051450):0.039571,4:0.101997):0.002903):0.014627):0.009273):0.018435,(19:0.062131,14:0.128887):0.064492):0.007733):0.006159,(20:0.151610,5:0.113 [...]
+   tree rep.700000 = ((2:0.148427,((((21:0.046670,((25:0.017088,(24:0.013025,23:0.008946):0.004191):0.027809,22:0.018453):0.011545):0.028924,8:0.087995):0.012021,((((26:0.027738,27:0.040406):0.080801,28:0.040177):0.011032,(((19:0.069491,14:0.098740):0.030934,((7:0.077078,(5:0.093842,20:0.074628):0.069049):0.026616,(((16:0.020407,15:0.034299):0.053139,(4:0.116682,((18:0.023869,17:0.023922):0.042943,9:0.137837):0.035859):0.010435):0.007801,10:0.078822):0.008386):0.011544):0.006864,29:0.088 [...]
+   tree rep.720000 = ((3:0.110592,6:0.092789):0.003276,(((12:0.080117,11:0.021155):0.048450,((((19:0.089989,14:0.079800):0.026462,(7:0.091714,((4:0.114162,((18:0.045294,17:0.018654):0.053379,9:0.094775):0.028561):0.007601,((16:0.017968,15:0.046952):0.046541,10:0.128227):0.004874):0.006067):0.014093):0.000916,((28:0.068187,(26:0.030417,27:0.019409):0.065424):0.018671,29:0.111357):0.013804):0.009957,((5:0.080792,20:0.106478):0.104391,((8:0.084254,((22:0.027935,(23:0.007146,(25:0.009956,24: [...]
+   tree rep.740000 = (2:0.190350,(((((((7:0.126517,(10:0.118961,(((9:0.107748,(17:0.018945,18:0.036972):0.059882):0.024461,4:0.118005):0.008915,(15:0.045532,16:0.018455):0.048543):0.007842):0.010521):0.015744,(19:0.090745,14:0.086313):0.047675):0.017703,((29:0.118918,(20:0.107777,5:0.109540):0.112781):0.009165,((27:0.045637,26:0.024134):0.074456,28:0.055156):0.029976):0.009729):0.004628,13:0.081816):0.000403,(8:0.132400,((22:0.035912,(25:0.003110,(24:0.013798,23:0.015974):0.017582):0.026 [...]
+   tree rep.760000 = ((3:0.143178,6:0.100020):0.012499,(2:0.206996,((12:0.055588,11:0.030025):0.051003,(((((9:0.100595,(18:0.041157,17:0.028501):0.051582):0.018843,4:0.147679):0.008489,(10:0.086545,(7:0.116104,(16:0.011996,15:0.058928):0.061436):0.012837):0.006728):0.034609,((5:0.115478,20:0.077420):0.093169,(13:0.088256,(((22:0.041788,((24:0.012766,23:0.005464):0.006647,25:0.008034):0.024509):0.021890,21:0.038906):0.018007,8:0.078029):0.028412):0.010988):0.004145):0.001062,((29:0.104841 [...]
+   tree rep.780000 = ((3:0.144743,(2:0.196037,((11:0.033707,12:0.067155):0.051205,(((13:0.093665,(20:0.063651,5:0.126241):0.104854):0.002102,(((22:0.039232,(25:0.007535,(23:0.011804,24:0.007150):0.006167):0.017589):0.013621,21:0.047781):0.029369,8:0.098605):0.036605):0.004311,(((7:0.112355,(((9:0.092797,(17:0.024069,18:0.039842):0.035233):0.034630,4:0.141609):0.007096,(10:0.089943,(15:0.048717,16:0.025143):0.053729):0.009744):0.014927):0.020986,(19:0.077504,14:0.100198):0.017636):0.01587 [...]
+   tree rep.800000 = ((3:0.124518,6:0.089367):0.032312,(((11:0.022213,12:0.097302):0.049372,((((4:0.134674,(9:0.146274,(17:0.014059,18:0.045979):0.042883):0.017271):0.010973,(((15:0.029490,16:0.021402):0.073074,7:0.095929):0.002836,10:0.098658):0.009406):0.027351,((((27:0.042914,26:0.019853):0.103604,28:0.059017):0.021818,29:0.088150):0.023665,(19:0.075835,14:0.088348):0.037399):0.004000):0.002498,((13:0.092210,(8:0.118896,(((23:0.012010,24:0.006410):0.010469,25:0.012920):0.013806,(22:0. [...]
+   tree rep.820000 = ((((12:0.076925,11:0.015144):0.055261,((((5:0.099690,20:0.111676):0.069135,13:0.076513):0.010192,((((26:0.028607,27:0.034589):0.078063,28:0.070839):0.017797,29:0.092963):0.009075,((7:0.104104,(((16:0.019981,15:0.045168):0.047767,10:0.095020):0.005921,(4:0.128519,(9:0.108115,(18:0.026504,17:0.025297):0.083393):0.011393):0.018346):0.015702):0.011235,(19:0.103703,14:0.111480):0.030971):0.001098):0.014758):0.004357,((21:0.035202,(22:0.026576,(25:0.010981,(24:0.014787,23: [...]
+   tree rep.840000 = ((2:0.157653,(((13:0.071774,(((((26:0.036374,27:0.028044):0.055051,28:0.054665):0.038424,29:0.124957):0.000401,(19:0.084323,14:0.072124):0.035438):0.001122,((((16:0.016091,15:0.050265):0.054781,(((18:0.025901,17:0.025283):0.031285,9:0.095014):0.021267,4:0.101316):0.009893):0.009775,7:0.120297):0.004210,10:0.090190):0.019500):0.007505):0.007626,((5:0.095716,20:0.084449):0.078110,((21:0.034215,(22:0.010811,(25:0.016514,(24:0.011527,23:0.005342):0.009339):0.035129):0.02 [...]
+   tree rep.860000 = (3:0.164852,(6:0.125125,(2:0.194562,((12:0.063475,11:0.035444):0.046257,(((((27:0.038309,26:0.018495):0.075023,28:0.071592):0.036620,29:0.089395):0.019182,(((10:0.098132,(((9:0.102658,(18:0.037576,17:0.023932):0.082080):0.027852,4:0.129284):0.015547,(15:0.030076,16:0.029465):0.067485):0.015145):0.011540,7:0.110795):0.012691,(14:0.092872,19:0.084860):0.031526):0.005015):0.015675,((20:0.072336,5:0.092998):0.128276,(((21:0.049169,((25:0.005750,(23:0.013526,24:0.022295): [...]
+   tree rep.880000 = ((3:0.121861,6:0.090875):0.004625,(((11:0.024639,12:0.072012):0.062507,(((((10:0.088536,(4:0.115380,(9:0.127557,(18:0.042816,17:0.033901):0.050408):0.047674):0.013769):0.002685,(7:0.097687,(16:0.013900,15:0.035335):0.040532):0.017145):0.018320,(((26:0.016901,27:0.029070):0.048624,28:0.059711):0.019434,29:0.091287):0.014889):0.001181,(19:0.081382,14:0.101131):0.030332):0.001234,(13:0.143734,((8:0.102193,(((25:0.002643,(23:0.009390,24:0.016200):0.007178):0.016886,22:0. [...]
+   tree rep.900000 = ((3:0.168614,6:0.071862):0.016262,(2:0.176533,((12:0.072743,11:0.052030):0.034332,(((((9:0.117054,(18:0.030520,17:0.025809):0.045166):0.032041,4:0.155619):0.013213,((15:0.030144,16:0.038321):0.047215,7:0.095136):0.025224):0.005755,10:0.100792):0.038021,(((19:0.114527,14:0.099478):0.021421,(29:0.104044,((27:0.032604,26:0.013361):0.053506,28:0.064369):0.022983):0.019710):0.004775,(((20:0.130213,5:0.082484):0.066176,13:0.093056):0.006605,(8:0.098177,(((25:0.011749,(24:0 [...]
+   tree rep.920000 = ((2:0.166796,((11:0.034138,12:0.085191):0.061828,(((8:0.090322,(21:0.063713,(22:0.024808,((23:0.010328,24:0.012658):0.008450,25:0.006086):0.030690):0.008059):0.031689):0.029643,((5:0.084758,20:0.106526):0.104749,13:0.079355):0.002847):0.003861,((((26:0.027216,27:0.030248):0.075173,28:0.050610):0.025736,29:0.089845):0.028232,((4:0.130561,((10:0.111037,(7:0.112138,(16:0.031015,15:0.047337):0.044491):0.006166):0.008930,((18:0.038287,17:0.028187):0.036343,9:0.186444):0.0 [...]
+   tree rep.940000 = ((((((20:0.119798,5:0.077112):0.101916,13:0.074404):0.005286,(((22:0.044906,(25:0.006766,(23:0.014660,24:0.009940):0.008258):0.022458):0.024032,21:0.034044):0.007670,8:0.084720):0.021683):0.001992,(((19:0.103373,14:0.106097):0.033799,(10:0.090746,((4:0.132730,(9:0.091837,(17:0.018995,18:0.043830):0.049937):0.040223):0.018150,(7:0.082692,(15:0.040410,16:0.021335):0.041861):0.015638):0.006572):0.035848):0.002466,(29:0.092640,((27:0.033894,26:0.019924):0.064844,28:0.039 [...]
+   tree rep.960000 = ((3:0.152343,6:0.104414):0.008543,(((((13:0.080195,(20:0.079022,5:0.084316):0.105041):0.018029,(((25:0.004195,(24:0.020382,23:0.009633):0.007692):0.021371,(21:0.072140,22:0.031938):0.010823):0.065672,8:0.106351):0.014102):0.000114,((((27:0.037418,26:0.019452):0.057068,28:0.048165):0.018020,29:0.086893):0.016499,(((4:0.140888,(9:0.116961,(17:0.021264,18:0.026814):0.040454):0.019749):0.023383,(10:0.110770,(7:0.122304,(15:0.042826,16:0.026760):0.037797):0.005549):0.0038 [...]
+   tree rep.980000 = (6:0.076737,((((((((((((18:0.033903,17:0.022111):0.043466,9:0.111497):0.027255,4:0.105866):0.043806,10:0.111941):0.005312,(16:0.020134,15:0.021456):0.055738):0.008618,7:0.110712):0.018719,(29:0.078221,((26:0.024664,27:0.038919):0.075060,28:0.054640):0.029733):0.012616):0.006341,(14:0.091762,19:0.094881):0.031845):0.012567,(((5:0.103796,20:0.100789):0.109220,13:0.100665):0.010121,(8:0.088143,(21:0.069179,(((23:0.008667,24:0.010303):0.003766,25:0.006319):0.032184,22:0. [...]
+   tree rep.1000000 = (((((((((28:0.050892,(27:0.046909,26:0.027639):0.077550):0.026694,29:0.109869):0.009386,((19:0.104502,14:0.076063):0.025700,((((9:0.102620,(17:0.031214,18:0.039446):0.050491):0.018708,4:0.126067):0.018058,((15:0.043317,16:0.021129):0.061737,10:0.147354):0.004234):0.010588,7:0.105946):0.024862):0.013950):0.010483,(8:0.076721,((22:0.048880,((24:0.007828,23:0.007481):0.021127,25:0.002895):0.023331):0.026581,21:0.043295):0.039082):0.045827):0.010386,(13:0.091700,(20:0.0 [...]
+   tree rep.1020000 = ((2:0.238881,(((((9:0.163094,(18:0.034843,17:0.018127):0.033963):0.036292,4:0.118345):0.012332,((7:0.086208,(16:0.029510,15:0.033390):0.061412):0.018947,10:0.084262):0.001801):0.019814,(((14:0.099278,19:0.099682):0.033910,((28:0.028920,(26:0.022423,27:0.045589):0.077962):0.028071,29:0.074604):0.020601):0.003851,((5:0.096554,20:0.097291):0.088290,(13:0.087040,(((((24:0.009973,23:0.009371):0.003795,25:0.014790):0.033559,22:0.020708):0.004797,21:0.066698):0.036413,8:0. [...]
+   tree rep.1040000 = (3:0.151924,(6:0.079013,((13:0.115374,(((((20:0.083181,5:0.134579):0.136177,(((4:0.114276,(9:0.100994,(17:0.030276,18:0.036202):0.045487):0.027342):0.006234,((15:0.035430,16:0.027267):0.070300,10:0.105719):0.002373):0.013920,7:0.084675):0.019614):0.004433,((28:0.050109,(27:0.037883,26:0.020545):0.053529):0.038538,((19:0.087247,14:0.121433):0.022725,29:0.117525):0.002670):0.004846):0.027128,(8:0.111354,(21:0.045782,(((23:0.015260,24:0.021247):0.007959,25:0.009984):0. [...]
+   tree rep.1060000 = (((2:0.210965,((11:0.029764,12:0.095361):0.076300,((((14:0.110167,19:0.099630):0.031100,(((26:0.018650,27:0.037953):0.077263,28:0.051609):0.022779,29:0.082814):0.001726):0.010956,((5:0.124371,20:0.084699):0.123016,(13:0.084500,(((((23:0.015660,24:0.006915):0.007556,25:0.010884):0.041359,22:0.029860):0.012803,21:0.061180):0.015094,8:0.074650):0.020600):0.000547):0.005544):0.005016,((4:0.108133,(9:0.133427,(18:0.035836,17:0.025236):0.051820):0.029197):0.020346,(10:0.1 [...]
+   tree rep.1080000 = ((6:0.097889,3:0.135907):0.015405,(2:0.175514,((12:0.068728,11:0.034674):0.048972,(((29:0.076057,(28:0.039648,(27:0.042467,26:0.023033):0.076364):0.011781):0.014765,((19:0.057884,14:0.098955):0.025613,((((15:0.041346,16:0.012635):0.056247,(((18:0.041780,17:0.014989):0.049292,9:0.112583):0.030462,4:0.122599):0.004820):0.009649,7:0.089848):0.000313,10:0.099377):0.016652):0.008618):0.014152,((8:0.095825,(21:0.032254,(22:0.017684,(25:0.002557,(24:0.006250,23:0.015648):0 [...]
+   tree rep.1100000 = (6:0.069407,(3:0.131385,(2:0.163823,((11:0.046680,12:0.068078):0.046971,(((8:0.113273,(21:0.046348,(22:0.027032,(25:0.013121,(23:0.011228,24:0.017199):0.002073):0.027339):0.021696):0.020482):0.039163,((5:0.097760,20:0.092493):0.072809,13:0.088585):0.007685):0.014998,(((29:0.124484,((26:0.016060,27:0.034620):0.064326,28:0.030270):0.032319):0.017491,((10:0.114955,((16:0.022296,15:0.035310):0.070380,7:0.091346):0.007980):0.004602,(4:0.121553,((18:0.032680,17:0.013563): [...]
+   tree rep.1120000 = ((((((8:0.090494,((((23:0.009287,24:0.011021):0.012874,25:0.006266):0.027212,22:0.043022):0.029625,21:0.047039):0.022630):0.030347,(((((28:0.049587,(27:0.037864,26:0.031190):0.096003):0.036881,29:0.091543):0.004124,(20:0.103189,5:0.103277):0.102086):0.012688,13:0.098763):0.000988,(19:0.064909,14:0.085266):0.048208):0.000911):0.010097,((((18:0.030306,17:0.022813):0.042461,9:0.128486):0.019290,4:0.119427):0.007365,((10:0.094850,7:0.081331):0.017183,(15:0.059973,16:0.0 [...]
+   tree rep.1140000 = ((((((((28:0.067999,(27:0.045573,26:0.028210):0.061712):0.010634,29:0.094948):0.024136,(14:0.105314,19:0.093313):0.023458):0.001001,(7:0.086912,((10:0.085418,(15:0.030481,16:0.026373):0.059309):0.002241,((9:0.132847,(17:0.022642,18:0.038501):0.046757):0.028460,4:0.128708):0.011050):0.013034):0.017151):0.015984,((((25:0.017455,(23:0.005667,24:0.011666):0.010820):0.013912,(21:0.067159,22:0.042121):0.005630):0.040315,8:0.088469):0.017877,(13:0.088123,(20:0.093259,5:0.1 [...]
+   tree rep.1160000 = ((6:0.097132,3:0.109948):0.000849,(((12:0.093376,11:0.059892):0.037997,(((29:0.091693,((26:0.021877,27:0.019716):0.076034,28:0.037320):0.016421):0.018032,((19:0.093437,14:0.092837):0.021334,((((16:0.016061,15:0.041490):0.067511,(4:0.106898,(9:0.112593,(18:0.044256,17:0.019590):0.031223):0.028637):0.008766):0.004473,7:0.102396):0.009045,10:0.087572):0.023885):0.003450):0.006276,((((5:0.097488,20:0.079008):0.059007,((((24:0.007443,23:0.012381):0.005169,25:0.005086):0. [...]
+   tree rep.1180000 = (6:0.088374,((((((29:0.080202,((26:0.024021,27:0.041604):0.074322,28:0.067118):0.015413):0.007666,((19:0.069095,14:0.103764):0.029052,((5:0.071997,20:0.091473):0.091633,((10:0.121352,(((9:0.116338,(18:0.040318,17:0.026180):0.047728):0.024998,4:0.134586):0.007606,(16:0.020540,15:0.041358):0.043981):0.004940):0.007764,7:0.108693):0.019768):0.001726):0.014425):0.008870,(13:0.066074,(8:0.096847,(((25:0.007288,(23:0.009854,24:0.011219):0.005581):0.022160,22:0.018892):0.0 [...]
+   tree rep.1200000 = ((((13:0.096374,((8:0.079789,(21:0.042707,((25:0.012778,(24:0.016600,23:0.008108):0.009278):0.021655,22:0.046630):0.022249):0.032251):0.021100,((((14:0.107501,19:0.102532):0.013320,(29:0.083259,((27:0.041287,26:0.022996):0.063628,28:0.049395):0.017425):0.019805):0.002286,(((10:0.093364,(15:0.033973,16:0.029415):0.074320):0.001851,(((18:0.031959,17:0.017670):0.048317,9:0.107305):0.023905,4:0.127847):0.012382):0.011422,7:0.097418):0.011450):0.005075,(20:0.095623,5:0.1 [...]
+   tree rep.1220000 = ((6:0.095222,(((((21:0.047066,(22:0.039126,((24:0.006168,23:0.007010):0.005050,25:0.012327):0.028184):0.017766):0.027549,8:0.103050):0.044691,(((((28:0.056205,(27:0.033575,26:0.023880):0.077972):0.019870,29:0.109313):0.004913,(10:0.102781,(4:0.129148,(((15:0.037629,16:0.023224):0.054903,7:0.123350):0.022987,((18:0.036443,17:0.024693):0.046696,9:0.109882):0.034659):0.005882):0.015807):0.023674):0.004220,(14:0.082197,19:0.094397):0.028767):0.006509,((20:0.129268,5:0.1 [...]
+   tree rep.1240000 = ((2:0.144632,((((((26:0.010998,27:0.037353):0.098640,28:0.035826):0.025345,(19:0.068940,14:0.108129):0.022251):0.002786,(29:0.107902,((5:0.107969,20:0.110753):0.075650,((10:0.091318,((16:0.024479,15:0.040381):0.036766,7:0.113900):0.002846):0.011419,(4:0.119243,(9:0.083174,(18:0.043701,17:0.016275):0.072594):0.009592):0.040166):0.035712):0.000998):0.004597):0.009573,(13:0.094635,(((22:0.033949,(23:0.008912,(25:0.017329,24:0.007210):0.003980):0.032812):0.021928,21:0.0 [...]
+   tree rep.1260000 = ((2:0.153615,((((((28:0.033677,(26:0.029837,27:0.029131):0.054086):0.028542,29:0.088275):0.010790,((4:0.123005,(9:0.109591,(18:0.055115,17:0.022503):0.036166):0.025692):0.006311,(10:0.104245,(7:0.116388,(16:0.014047,15:0.040526):0.053958):0.006452):0.024949):0.032042):0.000680,(19:0.081925,14:0.094859):0.032017):0.013559,((((22:0.028607,((23:0.007232,24:0.012110):0.007152,25:0.002964):0.021227):0.012092,21:0.044347):0.032179,8:0.073257):0.022439,(13:0.086655,(5:0.11 [...]
+   tree rep.1280000 = (3:0.135529,(6:0.103681,(2:0.181587,(((13:0.083094,((21:0.050494,((25:0.013921,(23:0.018204,24:0.010143):0.009329):0.020489,22:0.024281):0.009950):0.017113,8:0.092019):0.013817):0.014243,(((((5:0.136353,20:0.138984):0.077072,7:0.092158):0.012265,((((9:0.085139,(18:0.038219,17:0.013845):0.040582):0.022528,4:0.130849):0.014558,10:0.083300):0.000186,(16:0.012602,15:0.041116):0.053798):0.016838):0.006167,(19:0.068111,14:0.108502):0.038988):0.015802,(((26:0.011266,27:0.0 [...]
+   tree rep.1300000 = (((2:0.184308,(((8:0.091539,(22:0.039158,((23:0.009995,(24:0.012031,25:0.012196):0.004901):0.017262,21:0.072728):0.019113):0.035003):0.045623,(13:0.089788,((5:0.123586,20:0.125282):0.086015,(((19:0.077829,14:0.065494):0.038636,(10:0.099463,((7:0.111286,(16:0.028245,15:0.043472):0.052633):0.008471,((9:0.095097,(18:0.036458,17:0.011934):0.060799):0.020186,4:0.125728):0.010020):0.000625):0.033624):0.012935,(29:0.072331,((26:0.030109,27:0.050732):0.072682,28:0.046254):0 [...]
+   tree rep.1320000 = (3:0.111024,(6:0.078138,(2:0.122673,((12:0.055830,11:0.024193):0.039590,((8:0.096041,(21:0.046492,(22:0.038067,((23:0.006195,24:0.013282):0.006166,25:0.001425):0.027292):0.019083):0.014810):0.011263,((5:0.051069,20:0.082869):0.114751,(((((26:0.034914,27:0.030839):0.069513,28:0.034186):0.016920,29:0.086142):0.009101,((((16:0.022561,15:0.040899):0.053489,(7:0.131019,10:0.073521):0.011325):0.009107,(((18:0.040337,17:0.015409):0.043324,9:0.089705):0.033400,4:0.124970):0 [...]
+   tree rep.1340000 = ((2:0.145835,((12:0.070777,11:0.021634):0.067625,(((((7:0.112622,(((18:0.045922,17:0.023940):0.022638,9:0.096356):0.049617,(16:0.027849,15:0.044231):0.042747):0.004039):0.016010,(10:0.087311,4:0.125430):0.009013):0.018039,(14:0.082620,19:0.096590):0.021740):0.007730,(((26:0.024286,27:0.026980):0.067194,28:0.041536):0.021879,29:0.086814):0.022602):0.018276,(((((23:0.003473,(24:0.011473,25:0.015411):0.002810):0.028037,22:0.025620):0.021003,21:0.044122):0.013207,8:0.09 [...]
+   tree rep.1360000 = (6:0.087648,((((12:0.047486,11:0.032326):0.036081,(((13:0.108976,(8:0.103418,((22:0.034861,((23:0.009926,24:0.014119):0.005176,25:0.006382):0.027363):0.009692,21:0.030953):0.059604):0.047466):0.001388,(20:0.114711,5:0.131041):0.059021):0.001418,((29:0.087548,(28:0.037977,(27:0.042706,26:0.033477):0.060507):0.012498):0.024570,(((((9:0.107300,(18:0.045506,17:0.031939):0.052945):0.037217,(7:0.100922,(15:0.044764,16:0.036656):0.050343):0.002872):0.000948,4:0.125224):0.0 [...]
+   tree rep.1380000 = (6:0.081195,(3:0.113824,(((12:0.054966,11:0.029945):0.054523,(((((27:0.038052,26:0.020620):0.055263,28:0.049689):0.008583,29:0.103712):0.024650,((((4:0.123874,(9:0.090426,(18:0.053380,17:0.020460):0.041744):0.015316):0.019270,(7:0.093988,(15:0.038814,16:0.026650):0.053582):0.008009):0.016264,10:0.105616):0.019002,(14:0.090475,19:0.077846):0.034315):0.001656):0.016463,((8:0.087536,((22:0.019285,((23:0.015598,24:0.012464):0.002294,25:0.013966):0.032069):0.028230,21:0. [...]
+   tree rep.1400000 = (((((11:0.021958,12:0.054053):0.033578,((((19:0.098038,14:0.080740):0.036134,(7:0.071531,(((15:0.047253,16:0.025111):0.060914,((9:0.097152,(17:0.017649,18:0.037369):0.066237):0.006972,4:0.155216):0.024041):0.020510,10:0.124965):0.018728):0.032135):0.002455,(((27:0.030806,26:0.023655):0.075466,28:0.050617):0.040724,29:0.113378):0.013015):0.016761,(((21:0.049977,(((24:0.004870,23:0.015228):0.011314,25:0.009760):0.025483,22:0.028607):0.008075):0.063195,8:0.083352):0.02 [...]
+   tree rep.1420000 = ((2:0.167569,((12:0.052720,11:0.041326):0.036704,((((22:0.036268,((24:0.009005,23:0.012514):0.009581,25:0.008754):0.021950):0.024278,21:0.031143):0.029261,8:0.094673):0.023782,(((20:0.078500,5:0.116079):0.071783,13:0.081857):0.010876,((((10:0.090991,((15:0.057084,16:0.022036):0.056376,(4:0.138755,((18:0.026861,17:0.017888):0.042289,9:0.130000):0.019768):0.021719):0.012685):0.002654,7:0.085778):0.037804,(14:0.076016,19:0.089979):0.015499):0.016154,(((27:0.041343,26:0 [...]
+   tree rep.1440000 = (((((11:0.023734,12:0.077423):0.033872,((((10:0.108050,((((18:0.036725,17:0.015369):0.051318,9:0.132065):0.030990,4:0.136638):0.009293,((16:0.036122,15:0.024391):0.040053,7:0.107269):0.009028):0.005274):0.015648,(19:0.087926,14:0.051708):0.037206):0.027511,(((26:0.030338,27:0.021271):0.059494,28:0.054779):0.022397,29:0.085179):0.010328):0.000992,(13:0.099335,((8:0.077870,(21:0.054633,(22:0.042475,(25:0.010835,(24:0.014332,23:0.014732):0.001938):0.021440):0.034622):0 [...]
+   tree rep.1460000 = (6:0.108038,((((12:0.052807,11:0.041670):0.044216,(((5:0.062974,20:0.113814):0.101862,(13:0.119814,(8:0.115277,(21:0.046405,(((23:0.006931,24:0.009844):0.009129,25:0.006201):0.022145,22:0.022174):0.016240):0.051685):0.011403):0.016782):0.011777,(((29:0.071440,(28:0.055302,(26:0.024725,27:0.029853):0.063027):0.017583):0.017047,(14:0.078220,19:0.089153):0.032263):0.000905,(10:0.070063,((4:0.098820,((18:0.044125,17:0.019340):0.037694,9:0.105742):0.037011):0.007837,(7:0 [...]
+   tree rep.1480000 = ((2:0.123550,((((21:0.037641,(((24:0.016036,23:0.014953):0.006131,25:0.008029):0.022177,22:0.027822):0.012923):0.021781,8:0.091253):0.022083,(((20:0.071812,5:0.107604):0.071369,13:0.106109):0.008612,(((((4:0.132193,(9:0.117992,(17:0.012273,18:0.044879):0.041176):0.029618):0.019354,10:0.119287):0.001375,((15:0.044281,16:0.014483):0.041140,7:0.093341):0.016601):0.016645,(19:0.072064,14:0.125361):0.026579):0.009139,(29:0.068687,(28:0.027217,(27:0.035965,26:0.034835):0. [...]
+   tree rep.1500000 = ((2:0.167610,((11:0.040575,12:0.060969):0.048731,(((5:0.088067,20:0.086026):0.056626,(13:0.098569,((21:0.037656,((25:0.001927,(23:0.014086,24:0.016171):0.006030):0.012906,22:0.050938):0.011260):0.025464,8:0.074786):0.022637):0.004461):0.008428,((29:0.099669,(28:0.047736,(26:0.036990,27:0.030149):0.065598):0.022347):0.021213,((19:0.065593,14:0.111776):0.028584,((((4:0.122999,((18:0.048255,17:0.029570):0.039493,9:0.095607):0.035243):0.036339,(16:0.026408,15:0.052535): [...]
+   tree rep.1520000 = ((3:0.130137,(2:0.129084,((11:0.028248,12:0.086979):0.062464,((5:0.133181,20:0.070852):0.104538,(((8:0.065851,((((24:0.008295,23:0.011801):0.010964,25:0.005445):0.031855,22:0.040177):0.019479,21:0.034465):0.034625):0.026626,13:0.114485):0.001649,((19:0.072871,14:0.065188):0.029913,((29:0.071708,((26:0.030584,27:0.052570):0.074477,28:0.030355):0.023398):0.022250,(((16:0.033157,15:0.036929):0.038170,(7:0.103678,10:0.087111):0.001523):0.014724,(((18:0.034590,17:0.02241 [...]
+   tree rep.1540000 = ((((11:0.028183,12:0.089982):0.050092,(((8:0.082538,((((23:0.016532,24:0.008869):0.006733,25:0.007946):0.019550,22:0.038634):0.012273,21:0.046539):0.032151):0.050865,((5:0.094070,20:0.065448):0.084309,13:0.078733):0.006373):0.007072,(((19:0.067243,14:0.088646):0.035506,(((26:0.023370,27:0.025217):0.059868,28:0.045196):0.031108,29:0.091327):0.013211):0.006209,(((16:0.021700,15:0.044228):0.067366,(10:0.123883,(4:0.119781,((18:0.043370,17:0.014076):0.036529,9:0.083126) [...]
+   tree rep.1560000 = (6:0.099661,((2:0.140366,((11:0.032459,12:0.068365):0.052231,(((5:0.083295,20:0.117297):0.091109,(((21:0.040662,(22:0.039545,((24:0.010488,23:0.007845):0.011655,25:0.003586):0.049563):0.003542):0.028739,8:0.100465):0.028049,13:0.094167):0.010412):0.001771,(((28:0.044655,(26:0.026899,27:0.047572):0.058951):0.025254,29:0.079109):0.004469,((((4:0.154498,((18:0.038553,17:0.021511):0.033622,9:0.124817):0.026587):0.026204,((16:0.031462,15:0.031148):0.048924,10:0.100125):0 [...]
+   tree rep.1580000 = (((2:0.152983,((12:0.076338,11:0.037838):0.031316,(((13:0.092432,((((25:0.005186,(24:0.011803,23:0.007055):0.005027):0.021986,22:0.048089):0.022849,21:0.043465):0.019922,8:0.064782):0.019814):0.010564,(5:0.089004,20:0.105302):0.073070):0.003054,((((10:0.104652,((16:0.015828,15:0.033094):0.047578,7:0.102094):0.008799):0.000785,(((18:0.036300,17:0.022957):0.044965,9:0.098720):0.031145,4:0.109961):0.010367):0.024641,(14:0.108312,19:0.105988):0.020140):0.008519,(29:0.10 [...]
+   tree rep.1600000 = ((6:0.071537,3:0.103946):0.022831,(2:0.168313,((12:0.069336,11:0.042652):0.041495,(13:0.070993,((((14:0.067509,19:0.079217):0.030009,(29:0.081479,((26:0.024695,27:0.028387):0.069534,28:0.041821):0.016178):0.022509):0.005335,((((16:0.019553,15:0.043138):0.058627,10:0.108608):0.005610,(4:0.107120,((18:0.034575,17:0.010765):0.043851,9:0.103492):0.023640):0.015807):0.005198,7:0.080450):0.020538):0.013145,(((21:0.049420,((25:0.004170,(24:0.010971,23:0.009963):0.006239):0 [...]
+   tree rep.1620000 = (3:0.122689,((2:0.156742,(((((((24:0.008762,23:0.008362):0.005708,25:0.005213):0.022194,22:0.035892):0.024848,21:0.047912):0.014628,8:0.094794):0.033452,(((29:0.089448,((27:0.032387,26:0.026347):0.054643,28:0.037019):0.025579):0.016935,((7:0.084425,((10:0.090580,(15:0.035687,16:0.032485):0.049216):0.009131,(((18:0.033118,17:0.034838):0.045472,9:0.105970):0.021946,4:0.123015):0.014312):0.004344):0.030880,(14:0.132389,19:0.086355):0.029071):0.020804):0.008482,(13:0.06 [...]
+   tree rep.1640000 = ((6:0.090949,3:0.151883):0.012173,(2:0.175676,((((20:0.112917,5:0.088630):0.098848,((8:0.080685,(((25:0.006089,(23:0.008188,24:0.008697):0.006721):0.023903,22:0.039950):0.026764,21:0.035906):0.041886):0.024885,13:0.083863):0.000424):0.008381,((((27:0.042098,26:0.026648):0.075966,28:0.043795):0.033433,((10:0.079355,(7:0.106781,(15:0.032250,16:0.026993):0.069319):0.002208):0.001194,(((18:0.034521,17:0.015842):0.037815,9:0.097319):0.018688,4:0.155120):0.023885):0.02669 [...]
+   tree rep.1660000 = (((2:0.159184,(((((5:0.107666,20:0.102908):0.105753,13:0.075576):0.007644,(8:0.068472,(21:0.045414,((25:0.007446,(24:0.014558,23:0.014063):0.004750):0.038550,22:0.029794):0.030018):0.003572):0.030332):0.006682,((((((16:0.018315,15:0.050370):0.059879,(4:0.145715,(9:0.123724,(18:0.039539,17:0.020466):0.045892):0.017456):0.016463):0.004988,10:0.095206):0.007720,7:0.078873):0.030283,(19:0.067572,14:0.096636):0.033303):0.002530,(29:0.089839,((26:0.014744,27:0.030337):0.0 [...]
+   tree rep.1680000 = (6:0.110664,((2:0.154584,(((13:0.116847,(((((27:0.032099,26:0.027751):0.079099,28:0.037397):0.035538,29:0.096972):0.009585,((7:0.166449,((((18:0.031893,17:0.016859):0.036991,9:0.117139):0.028128,4:0.137271):0.012898,(10:0.099103,(15:0.031769,16:0.017774):0.049147):0.008368):0.009449):0.024897,(14:0.096983,19:0.069251):0.029349):0.000399):0.005201,(20:0.116713,5:0.101249):0.137023):0.007315):0.021933,(8:0.123835,(21:0.077826,(22:0.057409,((24:0.015392,23:0.009460):0. [...]
+   tree rep.1700000 = ((6:0.076630,3:0.127851):0.012226,((((((14:0.065671,19:0.074488):0.015303,((((((24:0.011757,23:0.014945):0.005685,25:0.002942):0.021282,22:0.033951):0.017696,21:0.050018):0.036742,8:0.075082):0.014232,(13:0.058922,(20:0.101749,5:0.113135):0.074309):0.010357):0.009964):0.000610,((28:0.047959,(27:0.037037,26:0.030887):0.068836):0.022313,29:0.093364):0.030766):0.007959,((((18:0.035264,17:0.022490):0.039329,9:0.108720):0.026417,4:0.106045):0.013950,(10:0.082465,((15:0.0 [...]
+   tree rep.1720000 = (6:0.103613,(3:0.132433,(2:0.192446,(((13:0.101300,((((23:0.018059,(25:0.015171,24:0.008204):0.002090):0.025281,(21:0.037334,22:0.030119):0.004500):0.043649,8:0.101324):0.022511,(((28:0.063134,(26:0.026919,27:0.046044):0.087585):0.023140,29:0.113732):0.015964,((19:0.058258,14:0.087274):0.039080,(((((18:0.042493,17:0.017251):0.049777,9:0.076096):0.023840,4:0.125244):0.015639,(10:0.088704,(16:0.027226,15:0.044398):0.034034):0.003507):0.009462,7:0.105293):0.016949):0.0 [...]
+   tree rep.1740000 = (((((13:0.100091,((21:0.056058,(22:0.029835,(25:0.007267,(24:0.015716,23:0.010480):0.010532):0.023905):0.024581):0.028887,8:0.110106):0.020356):0.004089,((5:0.117332,20:0.099077):0.084086,(((((7:0.083817,(10:0.115702,(16:0.021611,15:0.052257):0.050190):0.007094):0.002809,(4:0.111940,((18:0.039653,17:0.018601):0.045280,9:0.145265):0.024563):0.007007):0.027352,(19:0.063660,14:0.103261):0.037068):0.009735,((26:0.031011,27:0.039719):0.053184,28:0.048369):0.015331):0.000 [...]
+   tree rep.1760000 = ((((((((22:0.033698,((24:0.009507,25:0.018480):0.003818,23:0.004946):0.028804):0.033587,21:0.044053):0.019269,8:0.094706):0.015836,((5:0.087943,20:0.073772):0.098109,13:0.080848):0.007895):0.003920,(((19:0.064000,14:0.066628):0.047909,((((((18:0.036476,17:0.024431):0.066274,9:0.083067):0.027758,4:0.112284):0.029991,(16:0.025528,15:0.023517):0.074433):0.023104,7:0.081816):0.000098,10:0.069865):0.010215):0.012713,(29:0.080579,(28:0.042052,(26:0.032922,27:0.024234):0.0 [...]
+   tree rep.1780000 = ((3:0.147784,6:0.075708):0.002588,(2:0.164675,((12:0.072046,11:0.023306):0.059860,(((((((23:0.013902,24:0.008482):0.006577,25:0.011956):0.035777,22:0.028759):0.000960,21:0.043726):0.040814,8:0.111390):0.020820,(13:0.103393,(20:0.103006,5:0.069521):0.064253):0.021324):0.000156,(((29:0.091244,(28:0.045756,(27:0.034730,26:0.026628):0.081493):0.022621):0.031272,(14:0.080188,19:0.076264):0.018925):0.002068,((((7:0.087633,(15:0.038821,16:0.023467):0.040847):0.015763,10:0. [...]
+   tree rep.1800000 = ((((12:0.057117,11:0.036820):0.065139,(((4:0.150487,(9:0.109999,(18:0.038616,17:0.016454):0.041724):0.017756):0.005669,((7:0.094983,(15:0.027071,16:0.022219):0.054374):0.021244,10:0.117962):0.014917):0.026628,((((20:0.101817,5:0.103616):0.069546,13:0.084853):0.002686,((21:0.035848,(((23:0.008837,24:0.007496):0.010834,25:0.003031):0.028879,22:0.047730):0.024288):0.020115,8:0.087435):0.019188):0.016801,((14:0.084980,19:0.085505):0.034562,(29:0.079415,(28:0.036237,(27: [...]
+   tree rep.1820000 = (3:0.175506,(6:0.083962,(2:0.211876,(((((5:0.096733,20:0.123210):0.082795,13:0.095868):0.002573,((((25:0.007070,(24:0.015001,23:0.009914):0.006179):0.021465,22:0.024663):0.013278,21:0.056998):0.008345,8:0.096138):0.028692):0.003080,((29:0.081751,(28:0.055213,(26:0.049684,27:0.030006):0.040518):0.023690):0.011405,(((((16:0.017385,15:0.027597):0.053999,7:0.083579):0.023410,10:0.091048):0.009805,(((18:0.038002,17:0.024786):0.051513,9:0.110877):0.027634,4:0.121080):0.00 [...]
+   tree rep.1840000 = ((((12:0.066870,11:0.021997):0.060206,((((20:0.099665,5:0.103498):0.080566,13:0.092150):0.011249,((21:0.038300,(22:0.033633,(25:0.003698,(24:0.011488,23:0.012406):0.003458):0.029546):0.024911):0.035025,8:0.101373):0.030775):0.001288,((((15:0.041049,16:0.020706):0.043428,(10:0.099766,7:0.106133):0.004508):0.014013,(4:0.140989,((18:0.050869,17:0.018862):0.052628,9:0.127302):0.023047):0.019589):0.032708,((14:0.089930,19:0.079318):0.031860,(29:0.107994,(28:0.047483,(27: [...]
+   tree rep.1860000 = (3:0.129676,(6:0.093060,(2:0.204347,((11:0.018932,12:0.090644):0.065254,((((((27:0.032489,26:0.021169):0.061401,28:0.049759):0.021841,29:0.077771):0.019809,(19:0.097808,14:0.092181):0.033931):0.005984,(7:0.112928,(((15:0.039901,16:0.028594):0.059717,(4:0.117768,((17:0.027980,18:0.029625):0.065121,9:0.093452):0.030108):0.022895):0.003070,10:0.120436):0.007633):0.020589):0.011458,(((8:0.091899,((22:0.024566,((24:0.007980,23:0.007994):0.002181,25:0.004528):0.037262):0. [...]
+   tree rep.1880000 = (3:0.109000,((((((29:0.107175,(28:0.048756,(26:0.023346,27:0.036643):0.052809):0.014604):0.023965,(((10:0.102544,((16:0.011485,15:0.039975):0.052528,(4:0.130189,((18:0.039048,17:0.014515):0.044680,9:0.084249):0.018280):0.013766):0.010937):0.014831,7:0.090062):0.007553,(19:0.085028,14:0.128916):0.019671):0.002674):0.009624,((5:0.094995,20:0.081497):0.057527,((12:0.062972,11:0.032907):0.057374,((((25:0.012872,(24:0.005881,23:0.014205):0.007732):0.026945,22:0.037219):0 [...]
+   tree rep.1900000 = ((2:0.226191,(((((20:0.108629,5:0.150200):0.135453,13:0.126130):0.003780,((((25:0.018659,(23:0.014007,24:0.004312):0.009423):0.024858,22:0.036507):0.027030,21:0.067969):0.083918,8:0.066577):0.055639):0.002653,((10:0.114172,(7:0.147380,((15:0.040735,16:0.016298):0.058571,(4:0.191861,(9:0.133340,(18:0.052705,17:0.020472):0.044343):0.026535):0.009761):0.001269):0.005548):0.047360,((29:0.116175,((27:0.044805,26:0.012686):0.091370,28:0.053766):0.021818):0.027198,(14:0.10 [...]
+   tree rep.1920000 = (6:0.101856,((2:0.190062,((11:0.027715,12:0.046568):0.066807,(((8:0.083213,(((25:0.018367,(23:0.006100,24:0.008767):0.007050):0.022958,22:0.035145):0.013461,21:0.052856):0.026170):0.030513,(13:0.093189,(5:0.114647,20:0.058492):0.109642):0.009682):0.007330,((29:0.082887,(28:0.032433,(26:0.040742,27:0.042870):0.067833):0.020735):0.019032,((14:0.089425,19:0.069086):0.037197,((7:0.121459,(16:0.027934,15:0.039457):0.062542):0.007542,(10:0.121617,(4:0.152332,(9:0.103554,( [...]
+   tree rep.1940000 = ((3:0.140391,6:0.080818):0.015784,(((12:0.057964,11:0.042362):0.056734,(((((5:0.111547,20:0.109420):0.112868,(((26:0.007561,27:0.037671):0.078126,28:0.044315):0.009289,29:0.095477):0.008208):0.009130,((19:0.088167,14:0.104599):0.016156,(10:0.094593,((7:0.100672,(((18:0.030646,17:0.027353):0.054909,9:0.114756):0.027992,4:0.125156):0.028934):0.004965,(16:0.015020,15:0.035551):0.064443):0.010138):0.021194):0.011138):0.007729,13:0.113927):0.002288,(8:0.096448,(21:0.0338 [...]
+   tree rep.1960000 = ((6:0.060866,(2:0.131311,((11:0.047780,12:0.079000):0.043151,((((14:0.099123,19:0.079855):0.028987,((10:0.080077,((4:0.105966,((18:0.036565,17:0.029803):0.044714,9:0.101832):0.016747):0.004229,(16:0.025372,15:0.044519):0.048031):0.006220):0.002772,7:0.094402):0.022698):0.009405,(29:0.111209,((26:0.026074,27:0.039762):0.081240,28:0.057721):0.031595):0.004974):0.003073,(((((25:0.003005,(24:0.004332,23:0.009543):0.002757):0.013264,22:0.029245):0.021851,21:0.063621):0.0 [...]
+   tree rep.1980000 = (3:0.120201,(6:0.118890,(2:0.152296,((11:0.036886,12:0.055046):0.049856,(((((14:0.074127,19:0.071709):0.016753,(((((18:0.051538,17:0.017442):0.045127,9:0.113908):0.020063,4:0.122731):0.004783,(10:0.084359,(7:0.083031,(16:0.014400,15:0.029362):0.045805):0.012048):0.004378):0.021396,(((26:0.023516,27:0.028476):0.057158,28:0.053509):0.032100,29:0.077901):0.007931):0.001400):0.003391,(5:0.073946,20:0.084143):0.065214):0.000322,((21:0.037109,(((24:0.009928,23:0.008934):0 [...]
+   tree rep.2000000 = ((2:0.174954,((12:0.052976,11:0.031724):0.065002,((((((29:0.094531,((26:0.019908,27:0.037446):0.077156,28:0.042628):0.029169):0.007597,(5:0.097414,20:0.106677):0.102824):0.006678,(((4:0.157474,((18:0.033609,17:0.023078):0.048701,9:0.098189):0.010365):0.009629,((16:0.028697,15:0.046349):0.035725,7:0.119105):0.019330):0.014294,10:0.115248):0.030524):0.001432,(19:0.079429,14:0.114319):0.033981):0.011507,13:0.117414):0.000214,((((25:0.004462,(23:0.013656,24:0.006143):0. [...]
+   tree rep.2020000 = ((3:0.160849,6:0.058570):0.019282,(2:0.160980,((12:0.092594,11:0.024633):0.043233,(((((27:0.034226,26:0.023493):0.076243,28:0.037949):0.008282,29:0.079084):0.015925,(((((15:0.041064,16:0.033943):0.028980,(4:0.123712,((18:0.021566,17:0.024329):0.023601,9:0.113050):0.036254):0.013851):0.008147,10:0.103014):0.004382,7:0.125027):0.011710,(14:0.086009,19:0.073050):0.014489):0.017371):0.008711,(((8:0.123862,(21:0.055108,(22:0.027936,((24:0.008887,23:0.004939):0.002380,25: [...]
+   tree rep.2040000 = ((2:0.147985,((12:0.079372,11:0.024379):0.052804,((((21:0.073221,22:0.026582):0.004446,((24:0.010169,23:0.008171):0.011806,25:0.001686):0.040217):0.018992,8:0.089654):0.019150,((13:0.095442,(20:0.116455,5:0.096244):0.087817):0.009767,((29:0.107775,((27:0.033478,26:0.027948):0.075607,28:0.040920):0.018283):0.015346,((14:0.078301,19:0.085355):0.032647,(7:0.111770,((10:0.093533,(4:0.103967,(9:0.107719,(18:0.036553,17:0.024793):0.048454):0.031917):0.008596):0.002241,(15 [...]
+   tree rep.2060000 = ((2:0.167867,((12:0.082385,11:0.016145):0.078608,((((((25:0.004825,(23:0.014255,24:0.016038):0.003882):0.038785,22:0.031684):0.024104,21:0.053867):0.038306,8:0.097129):0.024788,((20:0.086357,5:0.121563):0.128058,13:0.087893):0.008992):0.003793,((((27:0.059341,26:0.021188):0.084345,28:0.050657):0.009999,29:0.082997):0.032108,(((((18:0.040128,17:0.015449):0.045701,9:0.102569):0.023977,4:0.118277):0.030941,(10:0.081516,((15:0.049969,16:0.029760):0.053084,7:0.103015):0. [...]
+   tree rep.2080000 = ((2:0.160785,((11:0.020098,12:0.060853):0.060656,(((21:0.052915,(((23:0.010868,24:0.012414):0.005801,25:0.014231):0.021868,22:0.029972):0.023013):0.036164,8:0.102176):0.026822,(((5:0.100877,20:0.115913):0.150000,13:0.091389):0.019085,(((14:0.088551,19:0.098739):0.047663,((10:0.127628,((16:0.029374,15:0.029993):0.042540,7:0.090129):0.004259):0.011768,(((18:0.043697,17:0.026749):0.049142,9:0.134963):0.007445,4:0.160299):0.005777):0.031521):0.002471,(29:0.104759,(28:0. [...]
+   tree rep.2100000 = ((3:0.112731,6:0.094780):0.008933,(2:0.131178,((11:0.034883,12:0.068829):0.035203,((13:0.108974,((5:0.123430,20:0.086813):0.101265,(8:0.098145,((22:0.034217,((24:0.009631,23:0.015332):0.008588,25:0.004276):0.032792):0.018850,21:0.037426):0.018891):0.019874):0.005045):0.007528,(((14:0.096004,19:0.065967):0.026343,((28:0.044534,(26:0.039769,27:0.033878):0.064210):0.009039,29:0.072415):0.032128):0.009320,(((7:0.077737,(16:0.010201,15:0.035589):0.053603):0.026108,10:0.0 [...]
+   tree rep.2120000 = (((2:0.142499,((11:0.031772,12:0.057518):0.025034,(((((14:0.101867,19:0.083457):0.008720,(29:0.095057,(28:0.055070,(26:0.022614,27:0.040115):0.058600):0.023735):0.009083):0.006000,(10:0.081199,((((18:0.026792,17:0.026400):0.047784,9:0.084683):0.004784,4:0.106633):0.015741,((16:0.034389,15:0.024325):0.042808,7:0.105588):0.009896):0.020303):0.012045):0.011990,(13:0.099418,(5:0.095410,20:0.082578):0.103530):0.001843):0.009530,((21:0.064070,(22:0.033962,((24:0.009021,23 [...]
+   tree rep.2140000 = ((3:0.081589,6:0.122950):0.004695,(2:0.188479,((11:0.028653,12:0.068583):0.055369,(((13:0.080928,((5:0.091343,20:0.059057):0.096831,(8:0.074040,((((23:0.013848,24:0.004534):0.003664,25:0.007774):0.022847,22:0.027603):0.024999,21:0.037070):0.039703):0.014490):0.001785):0.010534,((29:0.083341,((26:0.024551,27:0.034176):0.057437,28:0.073953):0.042924):0.015448,(14:0.070751,19:0.089415):0.030703):0.004275):0.009860,((4:0.104879,((18:0.030894,17:0.032440):0.076270,9:0.11 [...]
+   tree rep.2160000 = (((2:0.199380,((11:0.020799,12:0.089092):0.075223,(((13:0.111947,(5:0.080717,20:0.111460):0.099124):0.011979,(8:0.071890,((21:0.067836,22:0.034319):0.010763,((23:0.011363,24:0.014802):0.003788,25:0.003386):0.013909):0.045748):0.023661):0.008948,((29:0.099946,(28:0.049462,(26:0.026178,27:0.030072):0.050755):0.033571):0.031031,(((7:0.089125,(16:0.028833,15:0.043564):0.060213):0.010970,(10:0.115188,(4:0.113186,(9:0.105515,(18:0.041942,17:0.013667):0.048946):0.021248):0 [...]
+   tree rep.2180000 = ((((((29:0.095525,((27:0.033431,26:0.016522):0.069799,28:0.047639):0.031630):0.025549,((19:0.081786,14:0.111154):0.023269,(10:0.129974,((((9:0.126288,(17:0.014243,18:0.034552):0.035057):0.030100,4:0.141938):0.017906,(15:0.052453,16:0.024162):0.037750):0.004247,7:0.090675):0.005889):0.018105):0.013309):0.016580,((13:0.114895,((21:0.055061,(((24:0.008042,23:0.006979):0.000604,25:0.001910):0.031121,22:0.021858):0.007507):0.033966,8:0.073041):0.031842):0.003133,(20:0.10 [...]
+   tree rep.2200000 = ((((((((((9:0.143448,(17:0.015286,18:0.046825):0.055152):0.023509,4:0.128193):0.010866,((15:0.037687,16:0.026491):0.063775,7:0.108703):0.017307):0.011263,10:0.112634):0.024891,(((27:0.034971,26:0.022298):0.066375,28:0.054707):0.027236,29:0.073528):0.009500):0.007005,(19:0.096220,14:0.089626):0.033751):0.007381,((((22:0.037918,(25:0.010377,(23:0.008156,24:0.019242):0.004669):0.028304):0.021749,21:0.042652):0.035610,8:0.116776):0.034168,(13:0.131866,(20:0.128316,5:0.0 [...]
+   tree rep.2220000 = (2:0.131964,((((((5:0.115037,20:0.071436):0.102163,(13:0.057432,(8:0.098226,(21:0.053067,(22:0.019186,((24:0.010159,23:0.008856):0.007244,25:0.007564):0.023557):0.012661):0.034950):0.032827):0.003434):0.006232,((14:0.092144,19:0.080044):0.033559,((28:0.053338,(26:0.021565,27:0.024235):0.054514):0.020314,29:0.099661):0.021706):0.004996):0.004196,((10:0.114222,((16:0.022455,15:0.039849):0.039911,7:0.091709):0.002188):0.020952,(4:0.089414,((18:0.056270,17:0.013308):0.0 [...]
+   tree rep.2240000 = (((((((((9:0.151469,(17:0.025050,18:0.034687):0.030404):0.033920,(7:0.089619,(15:0.025744,16:0.026551):0.074829):0.008330):0.004852,(10:0.066281,4:0.123423):0.009891):0.028960,(19:0.049392,14:0.111473):0.036245):0.008148,((28:0.042077,(27:0.031382,26:0.025881):0.054185):0.035065,29:0.092204):0.001981):0.007124,((((21:0.036515,(((25:0.008291,24:0.013027):0.000572,23:0.007532):0.030587,22:0.024145):0.029034):0.006256,8:0.072970):0.038426,(20:0.069484,5:0.107415):0.098 [...]
+   tree rep.2260000 = ((((((5:0.091380,20:0.110843):0.104391,(13:0.071532,((21:0.036951,(22:0.032595,(25:0.002736,(24:0.009356,23:0.013044):0.009892):0.018841):0.016861):0.032760,8:0.071262):0.025251):0.002025):0.004683,((7:0.082675,(10:0.089789,((4:0.110318,((18:0.047340,17:0.015192):0.045947,9:0.095779):0.044582):0.022149,(16:0.028568,15:0.033170):0.030567):0.006530):0.025200):0.018300,((29:0.083149,((26:0.020566,27:0.052868):0.088662,28:0.050279):0.019346):0.015976,(14:0.123215,19:0.0 [...]
+   tree rep.2280000 = (6:0.081657,((2:0.178939,((((5:0.114774,20:0.089929):0.090896,13:0.100681):0.007153,(((((24:0.011947,23:0.011105):0.005358,25:0.009505):0.025346,(21:0.071277,22:0.019464):0.020174):0.051855,8:0.117410):0.006608,(((28:0.046035,(26:0.022939,27:0.041797):0.049844):0.035675,29:0.114615):0.006121,((7:0.123853,(((16:0.032017,15:0.040582):0.062426,((9:0.103148,(18:0.063114,17:0.019335):0.050503):0.038121,4:0.115115):0.014726):0.000985,10:0.098526):0.010688):0.025912,(14:0. [...]
+   tree rep.2300000 = ((2:0.205851,((((8:0.121599,((((23:0.013756,24:0.012034):0.011623,25:0.013870):0.012316,22:0.035981):0.015591,21:0.042371):0.030118):0.020552,13:0.135999):0.012712,(((14:0.105054,19:0.082421):0.036663,((20:0.122457,5:0.122501):0.098559,(((4:0.126756,(9:0.076805,(18:0.028320,17:0.028627):0.055521):0.044292):0.003559,(7:0.111850,(15:0.034486,16:0.013841):0.046915):0.020121):0.009142,10:0.104158):0.024019):0.011070):0.007246,(((27:0.032021,26:0.036712):0.064796,28:0.04 [...]
+   tree rep.2320000 = (2:0.147344,((((((22:0.034608,((24:0.009610,23:0.004680):0.002222,25:0.006173):0.020965):0.025876,21:0.049153):0.026206,8:0.081781):0.027363,(((((14:0.091949,19:0.059449):0.035109,((((15:0.041517,16:0.020350):0.055082,10:0.088677):0.002141,(((17:0.021826,18:0.045262):0.041069,9:0.110229):0.034436,4:0.128156):0.008398):0.010843,7:0.090690):0.021943):0.019589,(29:0.124129,((27:0.034093,26:0.026368):0.059202,28:0.044452):0.035589):0.006401):0.004508,(20:0.098478,5:0.08 [...]
+   tree rep.2340000 = (((3:0.186497,6:0.074383):0.016214,((((((22:0.056523,((24:0.012173,23:0.007184):0.008215,25:0.003893):0.012486):0.023934,21:0.050006):0.044534,8:0.063392):0.028624,(13:0.075110,(20:0.117810,5:0.095215):0.110559):0.009240):0.003456,(((14:0.080453,19:0.094919):0.021307,(10:0.104098,((4:0.113905,((17:0.026312,18:0.036287):0.030876,9:0.111919):0.033160):0.001581,((15:0.035282,16:0.032199):0.049821,7:0.085735):0.019913):0.005609):0.018110):0.005856,((27:0.043097,26:0.022 [...]
+   tree rep.2360000 = (((((((((4:0.107457,(9:0.094390,(17:0.041965,18:0.018516):0.064488):0.032693):0.012819,(7:0.104932,(15:0.039695,16:0.024784):0.054436):0.003899):0.001704,10:0.118640):0.026652,(29:0.100503,((27:0.034262,26:0.028442):0.057733,28:0.048843):0.007187):0.022850):0.003971,(14:0.098567,19:0.069399):0.060797):0.009140,(((21:0.042462,(22:0.028998,((23:0.010933,24:0.009408):0.010495,25:0.005540):0.036240):0.015000):0.029768,8:0.102002):0.032627,(13:0.070500,(20:0.086021,5:0.1 [...]
+   tree rep.2380000 = ((6:0.108958,3:0.115318):0.012912,((((((8:0.074391,(21:0.055331,(((23:0.008803,24:0.009752):0.004384,25:0.004465):0.021573,22:0.031283):0.033442):0.032455):0.035402,(20:0.093883,5:0.137380):0.061502):0.003673,13:0.117073):0.005130,(((4:0.144533,(9:0.105132,(17:0.022120,18:0.032950):0.050230):0.040627):0.003597,((7:0.096929,(15:0.030703,16:0.035944):0.050288):0.019786,10:0.091389):0.006407):0.019391,(((28:0.054539,(27:0.041883,26:0.023285):0.094106):0.026065,29:0.086 [...]
+   tree rep.2400000 = (((((13:0.105652,((((4:0.125145,((18:0.030073,17:0.016843):0.054136,9:0.129157):0.013619):0.012371,(7:0.110431,((16:0.028766,15:0.040057):0.067210,10:0.093483):0.005868):0.011344):0.040431,((28:0.052596,(26:0.039196,27:0.023411):0.061655):0.026630,29:0.103326):0.014849):0.008614,(19:0.082805,14:0.118078):0.037345):0.004722):0.000297,((8:0.105933,(21:0.029876,((25:0.003140,(24:0.008627,23:0.024111):0.007598):0.011108,22:0.041291):0.019579):0.049280):0.034967,(5:0.120 [...]
+   tree rep.2420000 = (2:0.161373,(((((((9:0.099925,(18:0.032413,17:0.012333):0.046034):0.024439,4:0.109797):0.005034,(10:0.080995,(7:0.085457,(15:0.041488,16:0.020102):0.046499):0.011684):0.008505):0.030165,(((28:0.031538,(27:0.025086,26:0.033863):0.072485):0.027246,29:0.093719):0.017624,(14:0.080045,19:0.066358):0.026285):0.002131):0.011421,(((20:0.095053,5:0.139735):0.095054,(8:0.105524,((((23:0.014497,24:0.009223):0.004492,25:0.026706):0.038881,22:0.027891):0.005275,21:0.042264):0.02 [...]
+   tree rep.2440000 = ((6:0.073850,3:0.121172):0.000738,((((8:0.067374,((22:0.022304,(25:0.006171,(24:0.006187,23:0.008541):0.007212):0.051117):0.017703,21:0.039998):0.031117):0.024756,((((14:0.111831,19:0.061126):0.028295,((9:0.105391,(18:0.048046,17:0.012955):0.050062):0.014915,((10:0.084483,((15:0.052487,16:0.008542):0.047502,7:0.085976):0.005692):0.028640,4:0.136671):0.006171):0.024633):0.003077,(29:0.113577,((27:0.041357,26:0.027386):0.056256,28:0.048527):0.016394):0.008256):0.00736 [...]
+   tree rep.2460000 = ((((((13:0.097417,(8:0.116241,(22:0.030929,(((24:0.014494,23:0.010122):0.014252,25:0.013042):0.012878,21:0.047539):0.006438):0.055536):0.020588):0.002759,((20:0.075877,5:0.105448):0.101227,((14:0.071432,19:0.082483):0.033617,(((27:0.026587,26:0.030513):0.067559,28:0.041454):0.024045,29:0.090769):0.016953):0.004359):0.003322):0.008677,(((7:0.119056,(15:0.019985,16:0.038597):0.034472):0.021059,10:0.076679):0.008942,((9:0.106105,(18:0.038494,17:0.020232):0.043792):0.00 [...]
+   tree rep.2480000 = ((((11:0.041246,12:0.067845):0.051537,((((22:0.033466,(25:0.009531,(23:0.014638,24:0.006840):0.008704):0.019209):0.025883,21:0.052158):0.032229,8:0.092802):0.025626,(((5:0.097651,20:0.130060):0.074257,13:0.068852):0.021305,((((26:0.027398,27:0.027783):0.069349,28:0.058331):0.018242,29:0.083801):0.001595,(((((18:0.052852,17:0.025385):0.067434,9:0.116490):0.026878,4:0.127350):0.015708,(10:0.094266,((16:0.028029,15:0.038076):0.041160,7:0.102547):0.000396):0.003972):0.0 [...]
+   tree rep.2500000 = ((3:0.146039,6:0.104839):0.000127,(((11:0.028752,12:0.065977):0.039055,((5:0.093831,20:0.087808):0.092025,(((7:0.115131,((((18:0.037770,17:0.024249):0.030424,9:0.102976):0.042650,4:0.116791):0.003856,(10:0.103880,(16:0.019066,15:0.041838):0.051358):0.010308):0.007472):0.035819,((29:0.091087,(28:0.058180,(26:0.021140,27:0.050584):0.047901):0.035686):0.012786,(14:0.083394,19:0.089279):0.042994):0.001121):0.010425,(((21:0.039890,((25:0.011301,(24:0.012253,23:0.009383): [...]
+   tree rep.2520000 = ((((12:0.058869,11:0.031015):0.060603,(((((18:0.031883,17:0.019670):0.048800,9:0.128269):0.037274,4:0.110037):0.007657,(((16:0.024537,15:0.035304):0.058621,7:0.085817):0.014371,10:0.110327):0.010650):0.034500,((((28:0.043647,(26:0.026124,27:0.027120):0.059336):0.029576,29:0.131361):0.010092,(19:0.075086,14:0.094669):0.025869):0.006578,((8:0.120633,(21:0.078775,(22:0.042860,((24:0.011144,23:0.005067):0.004974,25:0.007266):0.018823):0.014278):0.037827):0.024604,((5:0. [...]
+   tree rep.2540000 = ((3:0.142240,6:0.081119):0.006082,(((13:0.098302,(((((25:0.006457,(24:0.008766,23:0.003705):0.004296):0.012404,22:0.051124):0.004892,21:0.042098):0.032243,8:0.076133):0.026999,((5:0.092978,20:0.105077):0.064606,((29:0.096163,(28:0.044583,(26:0.018904,27:0.027391):0.069336):0.017852):0.015846,((7:0.107574,((16:0.020464,15:0.037211):0.052188,((((18:0.031217,17:0.026264):0.042902,9:0.101023):0.012396,4:0.134072):0.014478,10:0.088409):0.001394):0.018313):0.014839,(19:0. [...]
+   tree rep.2560000 = ((3:0.139158,6:0.110829):0.000715,((((((21:0.048608,(22:0.036522,((23:0.011341,24:0.006810):0.011280,25:0.001618):0.027327):0.005184):0.035831,8:0.091379):0.046822,(13:0.079353,(5:0.138724,20:0.142031):0.146100):0.007584):0.001449,((19:0.115125,14:0.115409):0.024029,((29:0.085874,(28:0.036272,(26:0.036426,27:0.026786):0.072711):0.016680):0.015111,((10:0.122909,(7:0.126734,(16:0.020880,15:0.033624):0.053575):0.019399):0.000341,(((18:0.037023,17:0.019418):0.053848,9:0 [...]
+   tree rep.2580000 = (((((5:0.108024,20:0.097650):0.125320,((((28:0.061239,(26:0.032041,27:0.033158):0.085291):0.007981,29:0.096818):0.024656,((19:0.098001,14:0.081727):0.030492,((4:0.149041,(((16:0.009860,15:0.039112):0.060795,7:0.101474):0.005443,((18:0.057647,17:0.018169):0.062486,9:0.110799):0.015263):0.006372):0.024381,10:0.103532):0.007708):0.007863):0.008986,(13:0.107246,((21:0.057713,(22:0.028622,(24:0.011558,(23:0.009124,25:0.010008):0.000081):0.031647):0.008321):0.046638,8:0.0 [...]
+   tree rep.2600000 = ((3:0.107766,6:0.109661):0.005986,(((11:0.041465,12:0.042309):0.047005,(((((10:0.077519,(7:0.112290,(15:0.034207,16:0.028992):0.052804):0.011050):0.006000,(4:0.115389,(9:0.124675,(17:0.020915,18:0.041866):0.042438):0.027178):0.013463):0.021814,((28:0.048559,(27:0.043141,26:0.017381):0.046475):0.020997,29:0.094979):0.012418):0.000965,(14:0.080241,19:0.085075):0.046186):0.009337,(13:0.110667,((8:0.079472,(21:0.033567,(((24:0.012665,23:0.011232):0.009469,25:0.012213):0 [...]
+   tree rep.2620000 = ((6:0.094641,3:0.169744):0.003187,(((12:0.067775,11:0.022982):0.050032,((((20:0.113457,5:0.084280):0.130112,13:0.120490):0.016625,(((14:0.072205,19:0.081037):0.038480,(((7:0.098298,(15:0.044484,16:0.039975):0.053463):0.009587,((9:0.131255,(18:0.039692,17:0.025167):0.041983):0.028265,4:0.199075):0.011027):0.002029,10:0.090341):0.031673):0.006173,(((27:0.030371,26:0.027457):0.076323,28:0.044701):0.027932,29:0.095347):0.018095):0.012714):0.003175,(8:0.093764,((((24:0.0 [...]
+   tree rep.2640000 = (6:0.092748,(3:0.148914,(2:0.278486,((12:0.090232,11:0.028544):0.073002,(((4:0.169414,(9:0.139937,(18:0.018742,17:0.038926):0.033674):0.015872):0.006990,(((15:0.063963,16:0.017460):0.036579,7:0.105063):0.026216,10:0.111954):0.029444):0.036881,((((14:0.085363,19:0.084896):0.041566,(29:0.085765,((27:0.033174,26:0.013388):0.084449,28:0.033000):0.045229):0.012166):0.008417,((((22:0.046558,(25:0.003843,(24:0.016334,23:0.014919):0.010617):0.017063):0.010853,21:0.080134):0 [...]
+   tree rep.2660000 = ((((12:0.069709,11:0.030793):0.039895,((8:0.089515,(21:0.050460,(22:0.018573,(25:0.005224,(24:0.005401,23:0.009303):0.009108):0.013374):0.019770):0.015388):0.028730,((((29:0.086419,(28:0.022705,(26:0.032653,27:0.041026):0.073972):0.031081):0.006071,(19:0.086732,14:0.072810):0.030701):0.005653,(10:0.103191,(((16:0.021160,15:0.034451):0.054133,7:0.085302):0.008528,(4:0.111137,((18:0.037688,17:0.026472):0.039920,9:0.108041):0.014628):0.011392):0.005287):0.032448):0.012 [...]
+   tree rep.2680000 = (((((((8:0.113922,(21:0.052032,(22:0.023240,(25:0.006063,(23:0.010500,24:0.011710):0.005985):0.033726):0.023226):0.013686):0.026084,(20:0.095526,5:0.106518):0.054832):0.012819,13:0.076862):0.012177,((29:0.083321,((27:0.031451,26:0.018075):0.080144,28:0.025501):0.018930):0.019315,((14:0.107035,19:0.065127):0.024575,((4:0.108488,((18:0.036853,17:0.027808):0.045558,9:0.124122):0.015216):0.010877,(10:0.105394,(7:0.086362,(15:0.034004,16:0.026352):0.055830):0.009182):0.0 [...]
+   tree rep.2700000 = ((3:0.133044,6:0.090104):0.036410,(((((((4:0.109737,(9:0.130364,(18:0.045068,17:0.028999):0.026877):0.008631):0.005547,(10:0.107316,(7:0.086375,(16:0.026687,15:0.036187):0.066824):0.008851):0.012139):0.027953,(19:0.103500,14:0.118371):0.021048):0.012452,(29:0.121383,(28:0.061611,(26:0.031872,27:0.033805):0.067759):0.016021):0.010495):0.014976,((8:0.076897,(21:0.069480,((25:0.011107,(24:0.007784,23:0.016560):0.004442):0.025746,22:0.027295):0.019759):0.039654):0.01970 [...]
+   tree rep.2720000 = (((((8:0.088412,(21:0.051678,(((23:0.005704,24:0.015284):0.006549,25:0.003313):0.020876,22:0.047458):0.005289):0.059762):0.003786,(13:0.088133,(((7:0.104914,((4:0.120145,((18:0.043417,17:0.018966):0.039489,9:0.116740):0.025252):0.004118,((15:0.030774,16:0.023057):0.077066,10:0.076973):0.002570):0.011216):0.018284,(14:0.107555,19:0.087466):0.018971):0.004738,((20:0.075152,5:0.136486):0.097675,(((27:0.034627,26:0.015554):0.065407,28:0.042204):0.029020,29:0.110233):0.0 [...]
+   tree rep.2740000 = ((2:0.190675,(((((28:0.071430,(26:0.016753,27:0.043471):0.059978):0.009137,29:0.100382):0.008375,((19:0.059796,14:0.090507):0.046123,(7:0.094800,((10:0.080726,(4:0.154322,(9:0.095714,(18:0.024391,17:0.034655):0.042758):0.028938):0.006563):0.001479,(16:0.021988,15:0.044111):0.055535):0.011142):0.025601):0.000009):0.015129,((((22:0.038031,((23:0.006051,24:0.007786):0.005086,25:0.004716):0.033968):0.018521,21:0.026193):0.031348,8:0.099733):0.021377,(13:0.096996,(5:0.08 [...]
+   tree rep.2760000 = (6:0.088946,(3:0.121080,(2:0.188951,((12:0.071887,11:0.034867):0.049078,(((13:0.094385,(5:0.094199,20:0.087561):0.070570):0.002699,(8:0.116341,(21:0.041677,(((23:0.008171,24:0.011464):0.013223,25:0.002617):0.020274,22:0.043631):0.020568):0.020530):0.023969):0.005994,((29:0.086895,(((((9:0.104421,(18:0.052395,17:0.022639):0.047830):0.014678,4:0.103898):0.024102,((16:0.034972,15:0.036327):0.046446,7:0.086667):0.008429):0.002667,10:0.095055):0.036960,((26:0.027348,27:0 [...]
+   tree rep.2780000 = ((6:0.057810,(2:0.151596,(((((7:0.095040,((((17:0.016843,18:0.022177):0.026861,9:0.133626):0.038504,4:0.117543):0.013556,(10:0.112319,(15:0.037619,16:0.030764):0.044767):0.001175):0.015749):0.023318,(((27:0.027253,26:0.035550):0.042658,28:0.050067):0.020666,29:0.111195):0.014943):0.002556,(14:0.061321,19:0.086272):0.024109):0.012245,((((21:0.055177,(22:0.029424,((24:0.009978,23:0.007500):0.005225,25:0.007752):0.015443):0.016789):0.029879,8:0.050711):0.027040,13:0.09 [...]
+   tree rep.2800000 = ((2:0.145560,((((20:0.080623,5:0.079530):0.090600,13:0.065067):0.001558,(((((25:0.009511,(23:0.018224,24:0.016622):0.002635):0.028886,22:0.041382):0.019537,21:0.019668):0.026591,8:0.069232):0.023905,((((10:0.079937,((15:0.053439,16:0.014383):0.048593,7:0.111616):0.002242):0.000300,(((17:0.025181,18:0.027683):0.036255,9:0.098213):0.010009,4:0.105976):0.007451):0.015600,(19:0.056024,14:0.096865):0.028390):0.004557,(29:0.090343,((27:0.035488,26:0.014656):0.067836,28:0. [...]
+   tree rep.2820000 = ((3:0.150571,(((12:0.077153,11:0.016018):0.040941,(((13:0.099193,((21:0.055585,(((24:0.006391,23:0.010036):0.007011,25:0.004108):0.020749,22:0.024695):0.009239):0.033395,8:0.087114):0.018752):0.009534,(20:0.085984,5:0.096326):0.057826):0.013520,(((29:0.121179,((27:0.036506,26:0.024583):0.047426,28:0.046433):0.023598):0.016480,(7:0.086673,(((15:0.051198,16:0.025105):0.038550,10:0.105168):0.008573,((9:0.109945,(18:0.031504,17:0.015535):0.048605):0.023869,4:0.101370):0 [...]
+   tree rep.2840000 = ((6:0.077730,3:0.141157):0.010963,((((13:0.101223,(((5:0.129847,20:0.141589):0.109397,((19:0.072098,14:0.112256):0.031362,(29:0.133159,(28:0.051892,(26:0.022531,27:0.026153):0.082103):0.014410):0.022689):0.007559):0.001394,((((18:0.046650,17:0.020478):0.025962,9:0.139925):0.028346,4:0.171776):0.025520,(((16:0.030967,15:0.046597):0.070792,7:0.108543):0.013155,10:0.122621):0.011649):0.036382):0.013138):0.009257,(8:0.097790,((((23:0.005563,24:0.013702):0.007156,25:0.00 [...]
+   tree rep.2860000 = ((3:0.095362,6:0.088683):0.002176,(2:0.137770,((11:0.019311,12:0.090461):0.039772,(((29:0.093556,((27:0.033584,26:0.024485):0.053423,28:0.044391):0.018216):0.015318,((19:0.072072,14:0.088172):0.021315,(7:0.112209,((4:0.112716,(9:0.106320,(17:0.036568,18:0.035420):0.035221):0.011145):0.005604,((15:0.032820,16:0.033542):0.043024,10:0.081626):0.002787):0.007690):0.019099):0.010080):0.008021,(13:0.087682,((8:0.073984,(((25:0.007935,(24:0.018307,23:0.013697):0.005860):0. [...]
+   tree rep.2880000 = ((3:0.112495,6:0.090079):0.008808,(((11:0.038912,12:0.077117):0.040001,((((8:0.115504,((22:0.034718,((23:0.008919,24:0.013058):0.010732,25:0.007307):0.011202):0.018492,21:0.048569):0.017633):0.026121,13:0.085883):0.002597,(20:0.113451,5:0.116794):0.082865):0.006260,((((4:0.124020,(9:0.104294,(17:0.025180,18:0.028187):0.054059):0.016617):0.026411,((7:0.100493,(15:0.034511,16:0.042489):0.039394):0.003446,10:0.109939):0.004898):0.036718,(19:0.095277,14:0.094294):0.0241 [...]
+   tree rep.2900000 = ((6:0.113221,3:0.136135):0.007338,(((12:0.077497,11:0.024621):0.036874,((8:0.119724,(((5:0.129076,20:0.082665):0.075551,(((24:0.007309,(23:0.011785,25:0.010003):0.002712):0.008962,22:0.040710):0.024771,21:0.064940):0.060442):0.017439,13:0.117573):0.015344):0.000274,((((26:0.025237,27:0.042312):0.087018,28:0.060175):0.014240,29:0.079402):0.023914,((7:0.098684,((10:0.081942,((9:0.108708,(18:0.031227,17:0.012150):0.050003):0.020233,4:0.116812):0.010577):0.000317,(16:0. [...]
+   tree rep.2920000 = (6:0.076314,(3:0.087388,(((((13:0.072845,((((4:0.116843,(9:0.113772,(18:0.025554,17:0.029067):0.033466):0.026494):0.025569,(10:0.081394,((15:0.055733,16:0.045556):0.035542,7:0.098056):0.007659):0.013764):0.013945,(19:0.083420,14:0.084660):0.015988):0.005098,(29:0.089138,(28:0.043177,(27:0.027507,26:0.018086):0.097077):0.011896):0.017087):0.006936):0.003798,(20:0.065575,5:0.077900):0.088629):0.004977,(((22:0.020658,((23:0.013472,24:0.003157):0.008017,25:0.008716):0.0 [...]
+   tree rep.2940000 = ((6:0.072225,3:0.166876):0.007421,(((12:0.073515,11:0.045465):0.030024,((13:0.130516,((21:0.052879,(((24:0.008783,23:0.009471):0.007705,25:0.009961):0.048037,22:0.031304):0.015682):0.025416,8:0.069949):0.011489):0.019843,(((5:0.088319,20:0.101705):0.100655,(29:0.098165,((26:0.021672,27:0.039895):0.069478,28:0.037692):0.019732):0.003108):0.003522,((19:0.080517,14:0.097111):0.017954,((((16:0.033397,15:0.029343):0.051868,10:0.090239):0.005606,((9:0.120328,(18:0.026906, [...]
+   tree rep.2960000 = ((6:0.108285,3:0.125924):0.009221,(((12:0.091407,11:0.017309):0.035945,(((21:0.042348,(22:0.036897,(25:0.009169,(23:0.008426,24:0.005692):0.008946):0.021052):0.025020):0.050093,8:0.107579):0.014179,(((28:0.053562,(26:0.029954,27:0.018454):0.046022):0.021114,(((((10:0.079637,(16:0.028122,15:0.032792):0.038971):0.001500,(4:0.109818,(9:0.077725,(18:0.043920,17:0.015803):0.027576):0.035769):0.005464):0.005359,7:0.112890):0.033854,(19:0.087304,14:0.064741):0.027268):0.01 [...]
+   tree rep.2980000 = ((2:0.213820,((11:0.049648,12:0.063580):0.063216,(((20:0.137457,5:0.088653):0.109421,(((29:0.109623,(19:0.099844,14:0.098111):0.028600):0.009402,((28:0.063360,(27:0.047957,26:0.037971):0.059081):0.028011,(7:0.092914,(((15:0.055999,16:0.032419):0.043483,((9:0.071413,(17:0.012458,18:0.047903):0.056656):0.020733,4:0.126243):0.003380):0.003337,10:0.094449):0.010264):0.021639):0.009518):0.023349,13:0.102427):0.000807):0.008403,(8:0.107183,((((24:0.005193,23:0.008830):0.0 [...]
+   tree rep.3000000 = ((2:0.158811,((12:0.054980,11:0.029905):0.062017,((((22:0.028879,((23:0.007155,25:0.012689):0.002546,24:0.002729):0.035832):0.023137,21:0.053628):0.032514,8:0.065511):0.048756,(13:0.113874,(((10:0.081486,(((9:0.126938,(18:0.041686,17:0.020582):0.041713):0.025645,4:0.141741):0.007163,(7:0.089537,(16:0.015303,15:0.046731):0.069256):0.015628):0.018142):0.022069,(19:0.079646,14:0.101087):0.052664):0.002953,((29:0.098712,(28:0.044840,(26:0.041799,27:0.026546):0.059575):0 [...]
+   tree rep.3020000 = ((3:0.167850,6:0.099216):0.012355,(2:0.189488,((((5:0.143090,20:0.111883):0.103338,(((21:0.050818,(22:0.036791,((24:0.008014,23:0.013018):0.006083,25:0.008112):0.023259):0.015208):0.008052,8:0.089230):0.028632,13:0.097920):0.004364):0.001817,((29:0.080729,((26:0.031639,27:0.032999):0.077007,28:0.089575):0.018848):0.017818,((19:0.101446,14:0.060741):0.044954,(10:0.095216,((7:0.066842,(16:0.021472,15:0.051940):0.063512):0.010749,(4:0.128012,((18:0.029075,17:0.021322): [...]
+   tree rep.3040000 = ((6:0.084751,(((((13:0.102588,(20:0.071301,5:0.145346):0.063603):0.013326,(((22:0.045157,(23:0.003671,(25:0.015013,24:0.007391):0.001294):0.016789):0.019518,21:0.045607):0.025903,8:0.078810):0.021607):0.003764,(((19:0.102579,14:0.101879):0.014335,((28:0.052246,(27:0.049915,26:0.021691):0.081063):0.017255,29:0.094742):0.010712):0.007190,((10:0.080462,((15:0.026552,16:0.033387):0.035221,((9:0.138700,(17:0.021346,18:0.034351):0.060515):0.008753,4:0.108839):0.020855):0. [...]
+   tree rep.3060000 = ((3:0.159640,(2:0.201149,((12:0.049740,11:0.041344):0.049170,(((((28:0.038982,(26:0.028630,27:0.033101):0.071597):0.030603,29:0.088335):0.031249,((((7:0.135031,(16:0.026906,15:0.047113):0.054036):0.011829,(((18:0.050822,17:0.017760):0.060529,9:0.079461):0.008487,4:0.111615):0.015302):0.001853,10:0.102430):0.039558,(5:0.090029,20:0.112828):0.109970):0.002142):0.005394,(13:0.084578,(19:0.065721,14:0.075284):0.039824):0.001465):0.015319,(8:0.097608,((((23:0.007769,24:0 [...]
+   tree rep.3080000 = ((3:0.173086,6:0.079767):0.010112,(((11:0.029187,12:0.056031):0.092921,((((19:0.089755,14:0.109911):0.023842,((((10:0.134449,(16:0.007898,15:0.053346):0.065385):0.000695,(((18:0.029049,17:0.016736):0.072129,9:0.139472):0.023632,4:0.099305):0.014652):0.023649,7:0.117238):0.030317,(29:0.067485,((26:0.019228,27:0.034966):0.086217,28:0.047414):0.022325):0.014261):0.000405):0.002646,(5:0.117843,20:0.093920):0.116433):0.014002,((((22:0.030049,(25:0.003533,(24:0.012490,23: [...]
+   tree rep.3100000 = (6:0.054626,((2:0.185699,((11:0.032192,12:0.069675):0.055404,((((21:0.037968,((25:0.007322,(24:0.011769,23:0.016408):0.004100):0.022896,22:0.032937):0.018706):0.027535,8:0.090739):0.025617,(13:0.071248,(5:0.103590,20:0.099577):0.074288):0.004004):0.011827,(((28:0.054951,(26:0.040590,27:0.029239):0.086923):0.020109,29:0.099952):0.018982,((((4:0.154668,((16:0.022496,15:0.031535):0.035977,7:0.121088):0.004584):0.000920,((18:0.054945,17:0.025181):0.051000,9:0.104928):0. [...]
+   tree rep.3120000 = ((3:0.109276,6:0.077450):0.013464,((((20:0.109752,5:0.078438):0.104190,((13:0.112548,(8:0.091474,(((25:0.009499,(24:0.009402,23:0.011688):0.009129):0.023868,22:0.034456):0.021224,21:0.055838):0.059709):0.009082):0.011490,(((19:0.063176,14:0.070995):0.022929,(10:0.116271,((((17:0.023207,18:0.032318):0.037568,9:0.104235):0.016899,4:0.102015):0.009381,((15:0.041796,16:0.020805):0.047147,7:0.088963):0.017480):0.001605):0.016964):0.010341,(29:0.114825,(28:0.049427,(27:0. [...]
+   tree rep.3140000 = ((3:0.117800,6:0.105871):0.008021,((((((21:0.055703,(22:0.041036,((23:0.009287,24:0.006439):0.008661,25:0.015976):0.039037):0.011053):0.043941,8:0.085027):0.016179,((((((17:0.026196,18:0.035755):0.059850,9:0.103099):0.031786,4:0.134320):0.011700,(10:0.116417,(7:0.105913,(15:0.034844,16:0.027608):0.060723):0.010628):0.003653):0.016212,(19:0.052644,14:0.098878):0.051449):0.016063,(29:0.098179,((27:0.047168,26:0.021762):0.068212,28:0.048501):0.013408):0.016129):0.00991 [...]
+   tree rep.3160000 = ((2:0.245322,(((((((25:0.012172,(23:0.010394,24:0.004973):0.011690):0.031408,22:0.046463):0.018391,21:0.054292):0.041490,8:0.127562):0.048583,((20:0.144896,5:0.091504):0.089194,13:0.098062):0.031698):0.008192,((19:0.095756,14:0.085365):0.042217,((10:0.103227,((4:0.128737,((17:0.033080,18:0.025244):0.048314,9:0.134877):0.033857):0.029974,((15:0.037034,16:0.032974):0.035252,7:0.124225):0.006288):0.022044):0.020844,((28:0.060882,(27:0.050668,26:0.023438):0.073249):0.05 [...]
+   tree rep.3180000 = ((6:0.083051,(((12:0.069914,11:0.040076):0.048150,((8:0.078377,(21:0.023430,(((24:0.008593,23:0.010123):0.003361,25:0.006543):0.035264,22:0.034159):0.016551):0.047015):0.019451,((((29:0.087070,((((15:0.042175,16:0.028005):0.040519,(10:0.088457,(4:0.112364,(9:0.099126,(18:0.047117,17:0.024878):0.034362):0.029173):0.015413):0.003986):0.006746,7:0.081291):0.021116,(14:0.103076,19:0.076785):0.021929):0.016065):0.003506,(28:0.047651,(27:0.012815,26:0.029845):0.057038):0. [...]
+   tree rep.3200000 = ((((((29:0.050123,(28:0.044282,(27:0.020072,26:0.035880):0.063148):0.027628):0.012283,((((4:0.164501,((17:0.020034,18:0.025032):0.041609,9:0.106450):0.032110):0.012139,(10:0.111530,(15:0.028920,16:0.019422):0.064108):0.006546):0.005717,7:0.127833):0.035664,(19:0.078512,14:0.097209):0.031414):0.006791):0.002683,((13:0.096279,(20:0.108939,5:0.075425):0.069453):0.013313,(8:0.079315,(21:0.044323,(22:0.030156,((25:0.004636,24:0.008279):0.002475,23:0.021165):0.023884):0.0 [...]
+   tree rep.3220000 = (((2:0.141492,((((5:0.135162,20:0.068699):0.105712,(21:0.033557,(22:0.032024,(25:0.008439,(23:0.015194,24:0.010960):0.004257):0.028189):0.028510):0.036649):0.012500,((8:0.138326,13:0.098427):0.003651,((14:0.083180,19:0.080498):0.031413,((7:0.082273,(10:0.112392,((16:0.024346,15:0.037786):0.056243,(4:0.105815,(9:0.125145,(18:0.043856,17:0.012156):0.067487):0.029593):0.007507):0.006231):0.007503):0.026692,((28:0.034644,(26:0.021745,27:0.047090):0.081069):0.021011,29:0 [...]
+   tree rep.3240000 = ((3:0.154131,(((12:0.088466,11:0.010604):0.052424,(((29:0.092100,(28:0.025090,(27:0.044445,26:0.027484):0.103118):0.033557):0.016426,((((7:0.109268,(15:0.051645,16:0.026871):0.036632):0.005298,10:0.085244):0.007383,(4:0.108475,(9:0.129940,(18:0.022564,17:0.020495):0.040445):0.040423):0.005591):0.031963,(14:0.077644,19:0.084368):0.033022):0.009287):0.004487,((13:0.077020,(20:0.087766,5:0.099881):0.087465):0.007960,(8:0.111533,((((23:0.007534,24:0.018104):0.003302,25: [...]
+   tree rep.3260000 = ((6:0.089358,3:0.127801):0.019019,(((((((22:0.016125,(25:0.008248,(23:0.003051,24:0.008086):0.003057):0.032771):0.021265,21:0.048121):0.016417,8:0.091586):0.019331,13:0.078212):0.006728,(((19:0.097935,14:0.095485):0.023217,(((28:0.049349,(26:0.033445,27:0.021811):0.071176):0.028566,29:0.063877):0.013626,((10:0.111377,((16:0.012261,15:0.042252):0.053415,(4:0.125387,((18:0.035313,17:0.008693):0.025588,9:0.102455):0.057150):0.007402):0.004019):0.010054,7:0.094681):0.03 [...]
+   tree rep.3280000 = ((3:0.097862,6:0.070451):0.010448,(((12:0.061713,11:0.047168):0.040545,(((20:0.114373,5:0.079965):0.089091,13:0.074886):0.009056,(((21:0.030315,((25:0.001191,(24:0.014119,23:0.010604):0.010971):0.019352,22:0.056860):0.019843):0.025645,8:0.094590):0.021854,(((14:0.073214,19:0.058983):0.038185,(10:0.101065,(7:0.066143,(((9:0.100868,(18:0.040603,17:0.018578):0.030548):0.009512,4:0.108921):0.031222,(15:0.035793,16:0.026249):0.045877):0.003644):0.002967):0.020936):0.0035 [...]
+   tree rep.3300000 = ((6:0.097526,(2:0.146602,((12:0.085646,11:0.029356):0.039276,((((29:0.084042,(28:0.048348,(27:0.043455,26:0.032985):0.075689):0.013575):0.018657,((20:0.082918,5:0.098785):0.101706,((8:0.075204,((22:0.035624,((24:0.013608,23:0.013245):0.009245,25:0.006024):0.032660):0.003235,21:0.030394):0.044219):0.031368,13:0.080456):0.000988):0.009493):0.004836,(14:0.093826,19:0.083092):0.025462):0.007910,(((7:0.084155,(15:0.042121,16:0.020796):0.040576):0.005399,10:0.101372):0.00 [...]
+   tree rep.3320000 = ((((((13:0.080739,((((25:0.009582,(24:0.008326,23:0.020234):0.005978):0.037356,22:0.034396):0.018160,21:0.042146):0.028748,8:0.093360):0.014894):0.000296,((((29:0.103965,(28:0.056751,(26:0.021241,27:0.033025):0.053266):0.024289):0.012760,(19:0.073422,14:0.080785):0.022338):0.003341,((((16:0.023314,15:0.049246):0.047826,((9:0.096372,(18:0.026083,17:0.011747):0.033233):0.022721,4:0.107537):0.010702):0.001801,10:0.113723):0.004880,7:0.087084):0.018950):0.010847,(5:0.11 [...]
+   tree rep.3340000 = (3:0.113166,(6:0.088695,(2:0.170069,((((21:0.048422,(((23:0.003484,24:0.007463):0.005734,25:0.003735):0.014642,22:0.046580):0.025565):0.021741,8:0.071003):0.040660,((13:0.099316,(5:0.100048,20:0.085616):0.075010):0.017241,((((26:0.025028,27:0.029705):0.060320,28:0.032740):0.020274,29:0.115017):0.002574,((19:0.066158,14:0.133045):0.030042,(7:0.085182,(10:0.102429,((4:0.120388,(9:0.090525,(18:0.041511,17:0.015316):0.063954):0.033092):0.015150,(16:0.024432,15:0.029865) [...]
+   tree rep.3360000 = (((2:0.142043,((((8:0.082882,((22:0.023164,(25:0.006112,(24:0.006538,23:0.008270):0.007900):0.023002):0.031797,21:0.035868):0.045928):0.034202,(((7:0.097087,((10:0.142079,(16:0.046782,15:0.030380):0.039719):0.001653,(4:0.114083,(9:0.157085,(18:0.032450,17:0.023173):0.079552):0.013347):0.015582):0.017382):0.013660,(19:0.099265,14:0.085896):0.014225):0.001843,(29:0.087314,((28:0.048407,(26:0.023736,27:0.038533):0.071487):0.024061,(5:0.103986,20:0.092891):0.092498):0.0 [...]
+   tree rep.3380000 = ((6:0.091163,3:0.135846):0.017932,(2:0.216657,((((29:0.106445,(19:0.088801,14:0.073010):0.031499):0.007022,((7:0.102782,(10:0.115190,(4:0.134617,((16:0.039346,15:0.043611):0.055761,(9:0.091224,(18:0.046441,17:0.019532):0.059889):0.030491):0.014192):0.002591):0.004983):0.019800,(28:0.035711,(26:0.043883,27:0.027566):0.070314):0.031746):0.002628):0.006030,(((5:0.067422,20:0.098933):0.118665,13:0.084231):0.013759,(((22:0.041291,((24:0.025875,25:0.007989):0.001286,23:0. [...]
+   tree rep.3400000 = (6:0.078095,(3:0.162370,(2:0.136163,((((21:0.035888,(22:0.031187,(25:0.014046,(23:0.020096,24:0.006986):0.005985):0.013908):0.025591):0.022613,(5:0.081273,20:0.096605):0.109463):0.003817,((((29:0.098642,((26:0.035336,27:0.031448):0.054182,28:0.051596):0.037520):0.005877,(((((16:0.021606,15:0.033194):0.044103,7:0.081804):0.004919,10:0.091511):0.005727,(((18:0.027980,17:0.033438):0.031445,9:0.120493):0.030142,4:0.119025):0.005224):0.016308,(19:0.073842,14:0.094800):0. [...]
+   tree rep.3420000 = ((3:0.117543,6:0.088286):0.013709,(((12:0.078763,11:0.028942):0.044424,(((((19:0.064682,14:0.091887):0.008193,(((10:0.095207,(((18:0.037109,17:0.017814):0.057347,9:0.083213):0.023445,4:0.093558):0.014677):0.004072,(16:0.022735,15:0.053930):0.038244):0.007583,7:0.095156):0.030865):0.013487,(((26:0.034818,27:0.027832):0.077313,28:0.056226):0.009291,29:0.102966):0.016286):0.018961,(13:0.091842,(8:0.079331,(21:0.052819,(22:0.040273,((24:0.010526,23:0.009793):0.010207,25 [...]
+   tree rep.3440000 = ((((12:0.075389,11:0.017673):0.043792,(((((((16:0.022682,15:0.049909):0.084936,10:0.106082):0.002657,(4:0.116133,((18:0.040432,17:0.013529):0.060521,9:0.094787):0.023052):0.019369):0.011213,7:0.113918):0.029194,(19:0.082130,14:0.071338):0.042178):0.005212,(((26:0.029265,27:0.037991):0.073183,28:0.060520):0.013056,29:0.127055):0.004067):0.006659,((13:0.071405,(5:0.105130,20:0.116095):0.106333):0.007037,(8:0.099963,(21:0.031541,(22:0.038282,((23:0.011172,24:0.011680): [...]
+   tree rep.3460000 = ((3:0.142979,6:0.072152):0.003329,(2:0.188817,(((((29:0.089160,(28:0.044777,(26:0.020670,27:0.038690):0.063007):0.020671):0.008677,(19:0.078243,14:0.058710):0.027636):0.005050,((((22:0.023746,((23:0.024869,24:0.012251):0.005865,25:0.001700):0.025705):0.003255,21:0.084127):0.032852,8:0.072779):0.047183,((10:0.073637,(7:0.128920,(16:0.021239,15:0.020986):0.027245):0.019999):0.009893,(4:0.111137,(9:0.128526,(18:0.039845,17:0.032462):0.033197):0.038592):0.008103):0.0151 [...]
+   tree rep.3480000 = ((((((19:0.116787,14:0.064226):0.032827,(((7:0.120364,(16:0.031770,15:0.043888):0.039952):0.011659,10:0.098588):0.013806,((9:0.105702,(18:0.051361,17:0.011422):0.057669):0.013292,4:0.152250):0.009118):0.033367):0.005079,((13:0.089925,(((21:0.055677,(22:0.034481,((24:0.013920,23:0.004549):0.004720,25:0.005291):0.028373):0.013594):0.040063,8:0.115104):0.039683,(5:0.087414,20:0.118399):0.117043):0.001096):0.015418,((28:0.059552,(26:0.034887,27:0.040888):0.076121):0.013 [...]
+   tree rep.3500000 = ((((12:0.076442,11:0.045816):0.054732,((((((26:0.036466,27:0.028402):0.067513,28:0.052883):0.020888,29:0.103855):0.022701,((19:0.100744,14:0.063484):0.027840,((10:0.111612,((((18:0.038167,17:0.011685):0.045729,9:0.086501):0.031645,4:0.109957):0.002173,(16:0.037593,15:0.025770):0.039295):0.012424):0.014468,7:0.083793):0.010466):0.002151):0.009434,(8:0.106910,((21:0.038902,(25:0.003710,(24:0.009195,23:0.009119):0.004750):0.042727):0.002460,22:0.046500):0.043059):0.029 [...]
+   tree rep.3520000 = ((3:0.125336,6:0.054884):0.025329,(((((((22:0.019293,(25:0.001780,(23:0.006142,24:0.009314):0.012154):0.020757):0.025335,21:0.054773):0.020713,8:0.086202):0.038786,13:0.081233):0.004798,(((7:0.114612,((((9:0.130981,(18:0.024968,17:0.027196):0.050171):0.013202,4:0.084028):0.014154,10:0.086408):0.004539,(15:0.031441,16:0.028002):0.052747):0.012319):0.009186,((28:0.035110,(27:0.030365,26:0.022354):0.055303):0.029216,(20:0.067248,5:0.132792):0.074848):0.002473):0.005824 [...]
+   tree rep.3540000 = (6:0.087132,((2:0.160065,((12:0.057102,11:0.030389):0.049953,(((5:0.077501,20:0.089070):0.075758,(8:0.096358,(21:0.039679,(((24:0.012176,23:0.020055):0.007679,25:0.007362):0.029230,22:0.032936):0.007244):0.035710):0.019351):0.020719,(((19:0.071217,14:0.075562):0.037276,((29:0.077344,(28:0.042147,(26:0.029682,27:0.019147):0.069640):0.023364):0.007266,(10:0.109121,(((16:0.015915,15:0.032631):0.044122,7:0.078752):0.007170,((9:0.119879,(18:0.034590,17:0.015847):0.031431 [...]
+   tree rep.3560000 = (6:0.073420,(3:0.142350,(((11:0.028891,12:0.076191):0.052940,((((20:0.117497,5:0.095290):0.158395,((28:0.046602,(27:0.033699,26:0.024525):0.053188):0.027733,29:0.120640):0.013373):0.000624,((14:0.083197,19:0.074990):0.049788,((((15:0.037113,16:0.022623):0.070544,(9:0.114341,(17:0.023201,18:0.059573):0.038250):0.037255):0.003885,4:0.143799):0.013421,(10:0.166670,7:0.103888):0.010880):0.018587):0.003203):0.011084,(13:0.115078,(8:0.087972,(21:0.045863,(((23:0.007821,24 [...]
+   tree rep.3580000 = (((2:0.178814,((12:0.064929,11:0.029522):0.047022,((8:0.066758,(21:0.034994,(((24:0.010466,23:0.005539):0.007397,25:0.010100):0.030756,22:0.033133):0.011547):0.042874):0.030594,(((5:0.110331,20:0.085469):0.107526,13:0.110697):0.009712,(((28:0.050668,(26:0.038343,27:0.035540):0.082671):0.024309,29:0.071193):0.003135,((19:0.087495,14:0.084206):0.023404,((((16:0.020565,15:0.055178):0.058048,4:0.128839):0.002087,(10:0.089517,(9:0.135661,(18:0.033281,17:0.028486):0.02991 [...]
+   tree rep.3600000 = ((2:0.205393,((11:0.019015,12:0.079704):0.078662,(((13:0.112061,(((22:0.085023,(25:0.006624,(24:0.010239,23:0.016362):0.026369):0.018876):0.008333,21:0.050464):0.076689,8:0.091540):0.016119):0.000450,(((14:0.111551,19:0.070286):0.070633,(((7:0.133205,(15:0.028024,16:0.036706):0.056752):0.013277,10:0.114442):0.007428,((9:0.116359,(17:0.020278,18:0.042810):0.093979):0.032884,4:0.208759):0.003259):0.025911):0.013317,(29:0.080244,(28:0.067142,(27:0.038427,26:0.022883):0 [...]
+   tree rep.3620000 = ((6:0.066881,3:0.169778):0.003887,(((11:0.030219,12:0.052553):0.037185,(13:0.131188,(((((25:0.009007,(24:0.016256,23:0.010024):0.012051):0.032592,22:0.019985):0.029702,21:0.060937):0.035027,8:0.097984):0.006203,((((10:0.093173,((15:0.043695,16:0.013346):0.047379,((9:0.110525,(17:0.020608,18:0.042420):0.042838):0.024515,4:0.120341):0.006739):0.001919):0.021766,7:0.076637):0.019453,(14:0.059333,19:0.064467):0.042520):0.006364,((20:0.105187,5:0.068280):0.071986,(((27:0 [...]
+   tree rep.3640000 = ((((12:0.081395,11:0.041950):0.030528,((((13:0.092653,(5:0.128297,20:0.096061):0.108783):0.008031,(8:0.062245,((((24:0.014722,25:0.021904):0.004159,23:0.009008):0.028131,22:0.036616):0.026840,21:0.033631):0.036252):0.038131):0.011344,((19:0.082336,14:0.117256):0.037049,((28:0.067664,(26:0.029062,27:0.041537):0.053365):0.020671,29:0.098748):0.006589):0.003903):0.013296,(((9:0.148629,(18:0.025805,17:0.021371):0.062828):0.015033,4:0.155641):0.013511,(7:0.067470,(10:0.1 [...]
+   tree rep.3660000 = (6:0.053432,(3:0.142745,(((12:0.061352,11:0.038492):0.055091,(((((25:0.008202,(23:0.015948,24:0.010916):0.007516):0.024381,22:0.029000):0.023362,21:0.045556):0.038984,8:0.102173):0.047693,((13:0.070183,(5:0.091840,20:0.114193):0.079604):0.011157,((29:0.089952,((26:0.022016,27:0.070114):0.097791,28:0.039128):0.030922):0.017509,((7:0.129778,((((9:0.089883,(18:0.030024,17:0.023980):0.047300):0.019979,4:0.127304):0.007817,(16:0.030353,15:0.038533):0.043114):0.007053,10: [...]
+   tree rep.3680000 = (((((((5:0.092150,20:0.103768):0.121404,13:0.080994):0.008543,(((4:0.169823,((18:0.035819,17:0.027036):0.047268,9:0.117567):0.024846):0.009064,(10:0.115129,((16:0.025466,15:0.042735):0.044547,7:0.130611):0.004373):0.005470):0.023924,(((26:0.037325,27:0.024934):0.100322,28:0.053859):0.029242,(29:0.120857,(19:0.102721,14:0.105748):0.033307):0.006851):0.001555):0.015547):0.003728,(((21:0.042181,22:0.016523):0.018814,(25:0.014078,(24:0.006964,23:0.014052):0.018134):0.01 [...]
+   tree rep.3700000 = ((2:0.166852,((11:0.037448,12:0.072545):0.033654,((8:0.062818,(21:0.058990,((25:0.009816,(23:0.015987,24:0.006218):0.004956):0.032959,22:0.033711):0.009139):0.025266):0.033492,(((20:0.068367,5:0.089327):0.066530,13:0.072733):0.009209,(((((9:0.096685,(17:0.014314,18:0.038067):0.035327):0.039437,4:0.140550):0.007005,(10:0.083408,((15:0.042740,16:0.014877):0.059543,7:0.081941):0.004752):0.004245):0.029530,(29:0.075376,((27:0.049575,26:0.029262):0.047151,28:0.045550):0. [...]
+   tree rep.3720000 = (((((11:0.025948,12:0.077547):0.052875,((((29:0.114499,((27:0.035756,26:0.017828):0.088924,28:0.041719):0.023818):0.023953,(14:0.096159,19:0.091382):0.036444):0.002707,(7:0.105730,(((15:0.031774,16:0.021489):0.070232,10:0.078158):0.027355,(4:0.169536,(9:0.092141,(17:0.015477,18:0.035887):0.036640):0.046724):0.012699):0.015007):0.024178):0.016415,(((20:0.080391,5:0.093523):0.083269,13:0.062704):0.004457,(8:0.087622,((22:0.042197,(25:0.004718,(23:0.008425,24:0.007098) [...]
+   tree rep.3740000 = ((((11:0.061504,12:0.054815):0.054446,(((13:0.102951,(20:0.091317,5:0.091453):0.094367):0.002873,((28:0.044126,(27:0.029302,26:0.026523):0.046613):0.034405,(29:0.154269,((10:0.109980,((4:0.099853,(9:0.128723,(17:0.033151,18:0.032227):0.047821):0.020657):0.018602,(7:0.129849,(15:0.030978,16:0.037046):0.049666):0.001361):0.009105):0.024423,(14:0.109740,19:0.062255):0.067061):0.008290):0.007382):0.007016):0.007266,(((22:0.046392,((23:0.015598,24:0.007622):0.004668,25:0 [...]
+   tree rep.3760000 = ((3:0.137621,6:0.092933):0.018639,(2:0.278989,(((((19:0.091855,14:0.104667):0.033599,(10:0.109379,(((4:0.126792,7:0.074442):0.003390,(9:0.091583,(18:0.045215,17:0.018924):0.071941):0.019854):0.000257,(16:0.025152,15:0.045797):0.064421):0.006153):0.040514):0.002353,((28:0.065553,(26:0.023425,27:0.040639):0.079579):0.015789,29:0.099462):0.009109):0.016379,((5:0.115381,20:0.107142):0.127812,(13:0.089193,((((24:0.021529,23:0.003132):0.010050,25:0.007484):0.009477,(21:0. [...]
+   tree rep.3780000 = ((6:0.126579,3:0.071186):0.010277,(((12:0.064217,11:0.012938):0.042183,(((((28:0.049912,(27:0.021919,26:0.028558):0.094443):0.030070,29:0.083445):0.008744,((19:0.060848,14:0.110884):0.039499,((((15:0.039851,16:0.025176):0.055984,10:0.125458):0.001830,(((18:0.042851,17:0.020524):0.053357,9:0.113341):0.006486,4:0.128484):0.008892):0.008673,7:0.076610):0.015241):0.005055):0.013768,((8:0.107586,(21:0.021756,(((23:0.004905,24:0.010339):0.006751,25:0.007925):0.037663,22:0 [...]
+   tree rep.3800000 = (((((((((22:0.033398,(25:0.007028,(24:0.020665,23:0.005119):0.007050):0.027515):0.004859,21:0.047513):0.019610,8:0.093156):0.026950,(5:0.110593,20:0.093322):0.083655):0.011826,13:0.077082):0.003694,((29:0.100319,(28:0.048044,(26:0.012286,27:0.035057):0.055763):0.032735):0.015230,((19:0.067036,14:0.088983):0.038077,((((18:0.036419,17:0.025232):0.029053,9:0.102829):0.026342,4:0.101091):0.007389,(10:0.097301,(7:0.090562,(16:0.024028,15:0.037455):0.053657):0.003217):0.0 [...]
+   tree rep.3820000 = ((((12:0.096250,11:0.026439):0.058114,(((20:0.081963,5:0.128593):0.081047,(13:0.080377,(8:0.113426,((22:0.021136,((25:0.019068,23:0.012288):0.003128,24:0.007810):0.012740):0.017007,21:0.033052):0.042959):0.022566):0.004945):0.003620,(((((18:0.033441,17:0.031455):0.044450,9:0.121431):0.024862,4:0.132864):0.013044,(10:0.078707,(7:0.123287,(15:0.036065,16:0.019924):0.066975):0.009519):0.002682):0.021236,((19:0.090095,14:0.098513):0.040109,(29:0.104946,(28:0.049856,(27: [...]
+   tree rep.3840000 = ((6:0.095589,(((12:0.085660,11:0.013635):0.066562,((13:0.107229,(8:0.072951,(21:0.048378,(22:0.033818,((24:0.013346,23:0.006298):0.006946,25:0.016528):0.026232):0.004846):0.036843):0.025476):0.015544,(((19:0.087391,14:0.078901):0.029194,((20:0.091816,5:0.088781):0.095291,(29:0.089215,(28:0.048221,(27:0.031279,26:0.032384):0.055320):0.029793):0.008609):0.000954):0.000514,((((18:0.037838,17:0.026752):0.072409,9:0.085665):0.014668,4:0.116412):0.017339,(10:0.108565,(7:0 [...]
+   tree rep.3860000 = (((((20:0.111681,5:0.092548):0.087306,(13:0.094186,(((21:0.059281,((25:0.007041,(23:0.009350,24:0.003938):0.004599):0.032030,22:0.021240):0.010745):0.046337,8:0.079963):0.034495,(((14:0.076575,19:0.063058):0.029903,((((15:0.051250,16:0.035890):0.059916,(((17:0.018895,18:0.039219):0.047435,9:0.106923):0.022008,4:0.115597):0.015738):0.003476,10:0.120182):0.005913,7:0.098255):0.014664):0.010491,(29:0.081963,((27:0.026644,26:0.024446):0.061348,28:0.057534):0.038822):0.0 [...]
+   tree rep.3880000 = (((2:0.193919,((((20:0.083907,5:0.141180):0.105961,((((((23:0.004906,24:0.012284):0.005165,25:0.008432):0.030529,22:0.015948):0.017675,21:0.045114):0.019124,8:0.099616):0.023277,13:0.082712):0.007941):0.005084,(((((27:0.048090,26:0.014581):0.073425,28:0.044825):0.035214,29:0.078627):0.010802,(14:0.111344,19:0.098840):0.034671):0.001903,(10:0.079785,((((17:0.029017,18:0.020809):0.038357,9:0.088198):0.023516,4:0.129297):0.027894,((15:0.035577,16:0.022385):0.048254,7:0 [...]
+   tree rep.3900000 = (3:0.127270,(6:0.089499,(2:0.183108,(((((21:0.060535,(((24:0.004521,23:0.004714):0.002925,25:0.003950):0.022914,22:0.037154):0.028761):0.046143,8:0.091930):0.026230,(13:0.074867,(20:0.114078,5:0.101759):0.100221):0.013908):0.011687,(((((27:0.045535,26:0.008092):0.093141,28:0.064762):0.020189,29:0.077485):0.019695,(14:0.085768,19:0.077765):0.030284):0.002912,((4:0.159429,((17:0.027964,18:0.042046):0.043380,9:0.095307):0.032681):0.005258,((7:0.093696,(15:0.023408,16:0 [...]
+   tree rep.3920000 = ((6:0.080637,3:0.139370):0.008007,(((11:0.032090,12:0.099406):0.047584,(((((22:0.047585,((24:0.015340,23:0.013968):0.008042,25:0.009076):0.025624):0.012409,21:0.053014):0.021336,8:0.103728):0.037139,(13:0.114229,((((((16:0.029528,15:0.031599):0.059626,7:0.110651):0.010879,10:0.105601):0.003371,(4:0.099690,(9:0.158631,(18:0.030421,17:0.015699):0.038133):0.025400):0.012498):0.029744,(19:0.102460,14:0.076664):0.027296):0.004229,(29:0.053448,((26:0.023452,27:0.037365):0 [...]
+   tree rep.3940000 = (((2:0.145997,((((8:0.065693,((((24:0.012866,23:0.009249):0.002173,25:0.009255):0.027100,22:0.033395):0.016839,21:0.045635):0.020145):0.011986,((20:0.095507,5:0.096028):0.088784,(((10:0.085539,((((17:0.023762,18:0.037864):0.028188,9:0.123019):0.026542,4:0.131318):0.011914,(15:0.046286,16:0.024842):0.047969):0.004046):0.015111,7:0.088962):0.006619,((14:0.103937,19:0.103799):0.018274,(((27:0.037873,26:0.024205):0.103504,28:0.059898):0.011791,29:0.134441):0.024529):0.0 [...]
+   tree rep.3960000 = ((6:0.096932,3:0.099179):0.011014,((((8:0.096600,((22:0.032319,(25:0.009149,(24:0.011995,23:0.011564):0.004651):0.016877):0.014181,21:0.046766):0.036782):0.020894,((((7:0.083249,(((((17:0.014337,18:0.040382):0.052123,9:0.115223):0.018733,4:0.117764):0.013257,(15:0.047027,16:0.039308):0.038876):0.002500,10:0.122529):0.006673):0.012385,(14:0.095366,19:0.086447):0.033495):0.017922,(29:0.111399,((27:0.052914,26:0.012548):0.080288,28:0.040288):0.012309):0.012810):0.01384 [...]
+   tree rep.3980000 = ((((((13:0.109434,((((23:0.004998,(24:0.005902,25:0.020802):0.005593):0.024369,22:0.037699):0.018877,21:0.037606):0.049566,8:0.089195):0.052037):0.001685,(5:0.143723,20:0.101232):0.124827):0.013337,(((((4:0.129476,(9:0.119272,(18:0.039653,17:0.031283):0.025627):0.035402):0.030102,(16:0.024492,15:0.026212):0.027115):0.010706,(7:0.088742,10:0.110538):0.010413):0.038091,(19:0.102385,14:0.088572):0.024526):0.004018,(29:0.086482,((26:0.030422,27:0.048594):0.056009,28:0.0 [...]
+   tree rep.4000000 = ((((12:0.058843,11:0.037492):0.050240,(((29:0.118279,((27:0.030424,26:0.023113):0.067275,28:0.038860):0.020658):0.007060,((19:0.074985,14:0.080685):0.022337,((4:0.122702,((15:0.038017,16:0.028606):0.034455,(7:0.133742,((18:0.035089,17:0.030278):0.044187,9:0.104888):0.016750):0.001773):0.005970):0.012124,10:0.101054):0.021361):0.016261):0.001576,((20:0.108820,5:0.117379):0.099400,(13:0.096732,((21:0.052482,(((24:0.008159,23:0.010260):0.008421,25:0.003741):0.025841,22 [...]
+   tree rep.4020000 = ((3:0.137462,6:0.076329):0.012984,((((((14:0.079226,19:0.060566):0.028542,(7:0.118752,((4:0.119319,(9:0.111311,(18:0.040189,17:0.023540):0.047877):0.020035):0.038719,((16:0.027577,15:0.038949):0.051066,10:0.110953):0.000245):0.004422):0.036435):0.005508,(29:0.118022,((26:0.022187,27:0.026712):0.058892,28:0.038402):0.027819):0.004068):0.017029,((13:0.075903,(5:0.081329,20:0.089029):0.072025):0.001567,(((((24:0.006601,23:0.011131):0.001539,25:0.009247):0.022650,22:0.0 [...]
+   tree rep.4040000 = ((2:0.170714,((((13:0.082576,((21:0.032839,(22:0.048055,(25:0.006528,(24:0.012549,23:0.013838):0.002328):0.032217):0.016964):0.044155,8:0.074687):0.030800):0.004009,(5:0.117516,20:0.072331):0.069652):0.001721,((10:0.121656,((4:0.118318,(9:0.099055,(18:0.048749,17:0.010685):0.032486):0.024767):0.006501,((16:0.040331,15:0.028318):0.040681,7:0.089849):0.018869):0.005366):0.021743,((29:0.079825,((26:0.032572,27:0.032824):0.086622,28:0.049828):0.018729):0.014398,(14:0.10 [...]
+   tree rep.4060000 = (3:0.099064,(6:0.072777,(((((29:0.092144,(14:0.066433,19:0.071883):0.039888):0.006330,(((((9:0.092024,(17:0.023427,18:0.034016):0.058170):0.026957,4:0.148223):0.018173,((15:0.033534,16:0.050432):0.037789,7:0.102480):0.008825):0.005943,10:0.069166):0.031011,(28:0.042944,(27:0.025418,26:0.016417):0.050483):0.027825):0.001320):0.014913,((13:0.109869,(((22:0.042713,(23:0.007761,(24:0.006002,25:0.008979):0.001327):0.032434):0.016410,21:0.022229):0.026697,8:0.082848):0.03 [...]
+   tree rep.4080000 = (6:0.070378,(3:0.113834,((((29:0.082442,(8:0.086093,((22:0.029460,(25:0.006072,(24:0.016561,23:0.004435):0.009102):0.043701):0.007912,21:0.046258):0.047345):0.005881):0.005367,((((19:0.083819,14:0.118635):0.020446,(((9:0.094332,(18:0.026463,17:0.030014):0.035068):0.017720,4:0.146837):0.009510,(10:0.080126,((16:0.016988,15:0.044389):0.056948,7:0.116736):0.015357):0.000645):0.025530):0.005390,(28:0.050177,(26:0.016791,27:0.031538):0.068709):0.022369):0.006137,((5:0.13 [...]
+   tree rep.4100000 = (((2:0.185744,((((8:0.106421,((21:0.066402,((24:0.004700,23:0.015450):0.006111,25:0.006091):0.037901):0.006636,22:0.041635):0.045841):0.025031,((29:0.087598,(28:0.056896,(26:0.021162,27:0.044722):0.089223):0.018723):0.022755,(((4:0.127293,((18:0.045843,17:0.022768):0.048853,9:0.145762):0.029658):0.015151,(10:0.091409,((16:0.017899,15:0.037676):0.067656,7:0.152484):0.011756):0.002142):0.025492,(14:0.082950,19:0.076943):0.042285):0.008401):0.005517):0.001362,((5:0.119 [...]
+   tree rep.4120000 = ((((11:0.015754,12:0.070525):0.057497,(((((28:0.049486,(27:0.036573,26:0.011225):0.082318):0.031351,((20:0.075075,5:0.096366):0.091666,29:0.085374):0.000743):0.012995,((14:0.109823,19:0.073892):0.042559,(((10:0.100024,(15:0.026146,16:0.027534):0.062331):0.003771,((9:0.143623,(17:0.021950,18:0.022570):0.047134):0.023315,4:0.118482):0.006527):0.015652,7:0.097449):0.013289):0.015414):0.002273,13:0.095680):0.003799,((21:0.054831,(22:0.036215,(25:0.017841,(23:0.013124,24 [...]
+   tree rep.4140000 = (3:0.163271,(6:0.079777,((((((5:0.135091,20:0.103336):0.106619,13:0.113259):0.010001,(8:0.108818,((22:0.018600,((23:0.011892,24:0.014730):0.005503,25:0.002294):0.032438):0.011487,21:0.049918):0.026264):0.026381):0.006290,(((28:0.058789,(26:0.022796,27:0.035397):0.064444):0.030312,29:0.110598):0.008282,(((((9:0.126119,(18:0.044619,17:0.017322):0.051628):0.031281,4:0.127596):0.014199,(10:0.104975,(16:0.020276,15:0.065428):0.042491):0.005735):0.003022,7:0.098163):0.027 [...]
+   tree rep.4160000 = ((((((((((22:0.044743,((23:0.012135,24:0.009585):0.004903,25:0.008450):0.028419):0.016721,21:0.045158):0.033322,8:0.089068):0.028647,(20:0.089624,5:0.119304):0.097724):0.009956,13:0.108258):0.002814,(((19:0.090358,14:0.089497):0.048672,(7:0.089609,((10:0.107431,(15:0.023623,16:0.037452):0.069192):0.005246,(4:0.130727,((18:0.034509,17:0.025277):0.037915,9:0.124210):0.020392):0.016733):0.004906):0.021604):0.003253,(29:0.103804,((27:0.036574,26:0.018870):0.082275,28:0. [...]
+   tree rep.4180000 = ((3:0.148889,6:0.134385):0.011940,(((((13:0.074515,(20:0.060728,5:0.134219):0.085576):0.001422,(((((24:0.007500,25:0.012880):0.003176,23:0.015756):0.030270,22:0.051584):0.047408,21:0.031111):0.021826,8:0.083100):0.040381):0.005323,((((10:0.086181,((4:0.129695,(9:0.107016,(18:0.024632,17:0.016003):0.044914):0.028945):0.004627,(15:0.031859,16:0.021407):0.062958):0.001587):0.019695,7:0.121451):0.022863,(19:0.063373,14:0.091282):0.062523):0.013303,(29:0.097259,((27:0.03 [...]
+   tree rep.4200000 = ((3:0.119357,6:0.064846):0.005749,(((((((((25:0.004958,(23:0.010188,24:0.010474):0.011427):0.009400,22:0.042086):0.020536,21:0.035901):0.016447,8:0.075520):0.031667,(20:0.106461,5:0.084903):0.051375):0.005932,13:0.066245):0.008283,((((((15:0.026017,16:0.019658):0.053974,7:0.083998):0.003690,10:0.095368):0.000208,(4:0.131437,(9:0.101557,(18:0.036396,17:0.015335):0.033695):0.029322):0.020292):0.020398,(19:0.057516,14:0.073587):0.027111):0.014943,(((27:0.030663,26:0.01 [...]
+   tree rep.4220000 = (3:0.124940,(6:0.081181,(2:0.165528,(((((((10:0.071577,(((18:0.043496,17:0.015896):0.045428,9:0.112866):0.020003,4:0.139000):0.012318):0.000838,(16:0.035264,15:0.031779):0.061203):0.008298,7:0.078365):0.018791,(14:0.100529,19:0.070818):0.023342):0.006954,(((26:0.033499,27:0.034811):0.057728,28:0.033333):0.028736,29:0.077875):0.026693):0.016669,((13:0.103723,(5:0.111040,20:0.115570):0.081337):0.004550,(8:0.063148,((((23:0.010547,24:0.014168):0.019805,25:0.004062):0.0 [...]
+   tree rep.4240000 = ((2:0.187463,((11:0.032643,12:0.072738):0.044986,((((14:0.105551,19:0.086740):0.031472,((7:0.110052,(15:0.036663,16:0.018441):0.038770):0.018766,(10:0.079738,((9:0.108756,(17:0.023382,18:0.031125):0.038517):0.023447,4:0.113284):0.018492):0.014207):0.030745):0.005731,(((27:0.035233,26:0.029278):0.061300,28:0.056785):0.023002,29:0.103820):0.007873):0.005593,((13:0.082731,(8:0.083822,(21:0.053197,((25:0.014829,(24:0.011166,23:0.013818):0.006293):0.020229,22:0.023239):0 [...]
+   tree rep.4260000 = (3:0.107011,((((12:0.082730,11:0.027493):0.036224,((13:0.106334,((8:0.113666,(((25:0.008368,(24:0.022210,23:0.007958):0.014095):0.028828,22:0.026351):0.026411,21:0.074948):0.028715):0.012736,(20:0.086909,5:0.103135):0.087319):0.005173):0.011338,((((15:0.038376,16:0.029504):0.036839,(10:0.100108,7:0.095810):0.005607):0.014389,((9:0.088904,(18:0.028105,17:0.013531):0.040226):0.031796,4:0.111085):0.006844):0.026446,((19:0.083529,14:0.105990):0.013182,((28:0.048109,(27: [...]
+   tree rep.4280000 = (6:0.079038,((((12:0.060153,11:0.023746):0.033448,((((((15:0.054614,16:0.023248):0.053105,7:0.099397):0.019431,((4:0.098803,(9:0.138541,(18:0.039344,17:0.020297):0.052311):0.018862):0.003946,10:0.134862):0.000137):0.019230,(19:0.081738,14:0.126306):0.015925):0.007490,((28:0.065732,(27:0.046161,26:0.011115):0.090565):0.028755,29:0.112667):0.021281):0.021409,((8:0.090628,(((25:0.009739,(24:0.009203,23:0.013424):0.004268):0.043749,22:0.034558):0.007623,21:0.050704):0.0 [...]
+   tree rep.4300000 = ((3:0.140604,(((11:0.013712,12:0.102904):0.067876,((((8:0.078386,(21:0.070833,(22:0.041206,((24:0.009961,23:0.010192):0.002406,25:0.008326):0.035276):0.047453):0.052067):0.029105,13:0.077946):0.002583,(5:0.067239,20:0.092908):0.094822):0.005706,(((14:0.063429,19:0.076042):0.034042,((10:0.100239,((16:0.027029,15:0.041722):0.049635,7:0.101381):0.009022):0.004303,(((18:0.034418,17:0.013244):0.064957,9:0.098986):0.026800,4:0.139592):0.007273):0.055029):0.000474,(29:0.09 [...]
+   tree rep.4320000 = ((6:0.175383,3:0.175729):0.001975,(2:0.164701,((12:0.049350,11:0.046319):0.054100,((((((((24:0.008922,23:0.009490):0.013361,25:0.005273):0.032284,21:0.050334):0.000838,22:0.033689):0.027869,8:0.115289):0.018211,13:0.107061):0.009886,(20:0.100915,5:0.108229):0.090437):0.005396,((29:0.077899,(28:0.041359,(27:0.030909,26:0.032504):0.083551):0.022908):0.008123,((10:0.089583,((4:0.141225,(9:0.092128,(18:0.043944,17:0.013847):0.048283):0.034033):0.016395,((15:0.056633,16: [...]
+   tree rep.4340000 = ((6:0.075971,(((12:0.064401,11:0.030025):0.043226,((8:0.080896,(21:0.060117,(((23:0.011284,24:0.014229):0.010620,25:0.004390):0.029104,22:0.026042):0.000211):0.048130):0.021718,(((5:0.097678,20:0.074528):0.059320,13:0.080525):0.017364,((29:0.071729,(28:0.050146,(26:0.020231,27:0.044139):0.049906):0.011761):0.020221,((7:0.096192,((10:0.084454,(16:0.024814,15:0.039164):0.064974):0.003261,((9:0.123320,(18:0.029543,17:0.014403):0.029318):0.017747,4:0.114600):0.011222):0 [...]
+   tree rep.4360000 = ((3:0.122074,6:0.074036):0.008898,(2:0.149989,((11:0.038875,12:0.040448):0.064214,((((20:0.083285,5:0.061453):0.087363,13:0.055697):0.012631,((((27:0.045542,26:0.022565):0.088828,28:0.028141):0.022436,29:0.083048):0.013142,((14:0.089281,19:0.092669):0.010477,(((9:0.098469,(17:0.018821,18:0.040234):0.043263):0.027449,4:0.156823):0.002575,(10:0.096659,((15:0.039306,16:0.034105):0.029498,7:0.120784):0.002080):0.003856):0.022291):0.000927):0.006871):0.004249,(8:0.077731 [...]
+   tree rep.4380000 = (3:0.116954,(6:0.105112,(2:0.223917,((11:0.024411,12:0.058320):0.062294,(((8:0.074310,(((24:0.009722,23:0.004882):0.007026,25:0.007461):0.026897,(22:0.038528,21:0.061682):0.006602):0.042615):0.022898,13:0.118897):0.001903,(((14:0.090540,19:0.094831):0.035225,(7:0.142654,((10:0.151232,(15:0.045121,16:0.027627):0.075319):0.016044,((9:0.095469,(17:0.018796,18:0.039320):0.040765):0.014283,4:0.155450):0.010355):0.011644):0.039233):0.006920,((29:0.085552,(28:0.050776,(27: [...]
+   tree rep.4400000 = ((2:0.229653,(((((28:0.055949,(26:0.028250,27:0.036233):0.076686):0.019457,29:0.091139):0.023532,((19:0.071400,14:0.100064):0.030334,(7:0.091693,(10:0.097333,(((9:0.081116,(18:0.032722,17:0.017767):0.026882):0.025514,4:0.122566):0.020034,(16:0.027372,15:0.022438):0.046414):0.013357):0.003347):0.026621):0.004451):0.007754,(((5:0.102438,20:0.079962):0.064368,13:0.073762):0.014331,(8:0.085306,((22:0.032802,21:0.080263):0.012608,(25:0.005175,(23:0.010095,24:0.013455):0. [...]
+   tree rep.4420000 = (((2:0.181442,((11:0.049590,12:0.070371):0.066331,(((8:0.090497,(21:0.036681,(22:0.048864,((25:0.011445,24:0.010024):0.000659,23:0.012005):0.021976):0.029162):0.043141):0.035686,((20:0.113587,5:0.086144):0.136444,13:0.086443):0.003847):0.003466,(((14:0.092995,19:0.092223):0.044525,((((17:0.008475,18:0.052126):0.064613,9:0.116472):0.036323,4:0.154508):0.011306,((7:0.104430,(15:0.037440,16:0.034570):0.064479):0.005637,10:0.097041):0.000510):0.024232):0.010810,(((27:0. [...]
+   tree rep.4440000 = (3:0.140542,(6:0.068621,(2:0.149671,((((5:0.089791,20:0.107609):0.094045,(((((16:0.023441,15:0.035427):0.038828,7:0.100999):0.020904,10:0.089079):0.013108,(4:0.095130,(9:0.100839,(18:0.031921,17:0.017074):0.040671):0.025132):0.024958):0.023861,((29:0.081743,(19:0.086810,14:0.104091):0.027700):0.003852,((26:0.028694,27:0.029946):0.063543,28:0.047508):0.045833):0.001365):0.007307):0.003501,(13:0.100461,((((25:0.007538,(24:0.012240,23:0.022453):0.006963):0.027247,22:0. [...]
+   tree rep.4460000 = ((3:0.098650,(2:0.134141,(((((5:0.086138,20:0.081644):0.093928,13:0.083547):0.011226,(8:0.095965,((22:0.030884,((23:0.011518,24:0.005629):0.008032,25:0.005954):0.019626):0.020042,21:0.039914):0.039722):0.038982):0.004364,(((10:0.104265,((7:0.078169,(16:0.025739,15:0.038967):0.051487):0.005806,((9:0.088806,(18:0.038901,17:0.020579):0.039142):0.011448,4:0.097161):0.022274):0.001846):0.022885,(19:0.077007,14:0.077628):0.017338):0.014818,((28:0.040344,(26:0.029321,27:0. [...]
+   tree rep.4480000 = (3:0.157005,(6:0.101626,(((12:0.066382,11:0.024536):0.041447,(((5:0.111345,20:0.130118):0.127492,(((((9:0.100645,(18:0.028753,17:0.030452):0.064772):0.025263,4:0.125716):0.011548,(10:0.126678,((16:0.027923,15:0.043405):0.057603,7:0.107158):0.008666):0.005277):0.065827,(14:0.092907,19:0.098661):0.038421):0.008805,((28:0.044457,(26:0.013107,27:0.067368):0.063578):0.028705,29:0.120518):0.004688):0.010046):0.002413,(13:0.107487,((22:0.047163,((25:0.008286,(24:0.011563,2 [...]
+   tree rep.4500000 = (((((11:0.038376,12:0.068024):0.021940,(((29:0.111312,(28:0.058573,(27:0.034839,26:0.028937):0.059851):0.020104):0.016846,(((((15:0.038004,16:0.024729):0.042647,7:0.080375):0.008637,10:0.068210):0.004757,(4:0.106662,((17:0.017350,18:0.049739):0.042550,9:0.103070):0.037179):0.003266):0.017997,(14:0.077943,19:0.067162):0.038515):0.009508):0.007395,((8:0.089592,(((25:0.002904,(23:0.009581,24:0.010920):0.003912):0.018644,22:0.052852):0.019404,21:0.037135):0.024588):0.02 [...]
+   tree rep.4520000 = ((((11:0.020055,12:0.052661):0.068741,(((14:0.133121,19:0.112273):0.032997,(((((17:0.019700,18:0.050991):0.067271,9:0.097745):0.031503,(((15:0.037193,16:0.017608):0.060648,7:0.113660):0.011216,10:0.082452):0.005004):0.004791,4:0.108937):0.019316,((28:0.040048,(27:0.032677,26:0.023080):0.075218):0.035200,29:0.093249):0.027939):0.001854):0.003764,((13:0.114865,(8:0.086009,(21:0.035606,(22:0.025328,(23:0.011895,(25:0.021656,24:0.017763):0.001216):0.042251):0.018304):0. [...]
+   tree rep.4540000 = ((6:0.117429,3:0.139887):0.009593,(2:0.169765,((((13:0.130039,(5:0.116615,20:0.090163):0.163394):0.003795,((((26:0.016995,27:0.041904):0.112706,28:0.033079):0.030447,29:0.117436):0.021271,((((4:0.154575,(9:0.089686,(18:0.033549,17:0.031434):0.058621):0.018112):0.012849,((16:0.030353,15:0.037652):0.072584,7:0.097782):0.001544):0.007744,10:0.117857):0.043027,(19:0.078396,14:0.059827):0.046056):0.002804):0.009127):0.001824,(8:0.128230,((21:0.061089,22:0.034247):0.00451 [...]
+   tree rep.4560000 = (3:0.107436,((2:0.142051,((11:0.017579,12:0.067906):0.050822,(13:0.078077,(((8:0.063950,((((23:0.008081,24:0.006929):0.007185,25:0.005198):0.039186,22:0.025136):0.019197,21:0.048974):0.031049):0.014237,(5:0.091989,20:0.068860):0.077637):0.011842,((19:0.082062,14:0.075723):0.029293,((10:0.075180,(((16:0.013285,15:0.056633):0.035242,7:0.081742):0.032718,(((18:0.020589,17:0.017305):0.041783,9:0.113458):0.035384,4:0.144852):0.023211):0.003022):0.019726,(29:0.084552,(28: [...]
+   tree rep.4580000 = ((((11:0.023398,12:0.061499):0.049285,(((((22:0.040061,((24:0.007975,23:0.017821):0.005629,25:0.001266):0.033168):0.029670,21:0.036883):0.010611,8:0.085826):0.054746,(13:0.080916,(5:0.109759,20:0.130421):0.125619):0.004010):0.012034,(((4:0.142930,(10:0.121898,(7:0.117443,(16:0.019298,15:0.037390):0.038021):0.009094):0.007494):0.004093,((18:0.046611,17:0.018033):0.048037,9:0.110956):0.021210):0.038668,((19:0.086229,14:0.099213):0.019877,(((26:0.033769,27:0.043870):0. [...]
+   tree rep.4600000 = (6:0.082475,((((11:0.029893,12:0.074185):0.043255,(((((27:0.027388,26:0.023103):0.062387,28:0.048830):0.031753,29:0.083953):0.010684,((14:0.088375,19:0.096359):0.015875,(10:0.105209,(((15:0.040643,16:0.022144):0.058566,7:0.078055):0.009025,(4:0.208045,(9:0.103251,(17:0.012863,18:0.041560):0.057953):0.031815):0.016105):0.001196):0.018575):0.000309):0.008282,((8:0.110803,(((24:0.004768,23:0.009114):0.003046,25:0.010954):0.037094,(21:0.050188,22:0.030802):0.000199):0.0 [...]
+   tree rep.4620000 = (((((8:0.112013,((((24:0.007758,23:0.004994):0.015643,25:0.000480):0.031697,22:0.030761):0.010410,21:0.053552):0.038870):0.016783,(13:0.117708,((((29:0.103329,((26:0.029786,27:0.048205):0.089358,28:0.039575):0.025722):0.011157,(7:0.090190,(10:0.111827,((16:0.016273,15:0.057717):0.058744,((9:0.138950,(18:0.036326,17:0.020394):0.037127):0.045239,4:0.111717):0.001291):0.004864):0.013162):0.030050):0.001609,(14:0.073150,19:0.093939):0.036488):0.009546,(5:0.071990,20:0.0 [...]
+   tree rep.4640000 = ((3:0.104982,6:0.070756):0.007284,(((11:0.040760,12:0.051268):0.033755,(((20:0.085092,5:0.103537):0.069931,((14:0.089242,19:0.077244):0.025779,((((27:0.037671,26:0.017128):0.048927,28:0.042387):0.017457,29:0.090641):0.015128,(((10:0.094935,(4:0.138352,((17:0.019848,18:0.037678):0.037430,9:0.092191):0.026303):0.015281):0.006661,(15:0.037303,16:0.026893):0.030554):0.007191,7:0.093571):0.039986):0.003336):0.002817):0.000888,((((22:0.033865,(25:0.003043,(24:0.010840,23: [...]
+   tree rep.4660000 = ((3:0.163185,(2:0.172260,((11:0.017031,12:0.086195):0.067907,((((14:0.080164,19:0.075680):0.031070,((28:0.045210,(27:0.026896,26:0.034617):0.072688):0.025974,29:0.093380):0.019723):0.007467,((20:0.119593,5:0.111553):0.089329,((8:0.083537,(21:0.060346,((25:0.006424,(23:0.011194,24:0.013504):0.008703):0.027344,22:0.013579):0.021148):0.027564):0.028506,13:0.098865):0.010830):0.003983):0.009733,((10:0.100841,(7:0.095248,(15:0.046200,16:0.046581):0.051973):0.005395):0.00 [...]
+   tree rep.4680000 = ((2:0.234149,((11:0.052366,12:0.077254):0.032444,((29:0.122001,((((((15:0.046747,16:0.036029):0.085464,(10:0.155495,(4:0.181532,((17:0.017471,18:0.041664):0.042789,9:0.187004):0.037200):0.012648):0.000628):0.012595,7:0.113754):0.026081,(20:0.078601,5:0.096578):0.094063):0.005667,(14:0.099265,19:0.090468):0.031695):0.000120,(28:0.066477,(27:0.038965,26:0.023155):0.086869):0.029002):0.004739):0.017159,(13:0.125564,(8:0.076820,(21:0.062509,(22:0.047052,(25:0.017049,(23 [...]
+   tree rep.4700000 = (((((((21:0.050319,((25:0.005272,(24:0.005708,23:0.015926):0.011193):0.029589,22:0.058554):0.018028):0.030437,8:0.078372):0.024812,((5:0.111869,20:0.121281):0.107693,13:0.107569):0.000126):0.009926,(((((((18:0.034015,17:0.030295):0.062222,9:0.072555):0.021678,4:0.136347):0.001401,((16:0.040706,15:0.049331):0.051856,10:0.095149):0.013498):0.003711,7:0.078446):0.009587,(19:0.067919,14:0.101484):0.033901):0.004661,(29:0.089508,((26:0.040803,27:0.034317):0.049889,28:0.0 [...]
+   tree rep.4720000 = ((3:0.107919,6:0.076408):0.024033,(((((5:0.094192,20:0.114607):0.081954,((10:0.101548,((4:0.125128,((18:0.042786,17:0.019267):0.047145,9:0.092977):0.027499):0.024534,(7:0.078858,(16:0.024783,15:0.026439):0.032200):0.011782):0.016717):0.034484,((19:0.079664,14:0.095295):0.033723,(((26:0.011575,27:0.036120):0.072682,28:0.056652):0.033890,29:0.089512):0.008624):0.018093):0.008779):0.001653,(13:0.083326,(((((24:0.010512,23:0.010334):0.002369,25:0.001689):0.031823,22:0.0 [...]
+   tree rep.4740000 = ((((((((29:0.083047,((27:0.027602,26:0.018100):0.060172,28:0.059854):0.020840):0.024045,((4:0.135099,((18:0.027403,17:0.022524):0.053176,9:0.109499):0.042348):0.014645,(10:0.110439,((15:0.043463,16:0.012697):0.043174,7:0.086472):0.015011):0.001716):0.023403):0.000846,(19:0.079365,14:0.076978):0.034353):0.002020,(((20:0.109858,5:0.083634):0.083124,13:0.095523):0.001109,(8:0.063569,(21:0.078716,((25:0.004620,(23:0.008297,24:0.012970):0.005157):0.021282,22:0.027830):0. [...]
+   tree rep.4760000 = (((2:0.185119,((((8:0.103651,(22:0.051969,((25:0.008773,(24:0.005788,23:0.010228):0.004210):0.021111,21:0.068209):0.004867):0.045178):0.032642,((20:0.092561,5:0.097976):0.087350,13:0.083650):0.004053):0.006692,((((27:0.024022,26:0.041251):0.070661,28:0.071279):0.017268,29:0.091015):0.040074,((19:0.079611,14:0.085141):0.029969,(((10:0.103998,(15:0.033405,16:0.041890):0.033620):0.014874,(4:0.110638,((18:0.037280,17:0.014042):0.039947,9:0.105233):0.023944):0.018864):0. [...]
+   tree rep.4780000 = ((((11:0.019612,12:0.084468):0.056479,(((13:0.115442,(8:0.083897,(21:0.061150,(22:0.046086,((24:0.010129,23:0.015043):0.006427,25:0.002990):0.027368):0.009809):0.045221):0.034466):0.007335,(20:0.112705,5:0.076224):0.124718):0.008133,(((27:0.029766,26:0.033190):0.057426,28:0.042432):0.022804,(((10:0.087390,(7:0.107016,(15:0.040618,16:0.032079):0.041693):0.012421):0.004041,((9:0.125378,(17:0.017521,18:0.048907):0.034604):0.033477,4:0.158044):0.027685):0.026456,((19:0. [...]
+   tree rep.4800000 = ((((((13:0.091507,((5:0.106448,20:0.089095):0.053926,(8:0.109663,((22:0.036347,((23:0.013961,24:0.005166):0.003929,25:0.008260):0.024961):0.006173,21:0.040976):0.040306):0.012100):0.014740):0.004754,((((28:0.044496,(26:0.029475,27:0.031678):0.068464):0.027719,29:0.099305):0.009437,(14:0.112393,19:0.059799):0.056171):0.010912,(((7:0.089466,10:0.095826):0.000749,(16:0.020922,15:0.035176):0.042695):0.013204,((9:0.120271,(18:0.032164,17:0.019843):0.051443):0.011799,4:0. [...]
+   tree rep.4820000 = (3:0.118036,(6:0.074879,(((((((22:0.041611,((24:0.007797,23:0.013874):0.002845,25:0.016279):0.026451):0.042373,21:0.045455):0.028876,8:0.101558):0.017428,(((19:0.065463,14:0.087142):0.043875,(29:0.070957,(28:0.039901,(27:0.030263,26:0.014946):0.059777):0.035099):0.018663):0.016102,(13:0.095381,(20:0.098873,5:0.062171):0.092943):0.003120):0.003758):0.004017,((4:0.128514,((18:0.033969,17:0.018969):0.042454,9:0.086332):0.024139):0.006756,(10:0.098041,((15:0.047612,16:0 [...]
+   tree rep.4840000 = ((((11:0.035610,12:0.059669):0.032862,(((20:0.098683,5:0.120913):0.096027,(((21:0.040797,((25:0.006550,(23:0.017955,24:0.010060):0.011030):0.021318,22:0.029329):0.021052):0.016095,8:0.097121):0.024818,13:0.107877):0.001403):0.001759,(((((27:0.036729,26:0.037754):0.063701,28:0.062297):0.025235,29:0.089216):0.018270,(((9:0.154212,(17:0.028613,18:0.028248):0.070197):0.059725,4:0.141637):0.009475,(10:0.123480,((15:0.016585,16:0.033945):0.065610,7:0.107986):0.009077):0.0 [...]
+   tree rep.4860000 = (((((11:0.037974,12:0.058791):0.067050,(((((19:0.107377,14:0.108919):0.069649,(((9:0.120716,(17:0.017100,18:0.035881):0.028893):0.034477,4:0.153973):0.013873,((7:0.074359,(15:0.046571,16:0.039664):0.052553):0.017200,10:0.105834):0.010029):0.032544):0.006952,(29:0.102394,((27:0.023641,26:0.043979):0.096463,28:0.045875):0.037942):0.024822):0.017146,(((21:0.044155,(((23:0.010142,24:0.007993):0.014133,25:0.005647):0.025124,22:0.039496):0.016903):0.063913,8:0.123888):0.0 [...]
+   tree rep.4880000 = ((6:0.096182,3:0.101314):0.008850,(2:0.180802,((((7:0.089270,(10:0.098334,((16:0.015220,15:0.033714):0.059227,(4:0.126859,((18:0.044923,17:0.018152):0.033763,9:0.104549):0.023605):0.020389):0.005051):0.021175):0.013761,((29:0.088563,(19:0.086849,14:0.053197):0.021924):0.011126,(28:0.031994,(26:0.027048,27:0.036878):0.099366):0.036619):0.010085):0.007875,((5:0.100651,20:0.063319):0.062922,(13:0.094083,(((22:0.042638,(25:0.003395,(24:0.009470,23:0.019103):0.008457):0. [...]
+   tree rep.4900000 = (6:0.079466,((((((((29:0.115017,(28:0.058494,(26:0.032822,27:0.026283):0.052022):0.018191):0.011642,(10:0.099163,(((16:0.018209,15:0.027411):0.045727,7:0.112909):0.007909,(4:0.103859,((18:0.025815,17:0.027008):0.029136,9:0.131084):0.023483):0.019349):0.009114):0.022406):0.004248,(14:0.100186,19:0.093108):0.031489):0.015555,13:0.082623):0.001215,((5:0.091761,20:0.095008):0.082168,(((22:0.034943,(23:0.008092,(24:0.007883,25:0.014776):0.006085):0.032863):0.008680,21:0. [...]
+   tree rep.4920000 = (3:0.163846,(6:0.082412,(2:0.205721,((((19:0.098413,14:0.104002):0.026285,((7:0.118665,((((18:0.045124,17:0.026366):0.037136,9:0.107981):0.035866,4:0.134896):0.014283,(10:0.118527,(16:0.022360,15:0.056661):0.044992):0.014366):0.007290):0.022064,(29:0.089083,(28:0.048174,(26:0.022013,27:0.044640):0.078280):0.022977):0.008582):0.002336):0.010743,(((((25:0.002506,(24:0.015000,23:0.026066):0.009802):0.018909,22:0.032086):0.020197,21:0.057781):0.036968,8:0.107019):0.0322 [...]
+   tree rep.4940000 = (6:0.090252,(3:0.166706,(((11:0.017761,12:0.074907):0.046531,((((22:0.042726,((24:0.010446,23:0.013375):0.007646,25:0.014549):0.036694):0.013700,21:0.054280):0.015908,8:0.145896):0.016343,((20:0.089144,5:0.082169):0.079384,(((((27:0.043539,26:0.013926):0.082993,28:0.031819):0.029618,29:0.105155):0.017860,((19:0.083592,14:0.120837):0.020663,((10:0.094236,(7:0.119895,(15:0.039598,16:0.018540):0.052054):0.010732):0.015774,(((17:0.027205,18:0.028683):0.041006,9:0.099694 [...]
+   tree rep.4960000 = (3:0.134279,(6:0.111997,(2:0.170050,((11:0.032562,12:0.059747):0.050326,((((19:0.061189,14:0.087169):0.031111,(((((17:0.025246,18:0.032854):0.050000,9:0.096214):0.019035,4:0.123994):0.014550,(7:0.073161,(15:0.038863,16:0.031693):0.042009):0.005879):0.001910,10:0.109768):0.030449):0.015489,(((27:0.030127,26:0.032176):0.073683,28:0.036036):0.020928,29:0.090049):0.021241):0.006515,((20:0.076972,5:0.084801):0.066667,(((((25:0.005229,(23:0.006139,24:0.010029):0.005943):0 [...]
+   tree rep.4980000 = ((6:0.107270,(2:0.214999,(((13:0.124026,((((19:0.094271,14:0.076453):0.028682,((((16:0.022192,15:0.039835):0.057285,7:0.114975):0.015121,(((18:0.043746,17:0.034835):0.039703,9:0.144503):0.020525,4:0.172094):0.017660):0.010757,10:0.117036):0.015167):0.009440,(29:0.112066,(28:0.052657,(26:0.021642,27:0.037741):0.076527):0.036834):0.007699):0.025663,(5:0.150323,20:0.092580):0.106370):0.002850):0.000454,(((22:0.042066,((23:0.014072,24:0.009166):0.007042,25:0.007939):0.0 [...]
+   tree rep.5000000 = (((2:0.157119,((((8:0.097565,((22:0.030903,(23:0.014546,(25:0.010608,24:0.014254):0.001415):0.027199):0.008680,21:0.084083):0.033160):0.041970,((((26:0.040343,27:0.019802):0.050195,28:0.052784):0.015964,29:0.094431):0.014320,((19:0.064918,14:0.095650):0.006039,((((4:0.111271,((18:0.041099,17:0.024849):0.061747,9:0.111295):0.020968):0.015581,10:0.111055):0.001423,(16:0.031961,15:0.022739):0.072419):0.009121,7:0.102923):0.008271):0.001319):0.009253):0.000528,((5:0.096 [...]
+   tree rep.5020000 = (((((((16:0.038915,15:0.022722):0.030360,7:0.094208):0.019979,10:0.091293):0.004328,(4:0.125817,((18:0.036322,17:0.020112):0.040846,9:0.092672):0.031069):0.009235):0.037819,((13:0.115377,((5:0.116914,20:0.081395):0.098165,(((26:0.027523,27:0.030697):0.086332,28:0.035497):0.023426,((19:0.105571,14:0.066801):0.035163,29:0.089121):0.022438):0.007108):0.002136):0.001049,(8:0.119145,(21:0.039232,(22:0.028056,(23:0.004765,(25:0.009753,24:0.008210):0.001249):0.027444):0.01 [...]
+   tree rep.5040000 = ((3:0.120463,6:0.099593):0.025032,((((((14:0.091900,19:0.069444):0.023008,(((10:0.097344,(7:0.082585,(16:0.017596,15:0.041270):0.021106):0.005030):0.001973,4:0.131546):0.005560,(9:0.109836,(18:0.042464,17:0.017106):0.049076):0.022732):0.019949):0.007959,(29:0.088451,(28:0.052710,(26:0.017549,27:0.029125):0.043833):0.016194):0.021211):0.002629,(((21:0.047684,(22:0.043280,((23:0.012473,24:0.012211):0.007206,25:0.012247):0.025212):0.006842):0.028233,8:0.077758):0.04279 [...]
+   tree rep.5060000 = (3:0.113466,((2:0.196914,((11:0.011447,12:0.084782):0.049920,(((29:0.098208,((27:0.036192,26:0.022906):0.064418,28:0.035916):0.034433):0.015362,((((4:0.145751,(9:0.111410,(17:0.013951,18:0.026708):0.038676):0.041061):0.012511,(7:0.112778,(15:0.045034,16:0.015442):0.076562):0.009469):0.008992,10:0.084202):0.021320,(19:0.079991,14:0.075313):0.024703):0.006721):0.008956,(((21:0.056476,(((24:0.007370,25:0.014805):0.004311,23:0.001374):0.029526,22:0.037960):0.004445):0.0 [...]
+   tree rep.5080000 = ((6:0.092679,3:0.117684):0.018562,(((12:0.061699,11:0.019627):0.031358,(((29:0.089639,((27:0.039484,26:0.018604):0.065099,28:0.065020):0.011648):0.011770,(((10:0.087724,((15:0.043719,16:0.021584):0.060302,((9:0.106863,(18:0.027992,17:0.031589):0.049855):0.025803,4:0.113930):0.005155):0.000429):0.010275,7:0.097297):0.012771,(14:0.092021,19:0.071584):0.022760):0.013301):0.010463,((8:0.112137,(21:0.031976,(22:0.033837,((24:0.008048,23:0.011315):0.003702,25:0.011327):0. [...]
+   tree rep.5100000 = ((2:0.151632,(((((28:0.052474,(26:0.020147,27:0.036929):0.057754):0.025001,29:0.097853):0.014919,((14:0.066058,19:0.067071):0.033994,(((((18:0.034879,17:0.013499):0.036563,9:0.097201):0.015488,4:0.105651):0.009791,(7:0.100742,(16:0.025623,15:0.028372):0.041835):0.017541):0.008268,10:0.096388):0.017070):0.000197):0.005395,((8:0.079053,(21:0.037607,(22:0.033942,((24:0.003133,23:0.007856):0.002868,25:0.004805):0.044000):0.015247):0.027699):0.013221,(13:0.085592,(5:0.07 [...]
+   tree rep.5120000 = ((6:0.090198,3:0.160036):0.023845,(((12:0.080276,11:0.011242):0.076900,((8:0.141193,((22:0.051302,(25:0.011708,(23:0.011096,24:0.008514):0.005244):0.017652):0.002981,21:0.054998):0.035812):0.015725,((((((27:0.030380,26:0.026033):0.080741,28:0.049840):0.022317,29:0.076541):0.014884,((14:0.104796,19:0.087276):0.030621,((10:0.126423,((15:0.054818,16:0.019488):0.047777,7:0.108608):0.005771):0.016753,(4:0.096423,(9:0.097127,(18:0.039173,17:0.018647):0.055863):0.048177):0 [...]
+   tree rep.5140000 = ((((12:0.055313,11:0.034558):0.036414,((((14:0.081327,19:0.070034):0.037810,(((9:0.091066,(18:0.046711,17:0.007896):0.055050):0.039973,4:0.131667):0.004088,(((15:0.026050,16:0.036656):0.039513,7:0.110041):0.018313,10:0.102332):0.004694):0.021692):0.008077,((28:0.053118,(27:0.052068,26:0.030387):0.049705):0.017301,29:0.076636):0.014563):0.007999,((13:0.080393,(8:0.078118,((22:0.037884,((24:0.012343,25:0.010433):0.004504,23:0.001953):0.019338):0.008699,21:0.044244):0. [...]
+   tree rep.5160000 = ((((12:0.069354,11:0.032064):0.065486,((20:0.088105,5:0.081419):0.065514,(((8:0.070386,((21:0.045002,((23:0.018364,24:0.011331):0.004833,25:0.012334):0.025339):0.000238,22:0.047689):0.042050):0.027575,13:0.115401):0.014232,((((((9:0.097487,(18:0.041005,17:0.020334):0.045187):0.022756,4:0.108227):0.003128,(15:0.025630,16:0.029906):0.048953):0.004543,10:0.082242):0.029957,7:0.100860):0.015915,((14:0.136604,19:0.077502):0.004339,((28:0.035561,(27:0.045729,26:0.020829): [...]
+   tree rep.5180000 = ((((12:0.073672,11:0.016637):0.064037,((((14:0.100595,19:0.087830):0.025156,(((28:0.046017,(26:0.050740,27:0.029962):0.075672):0.021729,29:0.068305):0.013018,((4:0.134993,(9:0.098504,(18:0.029274,17:0.011525):0.031031):0.030331):0.019367,(10:0.079235,((16:0.021043,15:0.041106):0.044282,7:0.088833):0.005837):0.011080):0.030069):0.005945):0.008766,(8:0.061289,(21:0.052432,(22:0.026788,((24:0.014272,23:0.008899):0.017693,25:0.003735):0.034367):0.014378):0.012531):0.060 [...]
+   tree rep.5200000 = ((((((((29:0.078037,(28:0.036922,(27:0.026830,26:0.023028):0.066186):0.011807):0.008647,(14:0.098567,19:0.071302):0.030709):0.007592,(((7:0.097236,(15:0.039246,16:0.018383):0.053308):0.005404,(4:0.118707,(9:0.094444,(18:0.040864,17:0.029696):0.058590):0.020185):0.008901):0.024887,10:0.092281):0.014789):0.006857,(13:0.083272,((20:0.076837,5:0.090957):0.085557,(((22:0.028834,((23:0.011865,24:0.010228):0.005421,25:0.002789):0.032975):0.019525,21:0.042084):0.068593,8:0. [...]
+   tree rep.5220000 = (3:0.144774,(((((((((28:0.049654,(26:0.027906,27:0.049523):0.057120):0.014719,29:0.093830):0.009839,(5:0.103759,20:0.097043):0.106554):0.002171,((19:0.083789,14:0.084863):0.026758,((((4:0.112022,(9:0.133932,(18:0.034628,17:0.016759):0.030472):0.045088):0.010913,(16:0.036614,15:0.037990):0.055065):0.003343,10:0.090981):0.008337,7:0.082627):0.010497):0.013133):0.012273,13:0.078239):0.018158,((21:0.051307,(22:0.049051,(23:0.007960,(25:0.014207,24:0.012256):0.004335):0. [...]
+   tree rep.5240000 = (((((((10:0.096892,(((16:0.020114,15:0.044319):0.031108,7:0.129531):0.003755,((9:0.117945,(18:0.026390,17:0.028459):0.071122):0.017868,4:0.118108):0.010094):0.019545):0.035866,((19:0.109054,14:0.076308):0.040143,((5:0.127767,20:0.132670):0.103865,(28:0.088370,(26:0.018960,27:0.033238):0.073962):0.050845):0.004320):0.010436):0.004186,29:0.100024):0.018873,(13:0.105657,((22:0.039386,(21:0.046150,(25:0.003908,(24:0.009974,23:0.009071):0.006708):0.032395):0.008026):0.03 [...]
+   tree rep.5260000 = (((2:0.139210,((((((((16:0.025758,15:0.028772):0.049445,(4:0.120523,((18:0.050435,17:0.014858):0.052450,9:0.120180):0.019402):0.015510):0.005796,10:0.086980):0.019743,7:0.107813):0.016722,(14:0.092853,19:0.091677):0.026864):0.000669,(29:0.064660,(28:0.038513,(26:0.025974,27:0.030535):0.066388):0.017219):0.020451):0.008629,((13:0.092877,(5:0.083825,20:0.085916):0.123436):0.005728,(8:0.098560,((((23:0.013810,24:0.011953):0.011418,25:0.016255):0.027643,22:0.031773):0.0 [...]
+   tree rep.5280000 = ((6:0.074641,3:0.141782):0.002374,(((11:0.033882,12:0.060910):0.044466,((20:0.100885,5:0.091776):0.068273,((13:0.101861,(((23:0.007193,(25:0.009303,24:0.031209):0.000395):0.027440,(21:0.055248,22:0.042760):0.003667):0.041541,8:0.096057):0.022123):0.000303,(((19:0.086496,14:0.065960):0.033503,29:0.098214):0.011109,(((((15:0.036254,16:0.023003):0.031529,7:0.090784):0.004384,(((17:0.014638,18:0.045446):0.031408,9:0.104416):0.033183,4:0.131837):0.007813):0.011333,10:0.0 [...]
+   tree rep.5300000 = ((6:0.102488,3:0.109768):0.006892,(2:0.160115,((((((28:0.032701,(26:0.023417,27:0.030934):0.066942):0.023030,29:0.091123):0.013667,((14:0.070596,19:0.070373):0.035545,((((4:0.129068,((18:0.033261,17:0.014799):0.033974,9:0.111253):0.029572):0.020802,(16:0.018078,15:0.038381):0.050292):0.008859,10:0.093352):0.011077,7:0.078688):0.026883):0.000099):0.006333,(5:0.125434,20:0.086576):0.072599):0.004464,(13:0.067027,(8:0.089462,(21:0.030542,(22:0.051074,((23:0.008553,24:0 [...]
+   tree rep.5320000 = (3:0.138652,(6:0.070000,(2:0.195461,((((13:0.093876,(20:0.102906,5:0.105683):0.110726):0.011823,(8:0.090641,(21:0.059246,((25:0.007500,(23:0.013974,24:0.010444):0.012804):0.013236,22:0.040153):0.023260):0.017077):0.021218):0.011244,((29:0.116696,((27:0.016265,26:0.036105):0.066680,28:0.069146):0.014648):0.012462,(((15:0.035625,16:0.026859):0.046532,((((18:0.040978,17:0.023606):0.046920,9:0.098000):0.036826,4:0.107895):0.019924,(10:0.091305,7:0.100623):0.010932):0.00 [...]
+   tree rep.5340000 = ((((((((((27:0.020934,26:0.031315):0.087308,28:0.043683):0.039185,29:0.093708):0.006527,((10:0.104943,(((15:0.036798,16:0.029192):0.052243,7:0.101704):0.013284,(((18:0.039641,17:0.023656):0.049026,9:0.082007):0.021023,4:0.101877):0.015822):0.002750):0.017463,(14:0.067012,19:0.079505):0.032043):0.006806):0.005485,((21:0.043176,(22:0.031570,((23:0.008056,24:0.020655):0.002111,25:0.007234):0.009284):0.032450):0.043841,8:0.104223):0.029472):0.003552,(13:0.074131,(20:0.0 [...]
+   tree rep.5360000 = (6:0.088936,((2:0.178077,((((8:0.084638,((22:0.033183,((24:0.018354,23:0.017901):0.016667,25:0.007013):0.035018):0.011532,21:0.034712):0.035558):0.013420,(13:0.079965,(20:0.083476,5:0.102142):0.067438):0.003385):0.003821,((14:0.075886,19:0.061968):0.030943,((7:0.072513,(10:0.127171,((15:0.033528,16:0.043923):0.054401,(((18:0.046898,17:0.019869):0.053754,9:0.103843):0.031949,4:0.097431):0.002604):0.029510):0.016647):0.029197,(29:0.073573,((27:0.027168,26:0.027995):0. [...]
+   tree rep.5380000 = ((6:0.102814,3:0.170710):0.000734,(2:0.149662,(((5:0.096251,20:0.113370):0.107390,((((((7:0.078291,(16:0.017095,15:0.059169):0.042241):0.002080,10:0.108386):0.003535,(((18:0.028174,17:0.019047):0.044965,9:0.090657):0.049242,4:0.125587):0.022811):0.016885,(14:0.088411,19:0.065336):0.038464):0.003677,(((26:0.036130,27:0.022966):0.082789,28:0.057295):0.020598,29:0.095375):0.022666):0.003378,(13:0.086042,((21:0.064377,((25:0.005448,(23:0.010779,24:0.011233):0.003826):0. [...]
+   tree rep.5400000 = (6:0.083452,((2:0.222480,((((8:0.093255,(21:0.055015,((25:0.005819,(23:0.010374,24:0.017148):0.010128):0.007667,22:0.022555):0.011230):0.035534):0.029837,13:0.057809):0.009225,(((19:0.092648,14:0.076698):0.031828,(((27:0.032874,26:0.032555):0.063212,28:0.038631):0.012291,29:0.094831):0.016512):0.002146,((7:0.110531,(20:0.084374,5:0.121144):0.102493):0.042429,(10:0.098819,((((17:0.019670,18:0.038758):0.040584,9:0.115492):0.031251,4:0.114012):0.006253,(15:0.038567,16: [...]
+   tree rep.5420000 = (6:0.050916,((2:0.174785,((((4:0.137884,(9:0.096978,(17:0.023416,18:0.041855):0.065604):0.009872):0.008288,((7:0.114081,(15:0.029746,16:0.035682):0.045141):0.016084,10:0.107708):0.014379):0.037942,(((8:0.093876,(21:0.031189,((25:0.005347,(24:0.008659,23:0.017857):0.004320):0.029557,22:0.025603):0.017355):0.017816):0.017940,(13:0.084229,(20:0.086780,5:0.101636):0.128314):0.007611):0.008453,((19:0.099826,14:0.077031):0.031227,(29:0.084080,((27:0.043022,26:0.025128):0. [...]
+   tree rep.5440000 = ((6:0.054427,((((((8:0.112984,(21:0.023471,(22:0.035165,((24:0.012785,23:0.008058):0.003555,25:0.001890):0.019640):0.037856):0.021305):0.025995,13:0.091055):0.002235,(20:0.085536,5:0.094107):0.109738):0.002208,(((4:0.121504,(9:0.113204,(17:0.013270,18:0.039542):0.047220):0.024394):0.014409,((7:0.092676,(15:0.041348,16:0.025320):0.031027):0.018380,10:0.116919):0.006499):0.019816,((29:0.093292,(19:0.070789,14:0.073228):0.031947):0.016003,((27:0.037956,26:0.024885):0.0 [...]
+   tree rep.5460000 = (6:0.110453,(3:0.113361,(2:0.152890,((((14:0.067766,19:0.061823):0.035042,((7:0.095830,(((15:0.034116,16:0.034699):0.054599,10:0.117024):0.001955,(4:0.086306,(9:0.122512,(18:0.051837,17:0.023957):0.041340):0.027584):0.007662):0.015977):0.022878,((28:0.047988,(27:0.048042,26:0.019380):0.064195):0.025187,29:0.107148):0.003860):0.000469):0.011846,(((20:0.137107,5:0.102300):0.091069,(((22:0.024676,(25:0.004557,(23:0.019768,24:0.009273):0.015238):0.027422):0.020615,21:0. [...]
+   tree rep.5480000 = ((((12:0.072086,11:0.025899):0.053887,(((((21:0.037754,((25:0.003019,(23:0.010110,24:0.007473):0.008681):0.034300,22:0.047799):0.007302):0.042530,8:0.117360):0.009550,13:0.092276):0.021800,(((7:0.109961,(10:0.093150,((((18:0.032602,17:0.015334):0.040612,9:0.090923):0.023701,4:0.106912):0.016539,(16:0.027254,15:0.029981):0.062343):0.004789):0.010341):0.012481,(14:0.090461,19:0.080638):0.021792):0.001151,((28:0.039456,(26:0.031682,27:0.034247):0.048949):0.015032,29:0. [...]
+   tree rep.5500000 = ((6:0.096794,3:0.101075):0.000368,(((11:0.042848,12:0.050856):0.047861,((((20:0.123095,5:0.096085):0.102516,13:0.075383):0.002247,(8:0.092682,((((23:0.006612,24:0.009537):0.007127,25:0.005197):0.034844,22:0.029835):0.032056,21:0.040648):0.021485):0.038332):0.004158,(((7:0.095617,((((9:0.119960,(17:0.007974,18:0.039979):0.051981):0.038629,4:0.125220):0.006095,10:0.073060):0.001845,(15:0.035284,16:0.028038):0.071477):0.010538):0.005715,(19:0.060496,14:0.099886):0.0399 [...]
+   tree rep.5520000 = ((3:0.118692,(((12:0.071698,11:0.025526):0.045946,((13:0.085824,(20:0.124781,5:0.116292):0.082535):0.002933,(((((28:0.045620,(27:0.022118,26:0.032549):0.070648):0.006726,29:0.126294):0.021049,(10:0.085541,((4:0.132588,(9:0.099329,(18:0.038566,17:0.015810):0.038701):0.017888):0.007596,(7:0.101072,(15:0.057163,16:0.023949):0.031415):0.002966):0.008974):0.020400):0.001985,(14:0.108961,19:0.055292):0.043359):0.009763,((21:0.042230,(22:0.045339,((24:0.006021,23:0.009672) [...]
+   tree rep.5540000 = (((((12:0.054948,11:0.036501):0.046643,(((29:0.106908,(28:0.050447,(27:0.048151,26:0.016420):0.087298):0.026098):0.015579,((((10:0.096770,(15:0.027483,16:0.041934):0.048286):0.011434,((9:0.091329,(18:0.038986,17:0.018477):0.036761):0.021268,4:0.114338):0.014848):0.010588,7:0.088682):0.007925,(14:0.127872,19:0.079910):0.032731):0.004157):0.007947,((13:0.096392,(20:0.102703,5:0.072230):0.106659):0.002823,((21:0.033004,(22:0.038928,((23:0.007089,24:0.017243):0.003172,2 [...]
+   tree rep.5560000 = ((6:0.090446,3:0.133689):0.014012,(((12:0.062759,11:0.032347):0.044565,((((5:0.123478,20:0.116579):0.085290,13:0.081587):0.011625,(((22:0.023283,((24:0.006481,23:0.026223):0.003692,25:0.008627):0.027984):0.005368,21:0.081453):0.070198,8:0.091826):0.040582):0.008851,((29:0.090662,((26:0.029331,27:0.033935):0.072034,28:0.062639):0.013778):0.013936,((14:0.070402,19:0.113027):0.024356,(7:0.101836,(10:0.124364,((4:0.123441,(9:0.143537,(18:0.038065,17:0.028349):0.031984): [...]
+   tree rep.5580000 = ((3:0.101622,6:0.095906):0.008384,(2:0.145529,((12:0.057102,11:0.025467):0.050144,(((8:0.081678,((((23:0.007766,24:0.008652):0.004373,25:0.009624):0.035212,22:0.030184):0.015581,21:0.054760):0.022991):0.027954,((5:0.098266,20:0.064905):0.124202,13:0.110133):0.003066):0.002887,((14:0.101284,19:0.072166):0.050264,(((28:0.055296,(26:0.028495,27:0.029853):0.046668):0.034557,29:0.108125):0.000225,(7:0.096695,(10:0.082493,((16:0.023489,15:0.054497):0.033437,((9:0.073747,( [...]
+   tree rep.5600000 = (3:0.216255,(6:0.061131,(2:0.269898,((12:0.053323,11:0.051512):0.080550,((13:0.108934,(((((4:0.136407,(9:0.140461,(18:0.035990,17:0.025171):0.039813):0.030194):0.024041,(15:0.045341,16:0.024653):0.077771):0.006300,10:0.096964):0.019323,((20:0.087618,5:0.132593):0.158298,7:0.101603):0.062974):0.009751,(((28:0.041611,(27:0.055722,26:0.030923):0.077512):0.022083,29:0.089269):0.033832,(14:0.089082,19:0.095023):0.034749):0.001804):0.031428):0.003530,(8:0.081036,((21:0.08 [...]
+   tree rep.5620000 = ((2:0.150445,((12:0.052881,11:0.044406):0.044904,((13:0.089628,((5:0.121826,20:0.107988):0.112595,(8:0.095898,(21:0.051807,(22:0.028681,((24:0.010592,25:0.009611):0.005531,23:0.009959):0.043555):0.016109):0.038374):0.027865):0.001242):0.006359,(((28:0.016621,(26:0.017008,27:0.043361):0.055350):0.026216,29:0.091684):0.015403,((14:0.090810,19:0.079114):0.035907,(7:0.121267,(((9:0.100849,(18:0.028814,17:0.028205):0.056969):0.021265,4:0.110058):0.015159,((16:0.022160,15 [...]
+   tree rep.5640000 = (3:0.139819,(6:0.123083,(((12:0.064371,11:0.047858):0.045348,(((((28:0.062080,(27:0.046232,26:0.017307):0.097486):0.028197,(29:0.106472,(20:0.097098,5:0.090104):0.088598):0.006385):0.006920,(((10:0.094445,((15:0.049552,16:0.024035):0.077285,7:0.099465):0.004577):0.004892,((9:0.145144,(18:0.052445,17:0.014628):0.073752):0.014590,4:0.114479):0.022451):0.022208,(14:0.112055,19:0.121623):0.020057):0.002805):0.011165,13:0.093363):0.002286,(8:0.078271,(21:0.051858,(22:0.0 [...]
+   tree rep.5660000 = ((6:0.110930,3:0.119516):0.017178,(((12:0.064647,11:0.013345):0.073604,((20:0.064068,5:0.153093):0.129575,((((((10:0.101070,(15:0.034310,16:0.032751):0.042984):0.005064,7:0.111913):0.002760,(4:0.106877,(9:0.107108,(18:0.041081,17:0.021363):0.060034):0.023318):0.009279):0.042430,(((27:0.032598,26:0.036440):0.068775,28:0.054053):0.010144,29:0.101931):0.009271):0.002620,(14:0.097328,19:0.075222):0.037917):0.012785,((((((23:0.013114,24:0.005325):0.011467,25:0.002891):0. [...]
+   tree rep.5680000 = ((((12:0.093331,11:0.050165):0.063912,((((14:0.086083,19:0.080351):0.038562,(29:0.110266,(28:0.041020,(27:0.035947,26:0.022387):0.066317):0.031622):0.008200):0.014655,(((20:0.104729,5:0.079841):0.066416,13:0.091491):0.008580,(((22:0.052627,(25:0.005396,(23:0.009334,24:0.013280):0.006266):0.024680):0.033114,21:0.032070):0.022872,8:0.091638):0.039615):0.006939):0.004252,((((((18:0.029847,17:0.020824):0.038355,9:0.117102):0.029776,4:0.126859):0.016393,(15:0.036466,16:0 [...]
+   tree rep.5700000 = (3:0.119498,((2:0.164155,((12:0.064125,11:0.019247):0.055557,((8:0.076604,((22:0.034420,(25:0.007084,(23:0.009652,24:0.007511):0.007352):0.021842):0.015815,21:0.078171):0.042819):0.048435,(13:0.112050,((5:0.160496,20:0.078718):0.102054,(((28:0.069222,(26:0.020380,27:0.032550):0.076916):0.018384,29:0.098948):0.012018,((7:0.112510,((((9:0.104379,(18:0.038369,17:0.038366):0.036000):0.043906,4:0.139141):0.005697,(16:0.015700,15:0.050298):0.044642):0.007733,10:0.061277): [...]
+   tree rep.5720000 = (3:0.116027,((((11:0.041811,12:0.049626):0.039561,(((((28:0.051346,(27:0.033115,26:0.012624):0.064592):0.037519,29:0.106876):0.013111,(14:0.103491,19:0.097195):0.023055):0.005348,((((15:0.043205,16:0.018673):0.051988,7:0.116865):0.003318,10:0.112622):0.016635,((9:0.096683,(17:0.023331,18:0.047785):0.031155):0.030212,4:0.121941):0.002548):0.021073):0.006420,((8:0.084276,(21:0.057675,((25:0.008657,(23:0.006245,24:0.009475):0.007864):0.028316,22:0.037873):0.001443):0.0 [...]
+   tree rep.5740000 = ((6:0.101547,3:0.131006):0.024855,(2:0.118200,((12:0.059914,11:0.054802):0.056487,(((((28:0.070582,(26:0.031190,27:0.037623):0.073302):0.038814,29:0.107421):0.009601,((7:0.105084,(((16:0.023180,15:0.054364):0.035848,10:0.084267):0.004048,(4:0.168210,((18:0.035771,17:0.018525):0.064514,9:0.098346):0.014887):0.007898):0.007132):0.034840,(14:0.105378,19:0.100253):0.024138):0.000827):0.013116,13:0.082011):0.002959,((((22:0.032984,((24:0.005076,23:0.008765):0.003443,25:0 [...]
+   tree rep.5760000 = ((3:0.098795,6:0.084736):0.006495,(2:0.126991,((12:0.082709,11:0.024620):0.057073,(((((9:0.079709,(18:0.032995,17:0.017395):0.053905):0.020919,4:0.117975):0.013265,((7:0.092145,(16:0.024203,15:0.038296):0.044054):0.004837,10:0.096980):0.007004):0.032790,((29:0.082590,(28:0.042807,(26:0.031309,27:0.027247):0.066884):0.027412):0.023709,(14:0.085570,19:0.082750):0.053397):0.003604):0.003323,((13:0.097183,((21:0.027227,(((23:0.007776,24:0.008018):0.003719,25:0.004735):0 [...]
+   tree rep.5780000 = (3:0.111512,((2:0.183824,((((8:0.101988,(21:0.068279,(((24:0.007790,23:0.010838):0.012434,25:0.006530):0.034996,22:0.047392):0.011090):0.019748):0.037162,(13:0.094812,(20:0.103670,5:0.093124):0.082808):0.011747):0.002476,((((10:0.088084,((15:0.046960,16:0.020181):0.048544,7:0.107893):0.004610):0.010001,(4:0.102183,(9:0.100028,(17:0.018588,18:0.041060):0.068565):0.019153):0.021201):0.017853,(19:0.085769,14:0.084105):0.022168):0.009295,((28:0.023881,(27:0.030789,26:0. [...]
+   tree rep.5800000 = ((3:0.113850,6:0.112652):0.008840,(2:0.167478,(((10:0.084225,(((9:0.105915,(17:0.021655,18:0.030823):0.051025):0.017793,4:0.149940):0.005738,((15:0.032988,16:0.017202):0.045988,7:0.105336):0.016954):0.007925):0.011246,((19:0.069103,14:0.089262):0.028102,(((13:0.102811,(8:0.125425,((22:0.044208,((23:0.011722,24:0.012071):0.009145,25:0.005489):0.013571):0.008792,21:0.058775):0.051876):0.015604):0.009797,(20:0.078460,5:0.102666):0.095096):0.005107,(29:0.116034,((27:0.0 [...]
+   tree rep.5820000 = ((2:0.142551,((12:0.068710,11:0.028043):0.042990,((13:0.073325,(((14:0.100301,19:0.072558):0.036969,((29:0.080903,((27:0.024557,26:0.021028):0.072665,28:0.045110):0.024750):0.007210,((((((18:0.036098,17:0.028103):0.058189,9:0.081010):0.018720,4:0.148096):0.006587,10:0.145230):0.000404,(15:0.039139,16:0.046914):0.050224):0.011604,7:0.111483):0.028165):0.000746):0.012820,(20:0.130911,5:0.121456):0.093179):0.001353):0.000469,((21:0.069480,(22:0.021086,((25:0.012191,24: [...]
+   tree rep.5840000 = (((((12:0.079952,11:0.028748):0.058441,((13:0.111079,((21:0.025021,(((24:0.004060,25:0.022709):0.017005,23:0.008215):0.037617,22:0.031831):0.016530):0.050384,8:0.111653):0.038589):0.006311,((((7:0.092250,((4:0.145164,(9:0.108465,(18:0.042772,17:0.014139):0.065881):0.025491):0.002420,(10:0.094127,(16:0.022098,15:0.030962):0.043521):0.028752):0.006066):0.019880,(14:0.082999,19:0.099163):0.028723):0.004449,(29:0.140660,(28:0.058340,(26:0.023825,27:0.035406):0.047785):0 [...]
+   tree rep.5860000 = ((6:0.081540,(3:0.091903,((12:0.068285,11:0.016762):0.039810,((((((7:0.124102,(15:0.043622,16:0.019540):0.056341):0.001238,(((18:0.022912,17:0.019635):0.044815,9:0.106827):0.020007,4:0.114534):0.014662):0.004067,10:0.111894):0.029402,(((27:0.027935,26:0.025339):0.079633,28:0.045276):0.020946,29:0.088258):0.018745):0.001177,(14:0.103271,19:0.063983):0.034281):0.016076,((13:0.107250,(8:0.094246,(21:0.041882,(((23:0.007334,24:0.016524):0.002099,25:0.006429):0.042599,22 [...]
+   tree rep.5880000 = ((3:0.127594,6:0.066756):0.012896,(((12:0.075865,11:0.022523):0.051142,((((((27:0.022651,26:0.026459):0.069839,28:0.042908):0.024587,29:0.115336):0.020253,(14:0.075798,19:0.093491):0.023111):0.006899,((4:0.082803,(9:0.106816,(18:0.035485,17:0.016283):0.025519):0.032269):0.009892,((15:0.024665,16:0.022457):0.062984,(10:0.093811,7:0.083764):0.007522):0.019072):0.022301):0.004189,(((21:0.036707,((25:0.008261,(23:0.013037,24:0.012402):0.002376):0.029027,22:0.031327):0.0 [...]
+   tree rep.5900000 = ((((12:0.060946,11:0.029162):0.053873,(((13:0.080810,(20:0.066631,5:0.096081):0.058900):0.009274,((21:0.045524,(22:0.024252,(25:0.009013,(24:0.009315,23:0.015263):0.013584):0.028962):0.002299):0.019722,8:0.109057):0.014316):0.001340,(((10:0.091682,7:0.117893):0.004862,((((18:0.035733,17:0.010323):0.054912,9:0.115074):0.017087,4:0.141217):0.010704,(15:0.023768,16:0.031871):0.044727):0.000510):0.030521,((14:0.079901,19:0.096512):0.023708,(((27:0.050458,26:0.020483):0. [...]
+   tree rep.5920000 = ((3:0.119977,(2:0.204832,((12:0.069728,11:0.030899):0.052468,(((8:0.100409,((((23:0.007143,24:0.010880):0.005947,25:0.010803):0.022513,22:0.027530):0.005288,21:0.064188):0.030690):0.030749,13:0.093813):0.000566,(((14:0.107600,19:0.077314):0.025148,(7:0.093744,((10:0.097236,((9:0.103497,(18:0.037798,17:0.026957):0.033439):0.022567,4:0.106514):0.011663):0.004290,(16:0.016109,15:0.040979):0.055687):0.011828):0.012108):0.000490,((((26:0.030567,27:0.036474):0.083803,28:0 [...]
+   tree rep.5940000 = ((6:0.110080,(((12:0.047473,11:0.026954):0.063859,((((((15:0.043214,16:0.015977):0.063609,7:0.085677):0.010168,(((18:0.037337,17:0.016557):0.049015,9:0.098065):0.018862,4:0.131411):0.029170):0.000581,10:0.117566):0.031513,(((19:0.118261,14:0.075021):0.038277,(((27:0.028250,26:0.025821):0.068958,28:0.055875):0.025173,29:0.069687):0.012684):0.003753,(13:0.090391,(20:0.099397,5:0.099925):0.100549):0.000806):0.005778):0.005033,(8:0.080011,(21:0.033639,(((23:0.014409,24: [...]
+   tree rep.5960000 = ((3:0.140701,6:0.094064):0.001818,(((((((19:0.064896,14:0.127896):0.041451,((10:0.087745,((16:0.022149,15:0.046793):0.045330,((9:0.121753,(18:0.041188,17:0.037601):0.059092):0.038497,4:0.136090):0.014990):0.006621):0.026102,7:0.075739):0.011244):0.013349,((28:0.056545,(26:0.032887,27:0.018949):0.063475):0.029759,29:0.116799):0.008190):0.012704,(11:0.018334,12:0.066218):0.077206):0.001283,((5:0.133672,20:0.099805):0.095108,13:0.119564):0.004920):0.004593,((21:0.04739 [...]
+   tree rep.5980000 = ((3:0.132802,6:0.077388):0.011071,(2:0.152499,((12:0.075951,11:0.029867):0.065991,((((5:0.081193,20:0.077131):0.090938,13:0.079696):0.000400,((21:0.045073,(22:0.021969,(25:0.005194,(24:0.016985,23:0.019282):0.003137):0.023953):0.025797):0.017435,8:0.092787):0.033877):0.004936,(((14:0.080458,19:0.075118):0.030829,(((16:0.033467,15:0.023014):0.038129,(10:0.086602,7:0.091046):0.016562):0.006533,(4:0.091983,(9:0.082416,(18:0.033245,17:0.017421):0.040357):0.021034):0.009 [...]
+   tree rep.6000000 = ((3:0.105634,(2:0.163200,((12:0.067391,11:0.046041):0.051646,(((((10:0.081758,(7:0.105045,(16:0.045297,15:0.038546):0.037581):0.008155):0.010343,(4:0.149651,(9:0.096475,(18:0.038851,17:0.013867):0.039932):0.023239):0.008076):0.022179,(14:0.095539,19:0.105759):0.026653):0.005032,(29:0.087289,((26:0.022122,27:0.042867):0.093411,28:0.036173):0.031901):0.006862):0.005486,((13:0.077633,(5:0.089673,20:0.125966):0.062770):0.007156,((21:0.052367,(((23:0.014961,24:0.011878): [...]
+   tree rep.6020000 = (3:0.118017,(6:0.079542,(2:0.225777,((12:0.077506,11:0.008979):0.070333,(((21:0.045692,(22:0.054577,((23:0.006731,24:0.014067):0.005074,25:0.006427):0.007659):0.025139):0.048485,8:0.060659):0.037583,(13:0.073156,((20:0.077172,5:0.162805):0.102141,(((19:0.076494,14:0.096720):0.016376,(7:0.121614,(((9:0.099012,(18:0.042684,17:0.020486):0.038456):0.013596,4:0.118943):0.010193,(10:0.095054,(15:0.038240,16:0.018801):0.070357):0.003424):0.019299):0.023087):0.002828,((28:0 [...]
+   tree rep.6040000 = (3:0.122333,(((((((22:0.032152,(25:0.003507,(24:0.009137,23:0.008261):0.008554):0.016060):0.012261,21:0.028042):0.023865,8:0.106314):0.040211,((((28:0.041482,(27:0.035700,26:0.024033):0.062846):0.034472,((14:0.093902,19:0.055877):0.047913,(7:0.107894,((4:0.113791,(9:0.091065,(17:0.025042,18:0.028217):0.051815):0.020267):0.016843,((15:0.036550,16:0.033095):0.043392,10:0.122987):0.003823):0.005822):0.016249):0.003653):0.000073,29:0.129182):0.005049,(13:0.106494,(20:0. [...]
+   tree rep.6060000 = ((3:0.085243,6:0.114432):0.002671,(((((13:0.137314,(20:0.089588,5:0.083609):0.070120):0.007388,((29:0.089331,(28:0.050473,(27:0.026293,26:0.031535):0.068668):0.029844):0.013684,((10:0.087534,(((7:0.077778,((18:0.041480,17:0.017881):0.035050,9:0.110505):0.018629):0.010241,(15:0.033783,16:0.014356):0.066894):0.013626,4:0.107927):0.005316):0.006743,(19:0.100081,14:0.073334):0.023386):0.008381):0.009914):0.011928,(8:0.084489,(21:0.048274,(22:0.020591,(25:0.002508,(24:0. [...]
+   tree rep.6080000 = (((((((8:0.112784,(21:0.050788,(((24:0.006361,23:0.007179):0.002577,25:0.018051):0.019209,22:0.033962):0.006155):0.028691):0.029189,((5:0.100671,20:0.083790):0.076122,13:0.085566):0.001715):0.001623,(((((4:0.116551,(9:0.093875,(18:0.029397,17:0.024779):0.038075):0.026058):0.007012,(10:0.105667,(16:0.031672,15:0.044023):0.041507):0.018116):0.011684,7:0.114303):0.017554,(14:0.092553,19:0.082857):0.027119):0.005293,(29:0.077396,((26:0.036068,27:0.035586):0.046171,28:0. [...]
+   tree rep.6100000 = (3:0.111432,(6:0.073014,(((12:0.054655,11:0.039159):0.030531,(((5:0.081899,20:0.053889):0.102124,13:0.075838):0.003761,((8:0.107561,(21:0.055970,(22:0.029367,((24:0.009828,23:0.012481):0.015767,25:0.003735):0.026777):0.019416):0.019323):0.026708,((29:0.139745,((26:0.009906,27:0.048476):0.068637,28:0.045714):0.019888):0.004918,((7:0.099935,((10:0.062867,((9:0.139702,(18:0.032809,17:0.027055):0.033590):0.025720,4:0.121965):0.012443):0.008606,(16:0.029946,15:0.044219): [...]
+   tree rep.6120000 = ((6:0.117439,(((11:0.018973,12:0.043176):0.046837,((((28:0.027759,(26:0.019857,27:0.044346):0.046525):0.015706,29:0.090656):0.017993,(((4:0.183632,(9:0.127171,(18:0.022456,17:0.042054):0.038505):0.058146):0.011785,(((16:0.035037,15:0.025866):0.053709,7:0.092214):0.010044,10:0.104777):0.007688):0.028671,(19:0.090238,14:0.103331):0.024589):0.005124):0.007762,((5:0.166412,20:0.068364):0.126973,((8:0.084690,((22:0.037638,(25:0.002070,(24:0.014249,23:0.012406):0.007608): [...]
+   tree rep.6140000 = ((3:0.142650,6:0.071706):0.019791,(((((8:0.092298,(21:0.033573,((25:0.005061,(24:0.005457,23:0.009819):0.003026):0.035237,22:0.024964):0.018435):0.027213):0.016472,((20:0.092476,5:0.088564):0.065129,13:0.069360):0.009091):0.016731,((29:0.067866,((27:0.027944,26:0.026915):0.048054,28:0.024852):0.028506):0.012807,((19:0.063103,14:0.085224):0.024442,((10:0.103187,((15:0.043843,16:0.036049):0.044860,7:0.082958):0.006098):0.013152,((9:0.111703,(18:0.032562,17:0.012359):0 [...]
+   tree rep.6160000 = ((3:0.121065,(2:0.147359,(((((5:0.084039,20:0.088938):0.092214,13:0.085615):0.008285,((21:0.051824,((23:0.007199,(25:0.012289,24:0.008624):0.000577):0.020194,22:0.029653):0.016889):0.033127,8:0.074320):0.064126):0.007035,(((19:0.075362,14:0.107039):0.031994,((10:0.085842,(7:0.082433,(16:0.023743,15:0.035554):0.044877):0.011698):0.002465,((9:0.123265,(18:0.035222,17:0.013172):0.027642):0.027898,4:0.121762):0.011321):0.019976):0.008176,(((26:0.030525,27:0.037942):0.06 [...]
+   tree rep.6180000 = ((((12:0.063715,11:0.035539):0.026962,((((((((26:0.036924,27:0.024097):0.093926,28:0.036054):0.028063,29:0.095890):0.007099,(((10:0.085976,(16:0.028040,15:0.061549):0.066503):0.003153,(4:0.165937,(9:0.113981,(18:0.039747,17:0.018110):0.070615):0.026501):0.005699):0.014166,7:0.104824):0.025376):0.002957,(14:0.105888,19:0.078733):0.030936):0.006728,(5:0.071654,20:0.120602):0.126310):0.003314,((((25:0.009208,(23:0.010888,24:0.010246):0.012751):0.032649,22:0.036898):0.0 [...]
+   tree rep.6200000 = (((((13:0.140977,(8:0.116597,(21:0.045226,(22:0.065026,(25:0.006710,(24:0.015004,23:0.012259):0.008259):0.023225):0.017113):0.006261):0.053036):0.004307,((29:0.111745,((26:0.028517,27:0.045958):0.070135,28:0.056797):0.031974):0.026146,(((((16:0.024193,15:0.026155):0.065292,7:0.069813):0.007265,10:0.123081):0.004109,(((18:0.054205,17:0.023131):0.043033,9:0.117707):0.045205,4:0.167552):0.012862):0.037827,(19:0.066040,14:0.108516):0.053513):0.011084):0.014326):0.010070 [...]
+   tree rep.6220000 = ((6:0.080948,3:0.142179):0.005693,(2:0.212843,((12:0.068551,11:0.031724):0.049627,((13:0.085764,((5:0.107159,20:0.114108):0.088470,((29:0.087236,((26:0.030199,27:0.014607):0.074034,28:0.085192):0.015109):0.007880,(((4:0.137471,(9:0.113465,(18:0.031147,17:0.014742):0.024010):0.049622):0.018391,((7:0.068201,(16:0.017124,15:0.044469):0.029732):0.009065,10:0.091984):0.006777):0.020618,(14:0.096598,19:0.075492):0.044849):0.002947):0.019317):0.003115):0.003521,((21:0.0354 [...]
+   tree rep.6240000 = (3:0.155083,((2:0.123136,(((((19:0.067600,14:0.074672):0.024000,((5:0.066816,20:0.105022):0.089048,(7:0.091942,((16:0.028742,15:0.040173):0.038943,(10:0.106613,(4:0.131829,((18:0.036019,17:0.036645):0.055157,9:0.085720):0.029383):0.006605):0.008154):0.008092):0.031504):0.001451):0.008050,((28:0.039314,(26:0.019853,27:0.046320):0.082286):0.032644,29:0.123307):0.014591):0.009126,(((21:0.039179,((25:0.005398,(24:0.013805,23:0.006915):0.008807):0.013378,22:0.035649):0.0 [...]
+   tree rep.6260000 = ((6:0.080102,3:0.145672):0.019525,(((12:0.094345,11:0.025688):0.044128,((((((((18:0.033199,17:0.020658):0.046724,9:0.084313):0.023806,4:0.117416):0.018985,(10:0.105996,((15:0.029110,16:0.020206):0.061731,7:0.096426):0.015670):0.000996):0.032513,(19:0.093881,14:0.084802):0.024179):0.013041,((28:0.035363,(27:0.032117,26:0.016032):0.078816):0.025201,29:0.120546):0.022990):0.007192,((20:0.122912,5:0.103369):0.069727,13:0.111012):0.008440):0.009485,(((((24:0.007461,23:0. [...]
+   tree rep.6280000 = (6:0.078968,((((((20:0.084199,5:0.092555):0.055865,((((9:0.105212,(17:0.030692,18:0.051544):0.040806):0.026745,4:0.163187):0.022699,((15:0.051288,16:0.020515):0.051391,(7:0.085576,10:0.085752):0.007057):0.020678):0.040984,((29:0.099417,(28:0.067492,(27:0.035637,26:0.027271):0.091556):0.022767):0.028048,(14:0.105699,19:0.077328):0.031803):0.001640):0.003625):0.007991,(((22:0.030058,((23:0.007882,(24:0.009265,25:0.009063):0.002188):0.028538,21:0.052041):0.008228):0.04 [...]
+   tree rep.6300000 = ((3:0.117978,6:0.119300):0.012691,(2:0.185367,((11:0.025335,12:0.070647):0.039029,((((20:0.119250,5:0.072753):0.098327,(29:0.097885,((27:0.024868,26:0.022792):0.074978,28:0.036562):0.036550):0.009207):0.010543,((14:0.113008,19:0.074817):0.038602,(((7:0.100947,(15:0.037126,16:0.025671):0.033653):0.015129,10:0.116756):0.004282,(4:0.170199,(9:0.142752,(17:0.032305,18:0.025787):0.028880):0.038341):0.012919):0.033874):0.005739):0.013106,(13:0.092843,((21:0.050176,(22:0.0 [...]
+   tree rep.6320000 = (3:0.140902,((2:0.168109,((((8:0.091405,(((25:0.003805,(24:0.018069,23:0.011885):0.012418):0.045858,22:0.047798):0.009223,21:0.043212):0.039054):0.015716,((20:0.115104,5:0.089402):0.072851,13:0.089185):0.012262):0.008495,((29:0.107444,((27:0.041067,26:0.030559):0.069275,28:0.044762):0.025794):0.036456,(((4:0.125586,((18:0.034945,17:0.016682):0.035233,9:0.104530):0.026089):0.019531,((7:0.122934,(15:0.038270,16:0.024693):0.050104):0.008062,10:0.111636):0.016524):0.029 [...]
+   tree rep.6340000 = ((6:0.091717,(2:0.192996,((((((10:0.106344,((15:0.044476,16:0.008241):0.036689,7:0.108862):0.006872):0.007447,((9:0.110323,(18:0.046204,17:0.017804):0.043302):0.045374,4:0.133773):0.016394):0.019046,(14:0.079506,19:0.071956):0.034220):0.013132,(((27:0.041104,26:0.033933):0.065640,28:0.045429):0.014145,29:0.093711):0.012276):0.013436,(((20:0.109128,5:0.111261):0.073341,13:0.097004):0.002206,(((22:0.033556,((23:0.009873,24:0.017288):0.008840,25:0.003221):0.032113):0.0 [...]
+   tree rep.6360000 = (((((((5:0.092928,20:0.111145):0.065777,((8:0.096224,((22:0.035077,(25:0.011788,(23:0.026018,24:0.007023):0.010735):0.026974):0.011593,21:0.040514):0.038785):0.019365,13:0.129571):0.002364):0.012690,((14:0.131032,19:0.073124):0.025763,(((10:0.102523,((((18:0.021253,17:0.021374):0.042062,9:0.112967):0.020119,4:0.104100):0.009664,(16:0.037986,15:0.044877):0.051427):0.000037):0.021167,7:0.110391):0.018794,(((26:0.014782,27:0.038893):0.068851,28:0.043237):0.018463,29:0. [...]
+   tree rep.6380000 = (((((13:0.087236,((((14:0.110145,19:0.082723):0.048526,(((7:0.119682,(15:0.031678,16:0.022497):0.031450):0.012602,((9:0.082491,(18:0.031652,17:0.024248):0.039257):0.027577,4:0.142025):0.022000):0.005921,10:0.111103):0.028980):0.005101,(29:0.078035,(28:0.041336,(27:0.043875,26:0.026680):0.071010):0.021693):0.020833):0.011043,((20:0.062933,5:0.098461):0.100789,(8:0.105708,(21:0.038825,((25:0.004739,(23:0.009171,24:0.008025):0.005802):0.017340,22:0.047665):0.015867):0. [...]
+   tree rep.6400000 = ((6:0.059948,3:0.142010):0.008905,((((13:0.097011,((8:0.090823,((22:0.029952,(23:0.016124,(25:0.009088,24:0.014106):0.002075):0.021528):0.023825,21:0.044800):0.035537):0.009789,(5:0.075136,20:0.108209):0.104726):0.009546):0.004191,((29:0.103257,(28:0.066297,(26:0.038294,27:0.018824):0.091611):0.028119):0.012479,((((4:0.131465,((18:0.038363,17:0.033293):0.027234,9:0.100104):0.012187):0.019613,(7:0.126676,(16:0.045415,15:0.049650):0.041565):0.008696):0.012062,10:0.086 [...]
+   tree rep.6420000 = ((6:0.068381,3:0.118196):0.002384,(((((((7:0.102802,(16:0.025481,15:0.032336):0.061678):0.006131,10:0.086843):0.004170,(4:0.133315,((18:0.039569,17:0.007595):0.079700,9:0.094710):0.010654):0.027046):0.029653,(((28:0.061321,(26:0.031810,27:0.029247):0.069461):0.022429,29:0.087211):0.010196,(14:0.071061,19:0.083311):0.040212):0.012205):0.002128,((8:0.075877,((((23:0.017554,24:0.005734):0.008877,25:0.002347):0.016778,22:0.030440):0.035336,21:0.043717):0.032283):0.05733 [...]
+   tree rep.6440000 = (6:0.100421,((2:0.212232,((((7:0.088612,(16:0.027784,15:0.052235):0.064073):0.021949,(((9:0.149825,(18:0.033627,17:0.027125):0.051032):0.033981,4:0.106744):0.008401,10:0.084561):0.005198):0.028744,(((29:0.096578,((26:0.013176,27:0.027432):0.071092,28:0.032601):0.022111):0.019736,(14:0.103608,19:0.075948):0.025757):0.013741,((((22:0.018091,(((24:0.007777,25:0.011921):0.005910,23:0.009131):0.032158,21:0.069256):0.005085):0.029126,8:0.079883):0.023760,13:0.075577):0.00 [...]
+   tree rep.6460000 = (3:0.078764,((2:0.176023,((12:0.058315,11:0.035644):0.038651,(((14:0.074606,19:0.089620):0.039452,(((20:0.117844,5:0.084149):0.088541,(((27:0.037741,26:0.025673):0.087011,28:0.046152):0.018418,29:0.082741):0.008229):0.005518,(13:0.062683,(8:0.082893,((25:0.015736,(23:0.009685,24:0.008750):0.005574):0.037593,(21:0.054419,22:0.034724):0.000288):0.045116):0.014952):0.014556):0.012081):0.011006,((((15:0.036340,16:0.019282):0.053164,10:0.082948):0.005128,(4:0.109917,((18 [...]
+   tree rep.6480000 = (((2:0.225449,((12:0.056396,11:0.027190):0.028568,(((((27:0.039689,26:0.032194):0.084527,28:0.048610):0.019512,29:0.098073):0.026981,((14:0.110281,19:0.097433):0.021261,((10:0.134340,(7:0.091673,(15:0.054043,16:0.025448):0.039437):0.007035):0.006176,(((18:0.031189,17:0.021584):0.048167,9:0.109345):0.033689,4:0.196494):0.003618):0.056587):0.008199):0.004917,(((20:0.088290,5:0.143978):0.133289,(8:0.111477,(21:0.054930,(22:0.047951,(25:0.004506,(23:0.016733,24:0.008697 [...]
+   tree rep.6500000 = (((2:0.175838,((((((25:0.006213,(24:0.019065,23:0.018846):0.007257):0.025700,22:0.028253):0.016166,21:0.058109):0.035053,8:0.095164):0.017960,(13:0.084455,((((28:0.042508,(27:0.053419,26:0.024412):0.068971):0.025968,(20:0.125729,5:0.143468):0.091414):0.011450,29:0.077155):0.003141,(((4:0.119790,(9:0.146075,(17:0.028320,18:0.046679):0.052539):0.028044):0.024618,(10:0.105639,((15:0.035654,16:0.024027):0.066886,7:0.100614):0.010280):0.001125):0.024116,(14:0.080826,19:0 [...]
+   tree rep.6520000 = ((2:0.172615,(((((21:0.047090,(((24:0.012185,23:0.009020):0.009388,25:0.009601):0.029233,22:0.035599):0.021779):0.042884,8:0.102027):0.013969,((((4:0.170397,(7:0.111220,(((18:0.039596,17:0.035994):0.047620,9:0.106430):0.035738,(16:0.021798,15:0.040969):0.052435):0.016217):0.003617):0.005698,10:0.080718):0.015522,(14:0.099726,19:0.083060):0.041394):0.025931,((28:0.042717,(26:0.017559,27:0.041324):0.067786):0.009523,29:0.091428):0.015871):0.020755):0.001565,(13:0.0765 [...]
+   tree rep.6540000 = (((((5:0.096073,20:0.092424):0.077650,((13:0.106763,((21:0.055381,((25:0.017767,(23:0.003414,24:0.015576):0.000506):0.013011,22:0.023241):0.019534):0.035469,8:0.087707):0.042858):0.013440,(((14:0.076707,19:0.067181):0.045396,(((26:0.024732,27:0.036013):0.076311,28:0.036142):0.022104,29:0.111221):0.024332):0.002351,(10:0.101987,((((18:0.039317,17:0.018609):0.047716,9:0.075785):0.038912,4:0.103556):0.008214,(7:0.118376,(16:0.029602,15:0.035710):0.042649):0.005168):0.0 [...]
+   tree rep.6560000 = (3:0.157088,(6:0.090820,((((((21:0.045984,((25:0.001156,(23:0.012967,24:0.010691):0.005415):0.027649,22:0.034138):0.030268):0.039310,8:0.095398):0.025510,(13:0.069599,(5:0.070806,20:0.103658):0.058543):0.016865):0.002192,((((10:0.105000,(7:0.096218,(16:0.024844,15:0.032276):0.045328):0.009536):0.002544,(4:0.125872,((18:0.031555,17:0.022503):0.036729,9:0.125828):0.020961):0.017605):0.039750,(((26:0.029185,27:0.033402):0.066589,28:0.046752):0.019967,29:0.087710):0.003 [...]
+   tree rep.6580000 = ((3:0.129629,6:0.101126):0.031689,(2:0.152466,((12:0.070096,11:0.033152):0.052665,((13:0.078672,(8:0.080325,(21:0.050265,((25:0.009626,(24:0.013422,23:0.009244):0.001188):0.036344,22:0.022987):0.017170):0.022053):0.024849):0.010821,(((14:0.098628,19:0.086387):0.018882,(((((18:0.043458,17:0.018911):0.046834,9:0.102547):0.027163,4:0.116913):0.017362,(10:0.088022,(7:0.087946,(15:0.043006,16:0.018579):0.032478):0.013111):0.006749):0.026806,(29:0.099742,(28:0.045561,(27: [...]
+   tree rep.6600000 = ((((12:0.091754,11:0.030795):0.055193,((((28:0.040982,(27:0.046892,26:0.016077):0.062449):0.039088,((14:0.089894,19:0.070008):0.028208,29:0.113495):0.005222):0.003281,((((15:0.037575,16:0.021071):0.052233,7:0.077999):0.001517,(4:0.123012,((18:0.024430,17:0.030052):0.050534,9:0.105946):0.025951):0.021651):0.005265,10:0.088856):0.034831):0.019571,(8:0.113918,(13:0.085448,((20:0.105439,5:0.094570):0.061616,(21:0.041243,(((23:0.013234,24:0.008840):0.003873,25:0.014159): [...]
+   tree rep.6620000 = (6:0.077520,(((((((((((9:0.108600,(17:0.012227,18:0.035383):0.059796):0.029814,4:0.117045):0.008258,(7:0.085257,(15:0.038282,16:0.018297):0.037578):0.019575):0.004639,10:0.096855):0.029186,(19:0.091114,14:0.090639):0.025496):0.010327,(28:0.054812,(27:0.048556,26:0.034258):0.079058):0.024701):0.001235,29:0.105670):0.005665,((8:0.079001,(21:0.041426,((25:0.010387,(24:0.012708,23:0.008363):0.008074):0.033642,22:0.032545):0.019760):0.045540):0.029877,((20:0.087096,5:0.0 [...]
+   tree rep.6640000 = (3:0.119625,((((12:0.058969,11:0.020732):0.052806,(((13:0.076091,(5:0.112423,20:0.058465):0.098862):0.001827,(8:0.082971,((22:0.026648,((24:0.007337,23:0.017006):0.005100,25:0.004256):0.030112):0.016775,21:0.052770):0.033890):0.014500):0.005125,((7:0.073980,(((4:0.093038,(9:0.112105,(18:0.037130,17:0.020103):0.037496):0.010160):0.026611,(16:0.025768,15:0.047818):0.038229):0.006724,10:0.079609):0.019581):0.021518,((((26:0.014293,27:0.026560):0.065252,28:0.041531):0.0 [...]
+   tree rep.6660000 = ((6:0.079265,3:0.175721):0.001628,(2:0.132686,((((13:0.082028,((((25:0.010160,(24:0.016008,23:0.016665):0.006222):0.022372,22:0.021598):0.018646,21:0.053316):0.013555,8:0.091205):0.013417):0.021360,(20:0.092298,5:0.092736):0.076486):0.007205,(((19:0.085540,14:0.100905):0.029274,(10:0.098090,(((9:0.147667,(17:0.022871,18:0.041137):0.042657):0.029217,4:0.142675):0.002342,(7:0.087991,(15:0.036285,16:0.037496):0.037007):0.019284):0.014231):0.025911):0.005282,(29:0.06920 [...]
+   tree rep.6680000 = (2:0.159541,(((11:0.035921,12:0.064093):0.062385,((((10:0.076609,((4:0.130460,((18:0.034848,17:0.014049):0.038733,9:0.110209):0.030023):0.019802,(16:0.037296,15:0.042240):0.054097):0.002174):0.010029,7:0.119544):0.034633,((29:0.113047,(28:0.051026,(26:0.024760,27:0.026115):0.065979):0.025821):0.011411,(19:0.090022,14:0.132305):0.051707):0.005945):0.001334,((5:0.106990,20:0.064567):0.118393,(13:0.082491,((21:0.040022,(((24:0.017148,23:0.007907):0.002128,25:0.005178): [...]
+   tree rep.6700000 = ((6:0.078482,(2:0.147192,((11:0.044839,12:0.071814):0.032016,(((((10:0.095360,((16:0.043212,15:0.034717):0.028061,7:0.097474):0.010601):0.008934,(4:0.123290,((18:0.034135,17:0.027508):0.066011,9:0.097146):0.020946):0.004725):0.026292,(19:0.089057,14:0.077122):0.019809):0.003374,(29:0.086682,((26:0.036950,27:0.035722):0.056110,28:0.039171):0.018657):0.026182):0.012001,(13:0.082400,((5:0.121761,20:0.074884):0.095605,(((22:0.045912,(25:0.002480,(24:0.005600,23:0.008576 [...]
+   tree rep.6720000 = ((6:0.093081,((((13:0.099431,(((((25:0.003268,(24:0.014631,23:0.011999):0.021347):0.029751,22:0.039030):0.011649,21:0.055046):0.054746,8:0.113154):0.006433,(5:0.120590,20:0.097981):0.105369):0.004276):0.010453,(((((26:0.012624,27:0.033721):0.072842,28:0.033654):0.012295,29:0.072148):0.020029,(19:0.103544,14:0.104057):0.016422):0.001846,((4:0.151433,((18:0.034527,17:0.040547):0.046602,9:0.090276):0.022554):0.011247,(10:0.094669,((16:0.027570,15:0.035842):0.045182,7:0 [...]
+   tree rep.6740000 = ((3:0.152813,6:0.097409):0.008821,(((((((17:0.024849,18:0.032711):0.035225,9:0.116089):0.030378,4:0.118994):0.024011,(((15:0.051611,16:0.035493):0.036857,7:0.137944):0.022772,10:0.070222):0.009833):0.033677,((((28:0.041294,(27:0.026479,26:0.041718):0.052497):0.030459,((19:0.074888,14:0.090126):0.028812,29:0.111426):0.017961):0.004754,((20:0.108436,5:0.111470):0.075773,13:0.075885):0.006307):0.000101,(8:0.101611,(21:0.040463,(22:0.025414,((23:0.014331,24:0.010337):0. [...]
+   tree rep.6760000 = (2:0.188652,(3:0.119134,(((11:0.028527,12:0.052419):0.063537,((((((19:0.105407,14:0.112007):0.033146,(((16:0.030497,15:0.034396):0.032709,(10:0.104718,7:0.090964):0.002532):0.007154,(4:0.140912,(9:0.116506,(18:0.030203,17:0.011079):0.041068):0.019473):0.013996):0.022611):0.000593,(28:0.062906,(26:0.034710,27:0.037829):0.076403):0.036720):0.002674,29:0.116671):0.006026,(5:0.143716,20:0.101350):0.105223):0.000016,((8:0.089229,((22:0.031852,(25:0.007316,(24:0.011126,23 [...]
+   tree rep.6780000 = ((((11:0.024031,12:0.066822):0.069610,((((4:0.114564,((9:0.118758,(18:0.036543,17:0.018527):0.036543):0.038994,(7:0.066666,((16:0.024774,15:0.044613):0.041225,10:0.099436):0.000452):0.006484):0.004848):0.031730,(19:0.078350,14:0.100626):0.038192):0.027338,(29:0.085263,((26:0.020983,27:0.038652):0.081372,28:0.043143):0.027775):0.014572):0.010296,((8:0.076936,(21:0.050649,(22:0.048357,(23:0.003868,(24:0.014915,25:0.010926):0.002884):0.033424):0.012562):0.036016):0.023 [...]
+   tree rep.6800000 = ((6:0.074154,(2:0.140736,((12:0.050775,11:0.037176):0.050897,((((21:0.040041,(22:0.031916,(25:0.004734,(23:0.014117,24:0.011577):0.003052):0.019922):0.013185):0.045495,8:0.075138):0.041435,((5:0.081463,20:0.117630):0.080126,13:0.059649):0.005508):0.011323,(((((9:0.094898,(18:0.027625,17:0.020201):0.044527):0.022209,4:0.179402):0.008250,((7:0.090804,(16:0.014186,15:0.027905):0.057746):0.009720,10:0.101038):0.002876):0.024400,(28:0.041801,(26:0.024605,27:0.020643):0.0 [...]
+   tree rep.6820000 = (6:0.094988,(3:0.132055,(((12:0.071340,11:0.019522):0.036511,((8:0.072512,(21:0.078461,(22:0.040788,(25:0.001366,(24:0.006486,23:0.026009):0.010970):0.045337):0.014027):0.027860):0.021888,(((((28:0.031519,(27:0.031432,26:0.025742):0.061545):0.016363,29:0.096447):0.010527,(20:0.072235,5:0.113927):0.087061):0.011687,((14:0.080970,19:0.059498):0.038713,((4:0.112373,((18:0.040776,17:0.021190):0.017410,9:0.109436):0.032765):0.019563,((7:0.104100,(15:0.028560,16:0.017616) [...]
+   tree rep.6840000 = (((2:0.129871,((11:0.031090,12:0.072758):0.049526,(((5:0.109925,20:0.084744):0.085644,(13:0.094380,(8:0.104168,((22:0.029348,(25:0.001517,(24:0.012590,23:0.015903):0.007629):0.027332):0.016206,21:0.028421):0.034631):0.013041):0.001157):0.000539,((((4:0.116185,(9:0.107014,(18:0.047408,17:0.013561):0.055104):0.020556):0.003018,(((16:0.026224,15:0.024088):0.033730,10:0.131807):0.006686,7:0.091856):0.001058):0.015111,(19:0.070142,14:0.087753):0.039699):0.018766,(((26:0. [...]
+   tree rep.6860000 = ((3:0.138262,6:0.072358):0.003471,((((((20:0.097938,5:0.103176):0.068166,13:0.095264):0.004181,((21:0.039542,((25:0.004401,(24:0.018852,23:0.008264):0.004561):0.028067,22:0.023477):0.008204):0.041584,8:0.106885):0.007118):0.004198,(((28:0.050224,(27:0.020911,26:0.026444):0.078469):0.013796,29:0.085456):0.013453,(((4:0.123396,((18:0.031175,17:0.020859):0.038092,9:0.099791):0.026475):0.000753,((7:0.086520,(15:0.043430,16:0.019729):0.062047):0.003672,10:0.103183):0.009 [...]
+   tree rep.6880000 = (6:0.080817,(3:0.111788,(((12:0.079540,11:0.041456):0.046542,((((14:0.097310,19:0.080411):0.029727,(10:0.089861,((4:0.114672,((18:0.030276,17:0.015941):0.050772,9:0.090425):0.021483):0.009339,((16:0.021128,15:0.037862):0.055354,7:0.100744):0.011730):0.004205):0.009916):0.006418,(29:0.082627,((26:0.017581,27:0.027741):0.074585,28:0.037077):0.023396):0.019881):0.015335,(13:0.116836,((5:0.084310,20:0.087659):0.114839,((((25:0.012246,(23:0.006444,24:0.009889):0.007286): [...]
+   tree rep.6900000 = (((((((19:0.092100,14:0.093717):0.032630,((28:0.052238,(26:0.018847,27:0.050441):0.072421):0.037076,29:0.099200):0.015070):0.004456,(((7:0.063165,(16:0.019997,15:0.031385):0.051759):0.006194,((9:0.123334,(18:0.043997,17:0.021529):0.063119):0.044832,4:0.101164):0.010492):0.012917,10:0.130305):0.027179):0.012519,((13:0.084550,(5:0.093080,20:0.080538):0.135473):0.010596,(8:0.083918,((22:0.036197,((23:0.011932,24:0.007131):0.005381,25:0.006301):0.031683):0.015668,21:0.0 [...]
+   tree rep.6920000 = ((3:0.133216,6:0.105907):0.004725,(2:0.176491,((((8:0.113351,((22:0.024216,(25:0.007729,(23:0.006190,24:0.017525):0.008255):0.018646):0.009936,21:0.032930):0.040004):0.016533,(13:0.076122,(5:0.119441,20:0.089310):0.091021):0.005257):0.000851,(((28:0.042555,(26:0.032654,27:0.037366):0.054600):0.030457,29:0.076895):0.010959,((19:0.062188,14:0.116437):0.058048,(((9:0.141535,(18:0.036433,17:0.018115):0.034027):0.016232,4:0.110265):0.012021,((7:0.094254,(16:0.031636,15:0 [...]
+   tree rep.6940000 = (6:0.085617,(3:0.128474,((((((19:0.088590,14:0.089330):0.021374,((10:0.085464,(((9:0.076100,(18:0.040705,17:0.022426):0.041686):0.045454,4:0.133112):0.001438,(16:0.027149,15:0.029671):0.052626):0.001369):0.005513,7:0.092214):0.020898):0.008262,(29:0.087503,((26:0.022175,27:0.035989):0.052302,28:0.035841):0.012044):0.016031):0.008024,((13:0.116577,(8:0.102776,(((25:0.009553,(24:0.020079,23:0.014257):0.001586):0.024929,22:0.029456):0.032545,21:0.026736):0.028204):0.02 [...]
+   tree rep.6960000 = (3:0.154577,((2:0.164855,(((((19:0.099149,14:0.098873):0.035623,((10:0.121740,(7:0.114499,(16:0.008402,15:0.051064):0.069002):0.008923):0.001113,(4:0.126839,(9:0.110632,(18:0.051152,17:0.037971):0.066705):0.025924):0.017457):0.024473):0.002768,(29:0.081376,((26:0.017037,27:0.029923):0.078415,28:0.049122):0.019523):0.004163):0.015650,(((5:0.084009,20:0.084513):0.082619,13:0.090196):0.010209,(8:0.102633,(21:0.060927,((25:0.018730,(23:0.012776,24:0.014292):0.006315):0. [...]
+   tree rep.6980000 = ((2:0.167217,((13:0.089876,((((((((18:0.024632,17:0.019066):0.065577,9:0.082113):0.019912,4:0.136664):0.020038,((16:0.025992,15:0.027467):0.046179,10:0.080372):0.008103):0.015381,7:0.100174):0.019222,(14:0.085951,19:0.080255):0.014189):0.009494,((29:0.105763,((26:0.023697,27:0.035144):0.061861,28:0.054219):0.036710):0.004844,(5:0.106631,20:0.098124):0.095660):0.002442):0.017158,((21:0.058506,(22:0.038340,((23:0.008067,24:0.009192):0.005627,25:0.004271):0.033405):0.0 [...]
+   tree rep.7000000 = (6:0.069005,(((((13:0.105406,((5:0.084463,20:0.084412):0.107337,((((24:0.013817,23:0.009410):0.007977,25:0.005513):0.029050,(21:0.056634,22:0.023034):0.003099):0.032194,8:0.130106):0.005565):0.017553):0.008453,(((10:0.107478,((16:0.019874,15:0.048546):0.082028,(((18:0.055249,17:0.013719):0.047138,9:0.114440):0.021760,4:0.126961):0.004266):0.003836):0.002310,7:0.082410):0.025602,((14:0.085248,19:0.070844):0.024094,(((26:0.036531,27:0.036319):0.083653,28:0.032558):0.0 [...]
+   tree rep.7020000 = ((((12:0.066271,11:0.030298):0.040901,((((((27:0.030939,26:0.022652):0.047094,28:0.040882):0.023349,29:0.082331):0.006948,(14:0.096487,19:0.108116):0.038220):0.002174,((13:0.075918,(8:0.094796,((22:0.035134,((24:0.008251,23:0.009157):0.002625,25:0.014924):0.025376):0.023974,21:0.038169):0.047602):0.016675):0.003863,(20:0.084323,5:0.084826):0.092081):0.008905):0.007510,((((15:0.047891,16:0.027486):0.029645,7:0.078007):0.007525,((9:0.123778,(18:0.039059,17:0.012934):0 [...]
+   tree rep.7040000 = (6:0.086444,(3:0.160729,(2:0.208091,((((((((17:0.021172,18:0.035438):0.055923,9:0.118397):0.011385,4:0.141604):0.014669,(10:0.143474,((15:0.036263,16:0.018434):0.042228,7:0.103213):0.008594):0.015559):0.034782,(((27:0.037052,26:0.032058):0.068127,28:0.047167):0.024866,29:0.094771):0.016737):0.000458,(14:0.079088,19:0.102113):0.041919):0.004867,((((22:0.060772,((23:0.006815,24:0.018389):0.003674,25:0.010491):0.021143):0.014957,21:0.032833):0.052740,8:0.097990):0.0137 [...]
+   tree rep.7060000 = ((((((((8:0.109646,(21:0.052056,((25:0.015180,(24:0.012539,23:0.013990):0.000513):0.027224,22:0.053611):0.024915):0.056840):0.030413,(5:0.099224,20:0.105644):0.080470):0.012104,13:0.118318):0.008420,((29:0.089391,(28:0.049029,(26:0.017925,27:0.022538):0.103144):0.019036):0.023467,((19:0.083013,14:0.075611):0.035899,((10:0.092518,((16:0.016673,15:0.036037):0.060243,(4:0.113075,(9:0.114250,(18:0.047000,17:0.023326):0.054073):0.015061):0.012716):0.003608):0.007112,7:0. [...]
+   tree rep.7080000 = ((3:0.129429,6:0.067990):0.012077,(2:0.163622,(((((5:0.099057,20:0.074482):0.086909,13:0.107486):0.004669,(((10:0.121456,((7:0.103222,(16:0.042099,15:0.038269):0.035190):0.015689,(((18:0.037405,17:0.015395):0.031476,9:0.141188):0.060472,4:0.170537):0.017112):0.004643):0.020087,(19:0.074225,14:0.075185):0.037467):0.003168,((28:0.068182,(26:0.029180,27:0.018645):0.058116):0.013930,29:0.081179):0.009259):0.011644):0.003430,(8:0.110259,((22:0.030748,(25:0.010487,(24:0.0 [...]
+   tree rep.7100000 = (3:0.146215,(6:0.095964,(((((20:0.123262,5:0.089086):0.098422,(((21:0.038572,(22:0.009503,(25:0.007085,(24:0.016433,23:0.004809):0.006401):0.030532):0.029215):0.043133,8:0.100415):0.007314,13:0.093968):0.009640):0.009146,((29:0.103370,(28:0.072714,(27:0.051423,26:0.035309):0.054585):0.012020):0.008122,(((((17:0.026583,18:0.042631):0.030327,9:0.076235):0.018262,4:0.138432):0.014314,(10:0.122395,((15:0.032952,16:0.032826):0.059631,7:0.112887):0.004663):0.006744):0.021 [...]
+   tree rep.7120000 = ((3:0.125489,6:0.105593):0.005089,((((8:0.103064,((((24:0.008237,23:0.010347):0.009558,25:0.008930):0.025042,22:0.035641):0.005300,21:0.075238):0.005730):0.020811,((13:0.088073,(5:0.085695,20:0.105540):0.084152):0.022614,(((((26:0.038907,27:0.032167):0.089487,28:0.060449):0.013032,29:0.100132):0.017102,(14:0.074381,19:0.069281):0.028872):0.001772,(((4:0.124758,((18:0.038846,17:0.015681):0.045367,9:0.096840):0.018681):0.015699,(7:0.113152,(16:0.025319,15:0.027884):0. [...]
+   tree rep.7140000 = ((6:0.078318,3:0.135912):0.013593,(2:0.145158,((12:0.083724,11:0.012632):0.073372,(((29:0.088254,(28:0.045506,(27:0.038485,26:0.014970):0.072699):0.017531):0.008165,((((4:0.147583,(9:0.092058,(18:0.037209,17:0.024539):0.037663):0.033204):0.013311,((15:0.026749,16:0.033396):0.059787,7:0.060652):0.009075):0.002338,10:0.092812):0.014566,(14:0.058314,19:0.088776):0.053385):0.002436):0.002934,((13:0.077504,(8:0.068029,((22:0.033734,(23:0.012763,(24:0.010500,25:0.006969): [...]
+   tree rep.7160000 = (((2:0.215709,((((((14:0.091612,19:0.080306):0.040651,29:0.110576):0.002108,((10:0.076872,((4:0.120276,((18:0.032686,17:0.026981):0.046077,9:0.118644):0.017069):0.025193,(16:0.023286,15:0.048963):0.045793):0.010222):0.009377,7:0.119233):0.039923):0.006437,(28:0.036489,(26:0.032101,27:0.028264):0.059465):0.022836):0.010129,(((21:0.038295,(22:0.031896,((23:0.011741,24:0.003619):0.001200,25:0.007993):0.027044):0.038065):0.033337,8:0.101643):0.016500,((5:0.114551,20:0.0 [...]
+   tree rep.7180000 = ((3:0.115765,6:0.069645):0.014537,((((((8:0.089788,(21:0.040090,(22:0.025384,((23:0.009393,24:0.011695):0.011625,25:0.001786):0.019499):0.009854):0.024381):0.021570,((29:0.102049,(28:0.048439,(26:0.028496,27:0.022327):0.070586):0.017211):0.010576,((7:0.099786,((((16:0.021786,15:0.038171):0.055745,4:0.120217):0.003411,((18:0.038998,17:0.017097):0.036007,9:0.102554):0.031895):0.000252,10:0.077836):0.009770):0.012737,(14:0.093599,19:0.061622):0.025793):0.013911):0.0202 [...]
+   tree rep.7200000 = (3:0.157197,((((((13:0.095291,(8:0.085243,(21:0.042559,(((25:0.009117,24:0.014405):0.005548,23:0.002554):0.031246,22:0.029520):0.015588):0.031322):0.025534):0.002898,(5:0.074873,20:0.093991):0.119031):0.012556,(((28:0.033880,(26:0.024173,27:0.025746):0.084709):0.035216,29:0.089455):0.032392,((7:0.114263,(10:0.092064,((4:0.101755,((18:0.031459,17:0.017857):0.047692,9:0.113071):0.026686):0.021157,(16:0.022585,15:0.035314):0.073935):0.004112):0.008331):0.010781,(14:0.0 [...]
+   tree rep.7220000 = ((3:0.108181,6:0.081827):0.014432,(((((((14:0.083079,19:0.081875):0.026328,29:0.101990):0.006181,((7:0.081602,((10:0.120641,(15:0.038985,16:0.037928):0.045435):0.000782,((9:0.106290,(18:0.038658,17:0.031578):0.055653):0.021404,4:0.132525):0.004587):0.011204):0.023104,(28:0.044046,(27:0.033663,26:0.025382):0.071945):0.020396):0.008562):0.014470,(13:0.079103,(20:0.114034,5:0.073191):0.083411):0.006967):0.003625,((21:0.064699,(22:0.030859,((24:0.021799,23:0.011349):0.0 [...]
+   tree rep.7240000 = (((6:0.123492,(((13:0.106733,((8:0.110328,(21:0.022534,(22:0.035780,((24:0.008477,23:0.010805):0.013407,25:0.006291):0.062473):0.024298):0.045447):0.034700,(5:0.115306,20:0.094183):0.117544):0.001355):0.003153,(((29:0.135150,(28:0.046630,(26:0.047024,27:0.028063):0.045120):0.027058):0.014541,(7:0.092835,(((9:0.103102,(18:0.032808,17:0.028714):0.054301):0.017891,4:0.129827):0.010109,(10:0.107601,(16:0.026100,15:0.033081):0.055046):0.008009):0.016429):0.009651):0.0020 [...]
+   tree rep.7260000 = ((3:0.200777,((((((20:0.108771,5:0.143270):0.101071,(13:0.101203,(8:0.113021,((22:0.057603,((23:0.011476,24:0.007870):0.009564,25:0.001283):0.014633):0.010460,21:0.050772):0.044217):0.035574):0.022703):0.004246,((29:0.075614,(14:0.085548,19:0.103611):0.040164):0.004547,((27:0.030320,26:0.011651):0.049414,28:0.052904):0.033513):0.015590):0.008866,(7:0.096926,((15:0.049607,16:0.039896):0.056045,(10:0.100447,(4:0.153053,(9:0.112082,(18:0.021619,17:0.026543):0.067960):0 [...]
+   tree rep.7280000 = (6:0.125155,(3:0.124879,((((((14:0.087074,19:0.060849):0.035196,(29:0.091310,(28:0.033561,(26:0.023633,27:0.040553):0.062786):0.032088):0.017788):0.021659,(((5:0.112008,20:0.042520):0.089280,13:0.081140):0.012292,(8:0.112761,((22:0.035533,((23:0.010128,24:0.014448):0.007267,25:0.016990):0.026146):0.015303,21:0.046492):0.030375):0.022628):0.007525):0.004151,((4:0.119636,(9:0.102023,(18:0.039354,17:0.030039):0.051806):0.008542):0.005925,((7:0.106057,(16:0.019203,15:0. [...]
+   tree rep.7300000 = ((3:0.076943,6:0.106017):0.015658,(2:0.173100,((11:0.022865,12:0.069409):0.050153,((8:0.072951,(21:0.046301,(22:0.031799,(23:0.018993,(24:0.012650,25:0.010230):0.001077):0.036474):0.026667):0.024953):0.013436,((13:0.067515,((29:0.082645,((27:0.030512,26:0.042426):0.068568,28:0.045608):0.027532):0.007531,(((10:0.117222,((15:0.036691,16:0.019944):0.053517,(4:0.104677,((17:0.020167,18:0.029029):0.046329,9:0.104809):0.025328):0.008993):0.002228):0.009126,7:0.104034):0.0 [...]
+   tree rep.7320000 = (6:0.092246,(3:0.114460,(((((13:0.077114,((((24:0.007655,23:0.009138):0.006606,25:0.006426):0.019724,(21:0.053739,22:0.038595):0.004723):0.065494,8:0.069026):0.007974):0.005202,(((4:0.108312,((18:0.041154,17:0.015651):0.056032,9:0.105953):0.018146):0.013981,((7:0.090431,(16:0.028237,15:0.031897):0.063010):0.010405,10:0.085621):0.005905):0.031460,((29:0.097473,(14:0.059688,19:0.087652):0.030200):0.007516,((26:0.021593,27:0.042496):0.069172,28:0.042271):0.019582):0.00 [...]
+   tree rep.7340000 = (6:0.075557,((2:0.129833,(((8:0.088939,(21:0.054958,(((23:0.015871,24:0.016762):0.006340,25:0.005937):0.022549,22:0.019850):0.021023):0.018053):0.023024,((((((10:0.096249,(16:0.023331,15:0.041285):0.042187):0.010552,(4:0.116254,(9:0.124907,(18:0.038110,17:0.018885):0.045244):0.034402):0.004858):0.010629,7:0.088692):0.018052,(29:0.075119,((26:0.013453,27:0.034470):0.064765,28:0.049804):0.015635):0.013800):0.001924,(14:0.080237,19:0.075978):0.026398):0.005484,(13:0.08 [...]
+   tree rep.7360000 = (2:0.165416,(((((((21:0.034312,(((24:0.011594,23:0.005622):0.001218,25:0.005884):0.019227,22:0.025821):0.011321):0.059273,8:0.088786):0.021124,(5:0.136096,20:0.082889):0.075537):0.000190,13:0.090280):0.005039,(((14:0.109743,19:0.097454):0.014778,((28:0.033833,(26:0.034447,27:0.028839):0.091260):0.013882,29:0.100939):0.018188):0.002596,((4:0.108345,(9:0.067022,(18:0.014653,17:0.021073):0.049757):0.035152):0.011229,((7:0.108191,(16:0.017224,15:0.043409):0.026247):0.00 [...]
+   tree rep.7380000 = (((((((((10:0.093507,((16:0.031874,15:0.043863):0.043573,7:0.100921):0.018071):0.003554,((9:0.115782,(18:0.027842,17:0.024726):0.065254):0.035798,4:0.146618):0.010749):0.022436,(((28:0.052134,(26:0.026432,27:0.049596):0.061911):0.016377,29:0.079239):0.013608,(14:0.113347,19:0.077149):0.085393):0.012251):0.005803,(((22:0.028263,(25:0.008897,(24:0.009159,23:0.012693):0.004803):0.035554):0.018271,21:0.043722):0.027291,8:0.087395):0.048780):0.001854,((5:0.092378,20:0.07 [...]
+   tree rep.7400000 = (2:0.196508,((3:0.122936,6:0.072580):0.026240,((12:0.063512,11:0.034245):0.046059,(((29:0.076048,((27:0.024680,26:0.050493):0.058892,28:0.030297):0.022849):0.013066,((19:0.079041,14:0.063907):0.035210,((((15:0.021659,16:0.030057):0.041009,(((18:0.042107,17:0.015863):0.051125,9:0.104978):0.017360,4:0.128328):0.006431):0.001739,10:0.104038):0.017140,7:0.133861):0.011073):0.008997):0.010987,(((8:0.092878,(((25:0.004378,(24:0.009827,23:0.012724):0.005547):0.016218,22:0. [...]
+   tree rep.7420000 = (3:0.108003,(6:0.082262,(((11:0.042289,12:0.047355):0.027880,(((14:0.086421,19:0.092593):0.040204,(((28:0.046648,(26:0.031375,27:0.024571):0.041291):0.027098,29:0.104825):0.034685,((((22:0.041515,(25:0.005484,(24:0.009213,23:0.006743):0.002811):0.038411):0.014891,21:0.066367):0.033592,8:0.088219):0.034588,((5:0.079443,20:0.124502):0.077411,13:0.117141):0.007698):0.014793):0.014398):0.006677,(10:0.102744,(((9:0.096303,(18:0.027222,17:0.012900):0.054307):0.030753,4:0. [...]
+   tree rep.7440000 = ((((12:0.074510,11:0.030482):0.043104,((((20:0.083695,5:0.093055):0.113176,13:0.093258):0.005457,(8:0.060422,((22:0.027448,((23:0.012550,24:0.012347):0.006148,25:0.011974):0.017135):0.030496,21:0.045759):0.038920):0.032984):0.009420,(((19:0.108770,14:0.146536):0.007112,(((7:0.091012,(15:0.039383,16:0.025115):0.058717):0.006659,10:0.127735):0.003068,(4:0.122221,((18:0.029500,17:0.016295):0.062304,9:0.100503):0.026873):0.011577):0.011889):0.008600,((28:0.044758,(27:0. [...]
+   tree rep.7460000 = ((3:0.096530,(2:0.160531,((12:0.061485,11:0.021134):0.071984,((((28:0.070264,(26:0.021010,27:0.068511):0.070534):0.021373,29:0.085710):0.015159,((14:0.109085,19:0.091622):0.030650,((((4:0.117201,((18:0.047508,17:0.015159):0.057069,9:0.117381):0.026241):0.017674,(16:0.031214,15:0.032962):0.056812):0.002900,10:0.099251):0.020297,7:0.091227):0.015683):0.002918):0.018344,(13:0.097503,((((22:0.044126,((24:0.011182,23:0.007227):0.003796,25:0.016405):0.022969):0.019326,21: [...]
+   tree rep.7480000 = ((3:0.136928,(2:0.180901,((11:0.034392,12:0.081270):0.040400,((((8:0.117995,(22:0.024378,(((24:0.009346,23:0.003374):0.002575,25:0.011324):0.038674,21:0.060449):0.003788):0.078833):0.011714,13:0.083930):0.000731,(5:0.113932,20:0.105883):0.096414):0.004419,(((14:0.147092,19:0.102766):0.023765,(7:0.142256,((((18:0.025576,17:0.036716):0.059272,9:0.099133):0.041172,(4:0.134733,10:0.101796):0.009599):0.000714,(16:0.033582,15:0.036506):0.062651):0.002418):0.033071):0.0100 [...]
+   tree rep.7500000 = ((6:0.093052,(2:0.178340,((((20:0.090616,5:0.102879):0.084609,((8:0.077040,(((25:0.008226,(24:0.013501,23:0.008551):0.006516):0.018661,22:0.037451):0.021235,21:0.066102):0.015832):0.027106,13:0.112067):0.000718):0.001561,(((((15:0.036958,16:0.021560):0.070562,(7:0.119618,10:0.088249):0.002532):0.004440,(4:0.128465,((17:0.027620,18:0.033484):0.042660,9:0.123609):0.010083):0.024501):0.022662,(19:0.097270,14:0.084974):0.031574):0.002830,(((27:0.032604,26:0.035548):0.06 [...]
+   tree rep.7520000 = (((2:0.130619,(((8:0.098863,((22:0.024202,(25:0.006827,(24:0.007155,23:0.007829):0.008161):0.054499):0.007687,21:0.047031):0.059088):0.018597,(((14:0.094577,19:0.065449):0.041397,((((27:0.039798,26:0.030400):0.084754,28:0.030392):0.031842,29:0.075192):0.012704,((10:0.078093,((4:0.117056,((17:0.019972,18:0.033880):0.043998,9:0.095556):0.007912):0.007742,(15:0.051963,16:0.020742):0.045753):0.000420):0.014103,7:0.117940):0.011230):0.002610):0.005383,((20:0.079562,5:0.0 [...]
+   tree rep.7540000 = (((((((((((9:0.094358,(18:0.049169,17:0.018757):0.036652):0.017393,4:0.110838):0.025647,(16:0.019867,15:0.047884):0.048583):0.005814,10:0.079805):0.009941,7:0.108550):0.011648,((28:0.045484,(26:0.032923,27:0.036031):0.050188):0.017912,29:0.093329):0.004633):0.001352,(14:0.087606,19:0.087038):0.031327):0.009370,((((22:0.053966,(25:0.004572,(23:0.009821,24:0.010523):0.003632):0.005639):0.014402,21:0.032831):0.027147,8:0.107264):0.024265,(13:0.102401,(5:0.087451,20:0.1 [...]
+   tree rep.7560000 = (((((13:0.112933,(5:0.108470,20:0.081569):0.054795):0.002622,(((29:0.096922,(28:0.037401,(26:0.023073,27:0.032964):0.072534):0.026131):0.011963,((14:0.126127,19:0.100225):0.022967,(((9:0.102877,(18:0.031061,17:0.014374):0.044957):0.018544,4:0.159446):0.011063,(10:0.097080,((16:0.041273,15:0.036529):0.062449,7:0.121610):0.002872):0.004028):0.021576):0.003769):0.012391,(8:0.069738,(((25:0.007266,(23:0.009170,24:0.009377):0.000601):0.010885,22:0.025121):0.004922,21:0.0 [...]
+   tree rep.7580000 = (((2:0.193933,((12:0.069383,11:0.025968):0.054553,(((14:0.101167,19:0.058362):0.076737,((10:0.119589,((16:0.016311,15:0.027255):0.045208,7:0.157786):0.007273):0.004947,(((18:0.050464,17:0.011828):0.054082,9:0.116219):0.024353,4:0.125001):0.017708):0.026995):0.000347,((((26:0.019600,27:0.025920):0.049524,28:0.046077):0.014035,29:0.084598):0.025110,((13:0.084902,(5:0.100138,20:0.093141):0.065875):0.011358,(8:0.084657,((22:0.025012,(25:0.007289,(24:0.009993,23:0.016953 [...]
+   tree rep.7600000 = (((((((4:0.110569,(9:0.088003,(18:0.050115,17:0.018350):0.060527):0.035620):0.010955,(((15:0.039216,16:0.031599):0.049095,7:0.111589):0.014036,10:0.086809):0.009763):0.031614,((29:0.093874,((27:0.034503,26:0.026340):0.059729,28:0.041309):0.011705):0.010521,(19:0.081772,14:0.115839):0.022676):0.000783):0.004644,(((20:0.096485,5:0.085796):0.105052,13:0.089299):0.005882,(8:0.099540,(((25:0.001673,(24:0.009273,23:0.008611):0.008511):0.021005,22:0.054327):0.042305,21:0.0 [...]
+   tree rep.7620000 = (2:0.184287,((((((20:0.108416,5:0.096080):0.087297,13:0.076858):0.015943,((((22:0.057099,(25:0.003659,(23:0.011268,24:0.016450):0.008278):0.016423):0.010911,21:0.053476):0.046573,8:0.107675):0.021340,(((((15:0.042035,16:0.017014):0.050245,7:0.083010):0.008075,10:0.092130):0.007871,(((17:0.015889,18:0.035119):0.050132,9:0.086909):0.035742,4:0.132167):0.007675):0.029552,((29:0.096147,((27:0.034448,26:0.025715):0.086412,28:0.053221):0.014943):0.013707,(14:0.077816,19:0 [...]
+   tree rep.7640000 = (6:0.099572,((((13:0.113808,(((21:0.069623,(((24:0.006507,23:0.010269):0.005459,25:0.009590):0.012215,22:0.030326):0.023224):0.033959,8:0.106283):0.028597,((20:0.096866,5:0.103000):0.123663,((10:0.091023,(((9:0.129291,(18:0.039660,17:0.012450):0.035739):0.045377,(7:0.125176,(15:0.028173,16:0.034819):0.033980):0.001655):0.001844,4:0.191238):0.004874):0.033118,((19:0.120786,14:0.106700):0.038533,(((27:0.033489,26:0.019009):0.086643,28:0.032973):0.051789,29:0.103991):0 [...]
+   tree rep.7660000 = (3:0.129789,(6:0.088063,(2:0.177466,(((((((24:0.010332,23:0.009259):0.006274,25:0.010696):0.035581,22:0.028788):0.017056,21:0.027962):0.033046,8:0.071189):0.041389,((((19:0.103446,14:0.110954):0.044929,(((27:0.028843,26:0.023877):0.056803,28:0.045276):0.017877,29:0.097009):0.016315):0.010371,((20:0.070007,5:0.116476):0.081135,(((10:0.089684,((9:0.120909,(18:0.038225,17:0.031749):0.038621):0.022791,4:0.169465):0.007717):0.005179,(15:0.037911,16:0.032707):0.050126):0. [...]
+   tree rep.7680000 = ((3:0.110647,6:0.070060):0.027170,(2:0.185339,((11:0.034567,12:0.057455):0.050890,(((13:0.086759,(5:0.102035,20:0.093190):0.088723):0.002659,(((22:0.031278,((24:0.014549,23:0.010561):0.013322,25:0.010497):0.022941):0.002598,21:0.040049):0.044764,8:0.082815):0.025890):0.002855,(((29:0.108768,((26:0.026025,27:0.024069):0.079514,28:0.053229):0.023334):0.009239,(14:0.094949,19:0.100017):0.038384):0.010890,(((7:0.070961,(16:0.037968,15:0.026699):0.097769):0.012346,((9:0. [...]
+   tree rep.7700000 = (3:0.137020,((2:0.258955,(((13:0.114879,(((19:0.082557,14:0.091350):0.041952,(((15:0.025991,16:0.032703):0.058308,(10:0.129090,7:0.073255):0.039264):0.008422,(((18:0.031364,17:0.034495):0.030878,9:0.095023):0.033472,4:0.157540):0.007774):0.018891):0.015843,(((28:0.064790,(27:0.033327,26:0.018787):0.068254):0.017710,29:0.115466):0.002722,(20:0.092956,5:0.142897):0.146541):0.005975):0.010916):0.002307,(8:0.115883,(21:0.055965,(22:0.031880,(25:0.007557,(24:0.010793,23: [...]
+   tree rep.7720000 = ((2:0.160860,((11:0.039743,12:0.070114):0.022005,(((10:0.114246,(7:0.083608,(15:0.034644,16:0.019070):0.047472):0.001338):0.009145,(((17:0.024806,18:0.032049):0.032060,9:0.110057):0.037157,4:0.118223):0.006134):0.033124,(((13:0.091677,(20:0.072064,5:0.116785):0.078578):0.025982,(8:0.089923,(21:0.059245,((25:0.007055,(24:0.004863,23:0.011676):0.012744):0.023984,22:0.031453):0.022462):0.009335):0.033153):0.002230,((14:0.094032,19:0.074360):0.041486,((28:0.047758,(27:0 [...]
+   tree rep.7740000 = ((((11:0.032551,12:0.088933):0.042801,(((((4:0.119751,(9:0.094335,(18:0.038546,17:0.018888):0.052189):0.010332):0.003460,((16:0.042988,15:0.041803):0.037185,7:0.113244):0.015963):0.009760,10:0.105336):0.025784,((14:0.108176,19:0.078363):0.014186,(((26:0.029502,27:0.045834):0.053986,28:0.059651):0.032439,29:0.101702):0.023347):0.011298):0.003942,((13:0.089663,(5:0.110536,20:0.074861):0.083005):0.003603,(8:0.141765,((22:0.016791,((23:0.008696,24:0.004095):0.002638,25: [...]
+   tree rep.7760000 = (3:0.152322,((((11:0.045494,12:0.073355):0.042025,((8:0.106564,(21:0.061101,((25:0.011451,(24:0.004894,23:0.012885):0.002541):0.027857,22:0.034510):0.029985):0.028067):0.016685,((((((9:0.081088,(18:0.033356,17:0.026144):0.055117):0.008090,4:0.107686):0.011127,(10:0.087426,((16:0.022538,15:0.030869):0.067620,7:0.076690):0.011447):0.007725):0.020716,(14:0.084260,19:0.068596):0.022353):0.008147,(((26:0.026000,27:0.020734):0.064574,28:0.039364):0.015201,29:0.105256):0.0 [...]
+   tree rep.7780000 = ((3:0.152315,6:0.109984):0.016950,(((11:0.023100,12:0.069138):0.055913,(((10:0.111938,(7:0.120648,(16:0.018802,15:0.049874):0.040720):0.012182):0.009864,((9:0.089927,(18:0.032117,17:0.012822):0.057150):0.025507,4:0.099190):0.012113):0.028240,(((8:0.119224,(((25:0.006512,(24:0.005601,23:0.009476):0.006691):0.023258,22:0.060758):0.005367,21:0.063248):0.019543):0.027409,13:0.099908):0.007175,(((5:0.106342,20:0.121469):0.155180,(14:0.079120,19:0.093590):0.028625):0.0113 [...]
+   tree rep.7800000 = ((2:0.177488,((12:0.075958,11:0.029298):0.039882,((13:0.078926,(8:0.080467,((22:0.037266,(25:0.007693,(23:0.010628,24:0.012135):0.011760):0.034915):0.014719,21:0.057218):0.041315):0.026262):0.016504,(((7:0.074332,(10:0.132889,((((18:0.031179,17:0.014335):0.051561,9:0.099366):0.009066,4:0.121852):0.020240,(15:0.036035,16:0.037478):0.097892):0.003815):0.020053):0.019512,((28:0.051085,(27:0.049411,26:0.021423):0.080321):0.041143,29:0.108398):0.020653):0.002373,((19:0.0 [...]
+   tree rep.7820000 = ((((12:0.068643,11:0.036348):0.040204,(((((((15:0.050765,16:0.016671):0.065740,7:0.121337):0.004969,(4:0.155288,((18:0.039245,17:0.020395):0.045032,9:0.097578):0.016386):0.021345):0.023940,10:0.094428):0.018168,(19:0.054450,14:0.111920):0.041466):0.007038,((28:0.037372,(27:0.034273,26:0.024514):0.108920):0.017381,29:0.105518):0.011740):0.012129,(13:0.079012,((8:0.103232,(((25:0.005626,(24:0.005829,23:0.008522):0.011716):0.017488,22:0.036964):0.023784,21:0.038044):0. [...]
+   tree rep.7840000 = (3:0.077515,(6:0.092606,(2:0.183131,((11:0.034318,12:0.060955):0.041279,((8:0.114427,(21:0.044215,(22:0.036593,(23:0.011450,(25:0.014719,24:0.010044):0.002859):0.016549):0.036281):0.026705):0.011729,((13:0.120132,(20:0.063344,5:0.084762):0.044177):0.008012,((((((15:0.033987,16:0.027206):0.054091,7:0.086955):0.008456,10:0.094880):0.009524,(4:0.109566,(9:0.103009,(17:0.031213,18:0.028475):0.043270):0.017062):0.026926):0.018778,(29:0.108265,(28:0.042648,(27:0.038541,26 [...]
+   tree rep.7860000 = (6:0.081877,((((12:0.050336,11:0.041470):0.046236,(((((22:0.019213,((23:0.010329,24:0.004069):0.002811,25:0.008412):0.037978):0.018846,21:0.033440):0.018786,8:0.106307):0.037311,(((19:0.066151,14:0.122796):0.028978,((10:0.080921,((4:0.116298,((18:0.048991,17:0.009863):0.066841,9:0.067018):0.013235):0.011837,(15:0.030756,16:0.030662):0.041615):0.006784):0.011067,7:0.080069):0.027089):0.002333,(29:0.069058,(28:0.038421,(27:0.035061,26:0.035675):0.081576):0.020630):0.0 [...]
+   tree rep.7880000 = ((3:0.107817,6:0.087187):0.005187,(((12:0.040294,11:0.024756):0.033239,((7:0.122021,(10:0.093430,((16:0.028112,15:0.038968):0.054688,(4:0.149863,((18:0.040918,17:0.013384):0.037573,9:0.122440):0.015669):0.014310):0.009572):0.009459):0.023926,((13:0.084334,((29:0.106204,((26:0.026900,27:0.050219):0.052602,28:0.067493):0.024496):0.002418,(14:0.110533,19:0.073279):0.046575):0.011874):0.006562,((5:0.117196,20:0.086352):0.083205,((((25:0.004144,(23:0.004223,24:0.007033): [...]
+   tree rep.7900000 = ((3:0.122964,6:0.073225):0.003136,(((12:0.060386,11:0.025529):0.052986,(((5:0.094328,20:0.077461):0.079324,((((22:0.030346,(25:0.009731,(24:0.015722,23:0.011292):0.005819):0.023902):0.014793,21:0.050269):0.035669,8:0.071104):0.029788,13:0.118044):0.003051):0.005972,(((28:0.046492,(26:0.030810,27:0.039231):0.086343):0.026774,29:0.101226):0.007424,((14:0.065119,19:0.076428):0.030835,(((((18:0.031627,17:0.030504):0.016603,9:0.093456):0.030356,4:0.122927):0.005212,((16: [...]
+   tree rep.7920000 = ((6:0.083331,(2:0.144021,((12:0.086845,11:0.010691):0.056051,(((20:0.052080,5:0.104780):0.086094,((((((24:0.007957,23:0.012536):0.001511,25:0.028383):0.027923,22:0.042059):0.015485,21:0.028131):0.046740,8:0.111839):0.018418,13:0.080971):0.006163):0.005804,(((19:0.071172,14:0.079742):0.026795,(((4:0.093946,((18:0.026597,17:0.006796):0.047275,9:0.117413):0.021320):0.016330,(15:0.055509,16:0.024387):0.055047):0.008489,(7:0.103952,10:0.095896):0.007536):0.015642):0.0096 [...]
+   tree rep.7940000 = ((3:0.134391,6:0.089548):0.003934,(((11:0.031925,12:0.069006):0.070333,(((((26:0.034344,27:0.025448):0.074393,28:0.061642):0.039810,29:0.107444):0.006038,((7:0.078382,(10:0.106053,((16:0.024101,15:0.039595):0.046755,(4:0.106790,((18:0.036092,17:0.012776):0.054274,9:0.110860):0.032571):0.017854):0.004350):0.025016):0.028521,(14:0.106835,19:0.067241):0.038568):0.009551):0.006051,((13:0.102989,(5:0.077165,20:0.108262):0.100025):0.009724,((21:0.038688,(22:0.055728,((24: [...]
+   tree rep.7960000 = ((3:0.152204,(2:0.193160,((12:0.060605,11:0.020551):0.067450,((8:0.115970,(21:0.052465,(((24:0.012595,23:0.016358):0.009542,25:0.014466):0.016452,22:0.032272):0.021499):0.029356):0.040108,(((5:0.093044,20:0.141655):0.083391,13:0.094739):0.014345,((14:0.082961,19:0.093599):0.018232,(((28:0.049288,(26:0.026112,27:0.037403):0.085173):0.024246,29:0.078444):0.025579,((10:0.132742,(((16:0.029296,15:0.035695):0.041956,4:0.143973):0.004849,((18:0.041056,17:0.025448):0.03101 [...]
+   tree rep.7980000 = ((2:0.156693,((12:0.074944,11:0.046933):0.047803,(((8:0.078311,(((25:0.005409,(23:0.005123,24:0.010303):0.000904):0.028720,22:0.034741):0.022736,21:0.057629):0.032353):0.017017,(13:0.099619,(20:0.086450,5:0.082920):0.097487):0.000281):0.004139,((19:0.093409,14:0.077587):0.029041,(((4:0.127433,((18:0.054339,17:0.011718):0.047421,9:0.113576):0.027134):0.017534,((10:0.070252,7:0.086712):0.009590,(15:0.054760,16:0.020630):0.054066):0.003960):0.021058,(29:0.075946,(28:0. [...]
+   tree rep.8000000 = ((6:0.096687,(2:0.162273,((12:0.044867,11:0.037978):0.029625,((8:0.075276,((22:0.032179,(25:0.011555,(24:0.012298,23:0.018309):0.012525):0.033481):0.023966,21:0.048398):0.008737):0.018093,((((((10:0.099500,(15:0.038204,16:0.041798):0.043380):0.012214,7:0.116440):0.005283,(((18:0.025897,17:0.022301):0.048830,9:0.136396):0.036092,4:0.133253):0.007984):0.028229,((28:0.069723,(27:0.041520,26:0.016178):0.064428):0.018319,29:0.073665):0.010032):0.002142,(19:0.080988,14:0. [...]
+   tree rep.8020000 = (6:0.091029,(3:0.148413,((((5:0.144142,20:0.093125):0.101679,(((((22:0.036426,(23:0.005081,(24:0.006162,25:0.015905):0.003225):0.024865):0.006746,21:0.048483):0.055874,8:0.061768):0.021041,13:0.122251):0.011869,((19:0.068442,14:0.091367):0.026371,((((26:0.019495,27:0.031636):0.092881,28:0.037277):0.018090,29:0.109727):0.019159,((((16:0.027218,15:0.049317):0.044216,(4:0.122042,((18:0.038229,17:0.015575):0.037257,9:0.094234):0.026467):0.014419):0.004207,10:0.079453):0 [...]
+   tree rep.8040000 = ((3:0.115775,6:0.060377):0.007568,(2:0.156783,(((((((26:0.039044,27:0.030991):0.070084,28:0.040728):0.020076,29:0.075775):0.018044,((14:0.108808,19:0.086884):0.030615,((4:0.141995,((18:0.042018,17:0.025343):0.046556,9:0.094623):0.023429):0.006788,(((16:0.047564,15:0.038688):0.049233,7:0.085903):0.002246,10:0.081612):0.006741):0.020057):0.012012):0.025738,(((((24:0.013890,23:0.009753):0.004481,25:0.002926):0.023435,22:0.032336):0.022475,21:0.062092):0.028530,8:0.0884 [...]
+   tree rep.8060000 = ((2:0.139005,((((5:0.118000,20:0.078827):0.083871,13:0.069540):0.009527,((8:0.104773,((22:0.021794,((23:0.015363,24:0.008888):0.003457,25:0.014396):0.037329):0.010509,21:0.047231):0.019713):0.019895,(((((((16:0.023736,15:0.036152):0.039975,(((18:0.030202,17:0.020497):0.041981,9:0.091230):0.022757,4:0.094590):0.022169):0.011013,10:0.102448):0.003708,7:0.120597):0.023398,(14:0.089326,19:0.069401):0.039420):0.013029,(28:0.046131,(26:0.020378,27:0.044997):0.071832):0.03 [...]
+   tree rep.8080000 = ((3:0.094434,6:0.099612):0.010094,((((((14:0.078360,19:0.064756):0.029683,(10:0.120855,((7:0.073991,(16:0.021625,15:0.032906):0.050280):0.020421,(((18:0.026085,17:0.026372):0.033195,9:0.107784):0.024081,4:0.126234):0.018999):0.002734):0.017174):0.010581,(29:0.097605,(28:0.056629,(26:0.025151,27:0.028729):0.070133):0.013958):0.015010):0.013775,((13:0.076698,(5:0.079679,20:0.114776):0.075327):0.022364,(8:0.098441,((22:0.021234,(25:0.013870,(23:0.013945,24:0.006065):0. [...]
+   tree rep.8100000 = ((6:0.108202,3:0.148896):0.013963,((((5:0.113246,20:0.118214):0.089256,(((((18:0.026634,17:0.022727):0.045575,9:0.135071):0.028197,4:0.181000):0.007372,(10:0.115251,(7:0.115306,(16:0.025542,15:0.061501):0.059993):0.012114):0.006673):0.026382,(((28:0.051740,(26:0.039108,27:0.030581):0.065852):0.015811,29:0.135740):0.022865,(((8:0.107999,(21:0.042141,((25:0.014545,(23:0.013877,24:0.024026):0.007876):0.015030,22:0.043016):0.013693):0.031999):0.032687,13:0.077942):0.010 [...]
+   tree rep.8120000 = ((((((((22:0.036741,((23:0.007764,24:0.013133):0.008890,25:0.004441):0.022538):0.002189,21:0.041247):0.031960,8:0.060532):0.068542,13:0.093549):0.002297,((((5:0.080202,20:0.083817):0.087551,(29:0.113111,((26:0.031991,27:0.029342):0.060862,28:0.064275):0.023487):0.003826):0.010515,(19:0.081370,14:0.089919):0.037162):0.000173,(((7:0.096105,(16:0.024637,15:0.038420):0.044463):0.009155,10:0.110250):0.011624,((9:0.108483,(18:0.024958,17:0.025295):0.032653):0.019073,4:0.1 [...]
+   tree rep.8140000 = ((2:0.172399,(((((5:0.082059,20:0.120624):0.089326,13:0.096073):0.006234,(((22:0.040738,(25:0.016670,(23:0.007159,24:0.008896):0.000058):0.029697):0.020358,21:0.034718):0.040035,8:0.099176):0.015791):0.001488,((29:0.085283,((26:0.016158,27:0.038297):0.091186,28:0.042428):0.019569):0.017668,(((4:0.102217,(9:0.085951,(18:0.042970,17:0.017894):0.039049):0.018814):0.014930,(((16:0.008990,15:0.044303):0.056035,7:0.095160):0.004527,10:0.116775):0.007605):0.030374,(19:0.07 [...]
+   tree rep.8160000 = ((6:0.114499,((((((21:0.056397,(25:0.008434,(24:0.015370,23:0.013515):0.004420):0.047062):0.011374,22:0.047809):0.019988,8:0.113409):0.020526,((((14:0.095172,19:0.101086):0.021117,(29:0.097875,((27:0.032898,26:0.031412):0.061585,28:0.048360):0.029629):0.012689):0.002405,(((7:0.112346,(15:0.038634,16:0.037389):0.064574):0.006439,(4:0.129219,((18:0.040525,17:0.028300):0.040985,9:0.104129):0.025869):0.027508):0.004590,10:0.120374):0.019679):0.006490,((20:0.108037,5:0.1 [...]
+   tree rep.8180000 = ((3:0.132561,6:0.072224):0.004980,((((((4:0.122296,(9:0.128116,(18:0.029632,17:0.026956):0.051063):0.014717):0.016485,(10:0.120990,(7:0.082735,(16:0.014326,15:0.062100):0.068871):0.007678):0.001207):0.030528,((29:0.111298,(((26:0.020385,27:0.045581):0.051594,28:0.053889):0.023595,(19:0.066276,14:0.087362):0.031285):0.003584):0.005380,(5:0.091929,20:0.101606):0.085090):0.005824):0.013234,(((21:0.067968,((25:0.003039,(23:0.008934,24:0.011492):0.004474):0.027094,22:0.0 [...]
+   tree rep.8200000 = ((6:0.077533,3:0.110545):0.007552,(((12:0.063456,11:0.036619):0.065076,((((14:0.090100,19:0.076626):0.031990,(29:0.077618,((27:0.038774,26:0.042024):0.049568,28:0.049161):0.030340):0.010601):0.005694,(((7:0.103886,(15:0.030929,16:0.031201):0.027026):0.023109,(((18:0.043542,17:0.021362):0.016192,9:0.131173):0.028575,4:0.115323):0.020341):0.001911,10:0.084988):0.020429):0.006988,((20:0.076945,5:0.143164):0.075092,((8:0.077672,(((25:0.005726,(24:0.009423,23:0.008059):0 [...]
+   tree rep.8220000 = ((3:0.126108,6:0.089899):0.009508,((((5:0.118687,20:0.074357):0.124703,((((29:0.112469,(28:0.049503,(26:0.035156,27:0.032180):0.081046):0.011345):0.011701,(((((18:0.043624,17:0.015730):0.069149,9:0.093179):0.019192,4:0.140783):0.002252,(((16:0.022756,15:0.054515):0.046116,7:0.110088):0.001653,10:0.110812):0.009870):0.029808,(19:0.080873,14:0.103545):0.028108):0.012914):0.009866,13:0.090557):0.009291,(8:0.095488,(21:0.036636,(22:0.035755,(25:0.010672,(23:0.006204,24: [...]
+   tree rep.8240000 = (6:0.053993,((((12:0.060100,11:0.046280):0.044480,((((27:0.034166,26:0.019713):0.061928,28:0.044299):0.014624,(29:0.123309,((14:0.096692,19:0.089923):0.034651,(10:0.124298,(((15:0.025288,16:0.036053):0.044254,7:0.096715):0.007825,((9:0.093077,(18:0.039455,17:0.027183):0.050646):0.026816,4:0.136349):0.005343):0.001047):0.033015):0.009301):0.007271):0.008619,(13:0.084943,((20:0.094199,5:0.090768):0.100759,(8:0.097723,(21:0.053508,(22:0.053373,(25:0.004410,(23:0.011668 [...]
+   tree rep.8260000 = ((6:0.083043,(2:0.131154,(((((5:0.090778,20:0.110477):0.131182,(((19:0.077741,14:0.084275):0.036643,((((16:0.035221,15:0.041281):0.035723,7:0.089613):0.010532,10:0.103047):0.004069,((9:0.081491,(18:0.029817,17:0.019960):0.067963):0.031377,4:0.126642):0.006786):0.029277):0.004524,(29:0.110199,(28:0.054779,(26:0.020934,27:0.048710):0.087138):0.018361):0.017350):0.008841):0.001600,13:0.095671):0.006431,(8:0.087398,(((25:0.008351,(23:0.004229,24:0.005348):0.018486):0.01 [...]
+   tree rep.8280000 = (3:0.142924,(6:0.060029,(((11:0.027605,12:0.063717):0.055362,(((((18:0.040947,17:0.035259):0.037793,9:0.106013):0.028053,4:0.104397):0.006741,(((16:0.022566,15:0.045826):0.053174,7:0.094014):0.010638,10:0.106240):0.008541):0.028080,(((5:0.056018,20:0.105860):0.093881,((21:0.049336,(22:0.019789,((24:0.006037,23:0.008141):0.009274,25:0.000373):0.027236):0.013915):0.026599,8:0.081794):0.025202):0.005604,(13:0.099450,(((19:0.107736,14:0.096499):0.019155,(28:0.029306,(26 [...]
+   tree rep.8300000 = ((3:0.101964,6:0.073291):0.015632,(((((13:0.096016,(20:0.113752,5:0.094804):0.082711):0.002506,(((22:0.029705,((24:0.008493,23:0.011925):0.007887,25:0.001845):0.014247):0.009418,21:0.054854):0.036225,8:0.069958):0.032589):0.011611,((29:0.122058,((27:0.026247,26:0.035402):0.057903,28:0.035560):0.023036):0.001241,((19:0.056837,14:0.087251):0.049533,((((15:0.033414,16:0.028018):0.034904,10:0.109246):0.006610,(4:0.138361,(9:0.121855,(17:0.016977,18:0.020722):0.039607):0 [...]
+   tree rep.8320000 = ((6:0.078041,((((((5:0.098475,20:0.129478):0.101995,(((7:0.097349,((10:0.088429,(4:0.147704,(9:0.099445,(18:0.033316,17:0.023763):0.026759):0.022651):0.022643):0.001595,(16:0.029079,15:0.038965):0.060558):0.006122):0.035033,(19:0.083512,14:0.093740):0.025190):0.012777,(29:0.112760,((26:0.029929,27:0.036651):0.077456,28:0.050557):0.022921):0.010023):0.006387):0.011098,13:0.085535):0.002656,((21:0.032456,((23:0.006386,(25:0.010119,24:0.009159):0.005667):0.021836,22:0. [...]
+   tree rep.8340000 = (((((((7:0.097289,(16:0.017141,15:0.035976):0.049456):0.011460,(4:0.119416,(9:0.134078,(18:0.032504,17:0.021722):0.032347):0.044162):0.016752):0.007121,10:0.099168):0.035456,((8:0.119251,(21:0.049709,((25:0.012589,(24:0.006273,23:0.010004):0.002239):0.034842,22:0.029418):0.025004):0.051854):0.027537,(((19:0.106609,14:0.113935):0.023632,(29:0.080658,((26:0.026812,27:0.047432):0.048751,28:0.056859):0.023567):0.008099):0.003128,((5:0.111828,20:0.076022):0.120996,13:0.0 [...]
+   tree rep.8360000 = ((2:0.194833,(((12:0.068408,11:0.025753):0.065408,((8:0.082192,(21:0.024884,(((24:0.007123,23:0.011494):0.006221,25:0.003511):0.038996,22:0.014553):0.013962):0.033654):0.032869,((((7:0.116378,(10:0.099332,(((9:0.112286,(18:0.041837,17:0.017756):0.024302):0.019544,4:0.156047):0.009636,(16:0.024095,15:0.029409):0.064822):0.017616):0.009356):0.021072,(19:0.087486,14:0.090070):0.020596):0.007767,(29:0.082886,((26:0.021779,27:0.034075):0.067040,28:0.051452):0.020825):0.0 [...]
+   tree rep.8380000 = ((3:0.131619,(2:0.150778,((12:0.075377,11:0.031054):0.025234,(((13:0.081594,(8:0.054972,(21:0.023946,(22:0.036194,(25:0.004213,(24:0.011448,23:0.009682):0.008227):0.023677):0.009750):0.037708):0.017939):0.007982,(5:0.098541,20:0.098616):0.076863):0.004167,(((((26:0.022853,27:0.038363):0.061685,28:0.025158):0.033031,29:0.092602):0.013803,(19:0.086424,14:0.090704):0.051425):0.000892,(((9:0.104041,(18:0.046178,17:0.016900):0.056050):0.020615,4:0.124618):0.005199,(((16: [...]
+   tree rep.8400000 = ((3:0.146010,(((11:0.034842,12:0.046781):0.030779,(((4:0.149160,((18:0.038641,17:0.013306):0.044056,9:0.118193):0.037392):0.003690,((7:0.089336,(16:0.020502,15:0.054463):0.043317):0.006903,10:0.099644):0.004173):0.019573,((5:0.127446,20:0.086974):0.079326,(((((22:0.024272,((23:0.006762,24:0.009305):0.004840,25:0.003828):0.019585):0.020601,21:0.048735):0.034888,8:0.085388):0.008971,13:0.073735):0.011055,((((26:0.024324,27:0.023511):0.092985,28:0.046738):0.031857,29:0 [...]
+   tree rep.8420000 = ((3:0.127763,6:0.069797):0.005974,(((11:0.032735,12:0.043200):0.040834,((((5:0.114780,20:0.096110):0.078352,(13:0.088545,((21:0.041004,(((23:0.012195,24:0.005367):0.008416,25:0.009010):0.011576,22:0.037782):0.041790):0.011937,8:0.066765):0.016296):0.001728):0.002987,((19:0.086893,14:0.076507):0.063476,(((26:0.026419,27:0.030888):0.086409,28:0.023332):0.026980,29:0.091364):0.018161):0.011947):0.010997,((4:0.123033,((18:0.037083,17:0.002377):0.068008,9:0.102110):0.018 [...]
+   tree rep.8440000 = ((6:0.081702,3:0.102178):0.004192,(2:0.160138,((11:0.022363,12:0.074891):0.049137,((13:0.099632,(((21:0.040265,(((24:0.009268,23:0.003853):0.000928,25:0.019937):0.020421,22:0.045237):0.017477):0.039396,8:0.083888):0.038648,(5:0.097866,20:0.111418):0.105396):0.001565):0.001848,((29:0.088693,((26:0.024133,27:0.053930):0.063720,28:0.029511):0.017337):0.016721,((19:0.084517,14:0.094852):0.030633,((7:0.098784,((4:0.114359,((18:0.047795,17:0.027626):0.047569,9:0.095236):0 [...]
+   tree rep.8460000 = (3:0.132893,(6:0.091213,(2:0.164501,(((((27:0.029314,26:0.022518):0.061860,28:0.039762):0.029413,29:0.099098):0.035353,(((((25:0.020014,24:0.017092):0.007262,23:0.013601):0.021432,(21:0.084340,22:0.041654):0.019123):0.048040,8:0.069989):0.037704,(((((4:0.149686,(9:0.123511,(17:0.031956,18:0.023276):0.055792):0.006501):0.016249,(7:0.127443,(15:0.041453,16:0.026559):0.049310):0.019108):0.015948,10:0.128739):0.029288,((20:0.075841,5:0.152723):0.083994,13:0.114713):0.00 [...]
+   tree rep.8480000 = (((2:0.170645,((12:0.058136,11:0.046921):0.036470,(((10:0.094956,(7:0.125483,(16:0.033532,15:0.045953):0.052828):0.014109):0.008660,((9:0.110807,(18:0.039632,17:0.011828):0.038981):0.027550,4:0.125449):0.019832):0.022423,((((19:0.071526,14:0.076352):0.037631,(((26:0.026492,27:0.031171):0.067166,28:0.030978):0.015526,29:0.079159):0.011225):0.009775,(((22:0.037039,((23:0.017197,24:0.014500):0.014807,25:0.002078):0.031731):0.014003,21:0.023766):0.029106,8:0.109105):0.0 [...]
+   tree rep.8500000 = (((3:0.097976,6:0.077671):0.006290,((12:0.055749,11:0.032064):0.047665,((5:0.098041,20:0.070752):0.075764,((13:0.105540,(((28:0.049158,(26:0.026149,27:0.030735):0.087695):0.016306,29:0.075002):0.015222,(((10:0.116937,((16:0.021147,15:0.048340):0.048705,(4:0.115466,(9:0.092358,(18:0.047183,17:0.014607):0.029252):0.023512):0.001176):0.015268):0.004479,7:0.102027):0.018305,(19:0.081103,14:0.092665):0.012602):0.008248):0.009811):0.002187,(8:0.102219,(21:0.068632,(22:0.0 [...]
+   tree rep.8520000 = ((6:0.066460,(2:0.166545,((11:0.024843,12:0.067860):0.073689,(((13:0.086074,((((25:0.008754,(24:0.012276,23:0.011984):0.008999):0.028019,22:0.023829):0.011366,21:0.041306):0.028530,8:0.088890):0.022665):0.006012,(5:0.101745,20:0.067092):0.093311):0.004043,(((19:0.092584,14:0.094760):0.027366,((4:0.083481,(9:0.090102,(18:0.028658,17:0.019344):0.060719):0.019851):0.008005,(((16:0.021802,15:0.030829):0.033970,7:0.102479):0.011405,10:0.099926):0.011157):0.027885):0.0212 [...]
+   tree rep.8540000 = ((2:0.214646,((((19:0.088026,14:0.088350):0.030855,(((((15:0.043915,16:0.025738):0.042171,7:0.113925):0.008839,(((18:0.034497,17:0.039288):0.032006,9:0.100200):0.024176,4:0.106559):0.014882):0.004822,10:0.108467):0.047386,(((27:0.038071,26:0.022623):0.091462,28:0.035680):0.018834,29:0.091259):0.034935):0.001499):0.007793,((13:0.091236,(20:0.096015,5:0.150760):0.097469):0.006489,((((23:0.019120,(25:0.022506,24:0.006484):0.003019):0.017904,22:0.026262):0.030870,21:0.0 [...]
+   tree rep.8560000 = (((2:0.193539,(((13:0.091229,((((4:0.111181,((18:0.038361,17:0.021676):0.029688,9:0.088268):0.032662):0.011461,(10:0.094803,(16:0.017864,15:0.043316):0.065721):0.001412):0.009162,7:0.105702):0.021716,(5:0.099269,20:0.097412):0.096423):0.009021):0.006207,(((29:0.091519,(28:0.031194,(26:0.017408,27:0.037395):0.054685):0.020857):0.022894,(19:0.113799,14:0.085109):0.035810):0.005857,((21:0.057225,(22:0.038312,((23:0.015287,24:0.014710):0.006429,25:0.004605):0.012778):0. [...]
+   tree rep.8580000 = (3:0.163521,(6:0.088070,(((((5:0.085650,20:0.099925):0.096470,(((19:0.084237,14:0.100591):0.038626,((4:0.127059,((18:0.039231,17:0.025623):0.041686,9:0.107949):0.025521):0.014558,(((16:0.039404,15:0.037091):0.046553,7:0.086880):0.014186,10:0.109721):0.000895):0.031181):0.013581,(29:0.096026,((26:0.021326,27:0.028214):0.062161,28:0.043389):0.032658):0.008196):0.010194):0.014123,(13:0.093873,(8:0.129965,((22:0.021340,((24:0.012988,23:0.011646):0.011072,25:0.010660):0. [...]
+   tree rep.8600000 = (((((13:0.072602,((19:0.069620,14:0.084192):0.039359,((7:0.139140,(((((17:0.029292,18:0.028598):0.043011,9:0.082468):0.031582,4:0.130026):0.009438,(15:0.023467,16:0.030975):0.045762):0.016984,10:0.081008):0.016031):0.019602,((29:0.096773,(28:0.036662,(27:0.039388,26:0.031551):0.060389):0.028064):0.004079,(20:0.095643,5:0.104468):0.071866):0.012358):0.001125):0.010546):0.000531,(8:0.056528,(21:0.046548,(22:0.026317,((25:0.008380,24:0.006120):0.002663,23:0.005490):0.0 [...]
+   tree rep.8620000 = (3:0.137193,((((((8:0.074571,((22:0.034529,((24:0.009100,25:0.016703):0.009056,23:0.005109):0.012351):0.013181,21:0.042755):0.044259):0.029207,((20:0.097877,5:0.097341):0.076913,13:0.093761):0.018873):0.000392,(((19:0.084781,14:0.092946):0.030180,(((4:0.116263,((17:0.012927,18:0.048305):0.041840,9:0.124747):0.013587):0.003341,((15:0.036736,16:0.033161):0.056932,10:0.117300):0.005559):0.002938,7:0.112364):0.025175):0.003524,((28:0.044676,(27:0.023204,26:0.020032):0.0 [...]
+   tree rep.8640000 = ((6:0.102704,3:0.125638):0.030042,(((12:0.055582,11:0.036648):0.070617,(((8:0.104371,((((24:0.016406,23:0.010037):0.015172,25:0.003701):0.043281,22:0.040825):0.004322,21:0.056333):0.018160):0.022872,(5:0.092922,20:0.108491):0.116434):0.005995,(13:0.120266,(((10:0.123084,((((18:0.032368,17:0.027114):0.062133,9:0.097778):0.021211,4:0.108610):0.015923,((16:0.027689,15:0.045497):0.066297,7:0.104939):0.006542):0.003896):0.021485,(14:0.083166,19:0.104331):0.024896):0.0036 [...]
+   tree rep.8660000 = ((3:0.158428,6:0.080512):0.005059,(((((19:0.075841,14:0.143625):0.019480,((10:0.082412,((4:0.128758,((17:0.011915,18:0.054880):0.042578,9:0.097740):0.018396):0.001807,(7:0.129862,(15:0.051303,16:0.036367):0.051558):0.010434):0.005184):0.034785,(29:0.102173,(28:0.060512,(27:0.033629,26:0.019468):0.081690):0.020330):0.025947):0.002409):0.017602,((13:0.083383,(20:0.092802,5:0.080972):0.071401):0.009772,(((((24:0.005370,23:0.008431):0.006005,25:0.006177):0.032381,22:0.0 [...]
+   tree rep.8680000 = ((3:0.179543,6:0.064350):0.007674,(2:0.160618,((12:0.089098,11:0.021131):0.047004,((((14:0.108033,19:0.095574):0.032722,(10:0.103431,((7:0.122410,(15:0.050123,16:0.025554):0.046643):0.005087,((9:0.097085,(18:0.040511,17:0.022282):0.039866):0.026338,4:0.150885):0.010928):0.007322):0.016351):0.015167,(29:0.132142,(28:0.037359,(27:0.034655,26:0.014958):0.078405):0.026795):0.016246):0.007336,((8:0.078666,(21:0.029022,(((24:0.009695,23:0.005505):0.012428,25:0.001738):0.0 [...]
+   tree rep.8700000 = ((2:0.129717,((12:0.058559,11:0.031430):0.042416,((((((25:0.008903,(24:0.006224,23:0.013568):0.006076):0.018886,22:0.029844):0.016138,21:0.038130):0.063989,8:0.094632):0.027967,((20:0.114640,5:0.084379):0.066024,13:0.062714):0.013567):0.002964,((28:0.052046,(27:0.013647,26:0.023610):0.064094):0.017379,(((10:0.091238,(((15:0.054039,16:0.023124):0.022493,((9:0.120164,(18:0.039025,17:0.018247):0.045833):0.012910,4:0.126978):0.006290):0.004680,7:0.114847):0.000484):0.02 [...]
+   tree rep.8720000 = (6:0.055032,((2:0.184496,(13:0.097216,(((21:0.042179,(22:0.028788,((23:0.006999,24:0.006515):0.006354,25:0.002974):0.027107):0.011294):0.050611,8:0.113309):0.002598,((((19:0.073774,14:0.099710):0.028908,((((((18:0.035637,17:0.015287):0.053551,9:0.104661):0.027270,4:0.104959):0.009138,((16:0.027539,15:0.037764):0.053501,10:0.093664):0.002964):0.017142,7:0.110116):0.005397,(12:0.074070,11:0.043000):0.064777):0.003980):0.013848,(29:0.104265,((26:0.037573,27:0.026494):0 [...]
+   tree rep.8740000 = (2:0.175246,((6:0.087790,3:0.094909):0.013107,((12:0.060023,11:0.025668):0.040610,((((7:0.074365,(15:0.036216,16:0.031328):0.043092):0.011604,10:0.106514):0.003082,(4:0.113547,(9:0.108564,(18:0.044395,17:0.024467):0.033279):0.012738):0.004112):0.027113,(((8:0.082103,(21:0.059002,(((23:0.010398,24:0.019337):0.007399,25:0.011735):0.025904,22:0.024132):0.025146):0.027402):0.019298,((20:0.094516,5:0.089533):0.091004,13:0.088831):0.004092):0.006067,((29:0.082204,(28:0.04 [...]
+   tree rep.8760000 = ((2:0.214425,((13:0.093649,(((19:0.073300,14:0.100681):0.044343,((29:0.089243,(28:0.052048,(27:0.022052,26:0.037642):0.073171):0.029303):0.008027,(10:0.125705,((7:0.120766,(15:0.044763,16:0.018988):0.046847):0.008537,(((17:0.023410,18:0.026805):0.043637,9:0.099734):0.019428,4:0.122770):0.014851):0.009339):0.019026):0.000899):0.012009,((8:0.090751,((22:0.040546,(25:0.003683,(23:0.005744,24:0.017486):0.007808):0.027971):0.006758,21:0.056934):0.030635):0.021934,(20:0.0 [...]
+   tree rep.8780000 = ((2:0.162373,(((((19:0.109257,14:0.127211):0.016780,((((((18:0.033608,17:0.018337):0.044474,9:0.128911):0.013701,4:0.138716):0.004248,(16:0.022053,15:0.045284):0.070657):0.004955,10:0.080292):0.004611,7:0.103824):0.008859):0.015324,(29:0.073525,(28:0.064537,(26:0.028096,27:0.045545):0.056718):0.027345):0.005778):0.014027,(((5:0.111416,20:0.136890):0.067731,(8:0.107349,((22:0.033069,(25:0.008368,(23:0.011423,24:0.004076):0.009579):0.022977):0.008704,21:0.053711):0.03 [...]
+   tree rep.8800000 = (6:0.081618,((((12:0.054569,11:0.030261):0.034392,((10:0.107952,(((15:0.037254,16:0.020937):0.030440,7:0.089298):0.006969,((9:0.095799,(18:0.053902,17:0.011741):0.035282):0.029465,4:0.133506):0.007583):0.004590):0.025538,((((((22:0.028233,(25:0.007153,(23:0.014819,24:0.013140):0.010768):0.030534):0.004471,21:0.065944):0.033248,8:0.079887):0.026448,13:0.055789):0.011656,(20:0.083540,5:0.094910):0.068671):0.006013,((28:0.037611,(27:0.055908,26:0.008692):0.081490):0.01 [...]
+   tree rep.8820000 = (3:0.123923,((((11:0.025284,12:0.063034):0.036443,(((((26:0.023593,27:0.030036):0.061036,28:0.047110):0.012377,29:0.080668):0.009473,((19:0.060913,14:0.097437):0.027637,((((16:0.029814,15:0.037278):0.041891,7:0.145710):0.018412,((9:0.107318,(18:0.036604,17:0.016091):0.046758):0.024875,4:0.116340):0.003111):0.004941,10:0.080540):0.017499):0.003459):0.015394,((13:0.081876,(8:0.089013,(21:0.054750,(((24:0.007933,23:0.009486):0.014090,25:0.005303):0.035949,22:0.032144): [...]
+   tree rep.8840000 = ((6:0.109491,((((((22:0.037359,(23:0.015651,(25:0.014246,24:0.011451):0.006754):0.035196):0.014915,21:0.047941):0.048566,8:0.122645):0.032424,((5:0.121581,20:0.123372):0.051050,(((7:0.085975,(((16:0.023628,15:0.035468):0.061734,(4:0.143998,((18:0.033874,17:0.017628):0.062913,9:0.103869):0.018495):0.013178):0.011650,10:0.082988):0.005081):0.025766,(((26:0.042518,27:0.018244):0.062570,28:0.061569):0.014318,((19:0.070060,14:0.084027):0.041591,29:0.095074):0.007933):0.0 [...]
+   tree rep.8860000 = (6:0.075487,((2:0.187683,((12:0.057603,11:0.035354):0.042727,((13:0.092312,(((22:0.017397,(25:0.003325,(24:0.007744,23:0.019922):0.009487):0.017639):0.018505,21:0.069225):0.016243,8:0.093042):0.031957):0.009504,(((14:0.084221,19:0.090158):0.019853,((28:0.046053,(27:0.032085,26:0.031458):0.054759):0.024516,(29:0.133168,(20:0.106224,5:0.116595):0.092850):0.004523):0.010709):0.002353,(7:0.090424,(((((18:0.032157,17:0.026897):0.038165,9:0.080092):0.024077,4:0.099696):0. [...]
+   tree rep.8880000 = ((3:0.124157,(((((8:0.096451,((22:0.025149,((23:0.007972,24:0.017681):0.002752,25:0.006538):0.018161):0.024221,21:0.033227):0.014971):0.034415,((20:0.103145,5:0.090462):0.092805,13:0.071765):0.004522):0.011665,(((19:0.097449,14:0.106154):0.030625,((10:0.098461,((15:0.041121,16:0.027512):0.063527,(4:0.114508,((17:0.022815,18:0.043165):0.040037,9:0.115195):0.039809):0.005987):0.000138):0.006193,7:0.088894):0.022737):0.016848,(29:0.112159,((27:0.054276,26:0.009652):0.0 [...]
+   tree rep.8900000 = (6:0.082857,((((11:0.014449,12:0.078704):0.056201,((13:0.093123,((8:0.080039,((22:0.031533,(25:0.010965,(24:0.009978,23:0.004666):0.005154):0.021713):0.015036,21:0.031126):0.037913):0.023054,(5:0.093198,20:0.073858):0.080274):0.012258):0.011169,((29:0.094934,((26:0.026282,27:0.022077):0.073358,28:0.043900):0.022397):0.004055,((((((18:0.031375,17:0.029247):0.036501,9:0.088290):0.013961,4:0.117597):0.008932,(7:0.092009,(16:0.023255,15:0.026495):0.040012):0.006734):0.0 [...]
+   tree rep.8920000 = (3:0.103904,(((((((((9:0.108599,(18:0.048139,17:0.014766):0.051795):0.016616,4:0.126687):0.020078,(10:0.081544,((16:0.028050,15:0.026063):0.044999,7:0.098075):0.019277):0.016184):0.017803,(14:0.059787,19:0.077454):0.026387):0.006096,(29:0.084785,(28:0.062884,(26:0.017064,27:0.032585):0.073018):0.026139):0.009220):0.007840,((5:0.105384,20:0.059473):0.100166,((8:0.091901,(21:0.037150,(22:0.034377,(25:0.000510,(24:0.020022,23:0.012837):0.009490):0.043646):0.028019):0.0 [...]
+   tree rep.8940000 = ((6:0.146593,((((((29:0.111979,(28:0.056780,(26:0.014495,27:0.040818):0.068189):0.029571):0.007221,(((4:0.159216,((18:0.050455,17:0.010152):0.030885,9:0.083464):0.021919):0.011062,(7:0.082277,(16:0.017532,15:0.032325):0.066356):0.018219):0.012012,10:0.078494):0.021286):0.006564,(14:0.105899,19:0.065334):0.047863):0.018112,((5:0.097123,20:0.104399):0.099237,(13:0.124714,(8:0.069773,((21:0.062184,22:0.025408):0.009791,(25:0.004726,(24:0.016657,23:0.017755):0.009519):0 [...]
+   tree rep.8960000 = ((6:0.114652,3:0.177655):0.016449,(2:0.257586,(((13:0.103160,((8:0.107591,(((25:0.016721,(24:0.011230,23:0.011957):0.000166):0.044053,21:0.072430):0.002325,22:0.041595):0.039507):0.027374,(20:0.121658,5:0.101207):0.119671):0.012755):0.020837,((19:0.088751,14:0.078395):0.042106,((((((17:0.011198,18:0.047353):0.073476,9:0.111054):0.029022,(7:0.117917,(15:0.039156,16:0.038803):0.045392):0.012021):0.021296,4:0.209378):0.007189,10:0.113817):0.028119,((28:0.052081,(27:0.0 [...]
+   tree rep.8980000 = ((6:0.112622,3:0.155984):0.018952,(((((((((16:0.036460,15:0.022206):0.040602,7:0.153341):0.016579,(((18:0.038261,17:0.018714):0.052158,9:0.125232):0.028005,4:0.115647):0.026436):0.008886,10:0.104838):0.015672,((5:0.143019,20:0.099351):0.143354,(29:0.144590,(28:0.063918,(26:0.013874,27:0.031493):0.063133):0.027026):0.004595):0.007283):0.000485,(14:0.116490,19:0.095431):0.043456):0.012270,((8:0.074628,((((23:0.020323,24:0.010468):0.003947,25:0.007398):0.016269,22:0.03 [...]
+   tree rep.9000000 = (3:0.125893,(6:0.089973,((((((19:0.105276,14:0.097194):0.038053,(((7:0.087361,(16:0.020671,15:0.043229):0.044859):0.014294,((9:0.103570,(18:0.036598,17:0.021452):0.046408):0.013556,4:0.127489):0.015183):0.003636,10:0.091029):0.012762):0.005745,((29:0.077064,((26:0.027875,27:0.036609):0.083844,28:0.050680):0.008210):0.016450,(5:0.105764,20:0.094603):0.130850):0.004233):0.002800,(((21:0.062532,((25:0.003068,(23:0.011690,24:0.008648):0.003339):0.020952,22:0.028665):0.0 [...]
+   tree rep.9020000 = ((3:0.139244,6:0.069014):0.004077,(2:0.146520,(((13:0.090291,(8:0.102488,(21:0.026442,(22:0.041863,((25:0.012688,24:0.013108):0.000486,23:0.013879):0.025316):0.027665):0.038613):0.023570):0.005043,((5:0.096941,20:0.066319):0.079180,((29:0.092593,(28:0.052606,(26:0.029418,27:0.028234):0.046442):0.014796):0.002928,((14:0.108651,19:0.095201):0.016065,((((((18:0.038685,17:0.022394):0.037852,9:0.126869):0.039286,4:0.118827):0.007971,(16:0.015912,15:0.035501):0.059441):0. [...]
+   tree rep.9040000 = ((2:0.157385,(((((8:0.120729,(21:0.035030,(((23:0.010192,24:0.009067):0.008396,25:0.008875):0.031249,22:0.017575):0.019682):0.045841):0.023618,13:0.079960):0.013669,(5:0.100894,20:0.098616):0.091577):0.006505,(((28:0.038405,(26:0.039500,27:0.034753):0.054074):0.023391,29:0.111366):0.012799,((19:0.083566,14:0.107164):0.025099,(((((18:0.037347,17:0.012347):0.053976,9:0.103573):0.022955,4:0.121821):0.015821,(7:0.073933,(16:0.029922,15:0.024508):0.058748):0.009962):0.00 [...]
+   tree rep.9060000 = ((((12:0.089646,11:0.030832):0.039752,((((5:0.090749,20:0.101229):0.093280,13:0.078729):0.016564,(8:0.106095,(21:0.045043,(((24:0.013691,23:0.012212):0.009062,25:0.012993):0.008793,22:0.036393):0.030232):0.018111):0.035401):0.016031,(((14:0.091265,19:0.093181):0.033350,(10:0.093351,(((16:0.024073,15:0.040997):0.039888,7:0.107087):0.011194,((9:0.080341,(18:0.035015,17:0.018031):0.055956):0.031621,4:0.124916):0.014688):0.001739):0.017691):0.024562,(29:0.071571,(28:0.0 [...]
+   tree rep.9080000 = ((6:0.116948,3:0.135537):0.004528,(2:0.155986,((((29:0.097976,(28:0.050490,(27:0.037226,26:0.018610):0.053657):0.014254):0.006385,((14:0.088864,19:0.076437):0.010923,((10:0.114266,((15:0.027849,16:0.030560):0.030405,7:0.107454):0.020729):0.010292,(4:0.131178,((18:0.051110,17:0.031445):0.055430,9:0.099955):0.032565):0.008927):0.008388):0.006966):0.009863,(((20:0.095867,5:0.117154):0.073787,13:0.100244):0.000947,(8:0.111307,((21:0.070608,(25:0.005615,(24:0.013071,23:0 [...]
+   tree rep.9100000 = (((((13:0.096043,(8:0.077158,(21:0.050120,(22:0.031176,(25:0.004109,(24:0.012594,23:0.011340):0.015477):0.030082):0.014015):0.032626):0.030352):0.000436,((20:0.096649,5:0.101939):0.082107,(((((7:0.070540,(15:0.041935,16:0.021839):0.051916):0.016392,(4:0.131624,((18:0.030758,17:0.017156):0.039479,9:0.104060):0.027275):0.003970):0.003239,10:0.085934):0.025418,((28:0.042706,29:0.082402):0.015882,(27:0.023172,26:0.028716):0.050684):0.024116):0.004453,(14:0.089801,19:0.0 [...]
+   tree rep.9120000 = ((6:0.081442,3:0.146841):0.010655,(((12:0.065951,11:0.016766):0.059747,(((5:0.095260,20:0.093853):0.083004,13:0.087498):0.009150,(((21:0.043424,(((24:0.016487,23:0.016191):0.000645,25:0.007190):0.028697,22:0.031474):0.009476):0.025007,8:0.061626):0.057839,((29:0.120727,((14:0.093246,19:0.078703):0.028066,(((9:0.113430,(18:0.034157,17:0.024328):0.054934):0.020839,4:0.146327):0.007477,(10:0.115760,(7:0.093142,(16:0.031861,15:0.032140):0.030526):0.006199):0.006960):0.0 [...]
+   tree rep.9140000 = (2:0.231746,(((12:0.086682,11:0.012732):0.036760,(((21:0.060591,(22:0.035054,((24:0.007967,23:0.011807):0.002027,25:0.006879):0.031809):0.020537):0.027271,8:0.130180):0.038161,((13:0.114056,(5:0.125456,20:0.086957):0.120258):0.003416,(((14:0.073043,19:0.064020):0.022472,(29:0.107197,(28:0.033334,(26:0.029541,27:0.030037):0.068524):0.039691):0.020569):0.009301,((10:0.129971,((16:0.026270,15:0.041764):0.031210,7:0.090040):0.004600):0.006202,(4:0.118092,((18:0.041684,1 [...]
+   tree rep.9160000 = ((3:0.134370,6:0.088953):0.007513,(((((((14:0.094604,19:0.077458):0.027607,((4:0.132913,((7:0.093873,(15:0.055043,16:0.020574):0.032421):0.006708,10:0.090514):0.000250):0.000582,((17:0.020386,18:0.023361):0.043130,9:0.108570):0.027596):0.034080):0.002813,(29:0.069633,((27:0.018664,26:0.039076):0.069113,28:0.037856):0.019337):0.032729):0.015801,((20:0.099571,5:0.085583):0.090207,13:0.079847):0.001603):0.016952,((21:0.046281,((25:0.008722,(23:0.008888,24:0.014620):0.0 [...]
+   tree rep.9180000 = (6:0.101729,((((11:0.035567,12:0.075363):0.043718,(13:0.141686,((((7:0.105590,((4:0.104060,((17:0.025521,18:0.055082):0.039552,9:0.091956):0.019574):0.009435,(10:0.125489,(15:0.060816,16:0.021153):0.052808):0.004530):0.008227):0.031986,(14:0.117348,19:0.078142):0.027018):0.001752,(29:0.143704,((28:0.044990,(27:0.026517,26:0.029265):0.071916):0.021518,(20:0.089379,5:0.114900):0.096198):0.004490):0.004138):0.012295,(8:0.101244,((22:0.034325,(25:0.009817,(24:0.004630,2 [...]
+   tree rep.9200000 = ((6:0.071656,(((((((14:0.093801,19:0.074892):0.009743,(7:0.100446,((4:0.150915,((18:0.033313,17:0.019644):0.036212,9:0.099332):0.029178):0.013694,((15:0.036869,16:0.037662):0.049613,10:0.076019):0.015767):0.008223):0.011174):0.002904,(29:0.084023,((27:0.062526,26:0.012416):0.075274,28:0.045903):0.012638):0.028888):0.001786,(20:0.119295,5:0.072507):0.093151):0.006166,(13:0.084136,((21:0.040372,(22:0.031261,((23:0.025821,24:0.015592):0.007307,25:0.009601):0.021432):0. [...]
+   tree rep.9220000 = ((2:0.161350,((11:0.026089,12:0.066600):0.041110,((((29:0.087081,((26:0.018163,27:0.033245):0.066169,28:0.033850):0.024451):0.019916,((14:0.106704,19:0.065329):0.031165,((((18:0.043823,17:0.008142):0.050460,9:0.113499):0.033256,4:0.119282):0.017291,(10:0.068790,(7:0.087741,(16:0.023757,15:0.027607):0.031360):0.001521):0.008774):0.024393):0.001327):0.009514,(13:0.100526,(5:0.114049,20:0.103096):0.071057):0.018531):0.000823,((21:0.040619,(22:0.032015,((23:0.010402,24: [...]
+   tree rep.9240000 = ((((12:0.061100,11:0.030007):0.061234,(((13:0.096903,(5:0.127565,20:0.125996):0.086701):0.005998,(((14:0.094788,19:0.081647):0.031902,((4:0.137786,((18:0.035103,17:0.022822):0.050551,9:0.107784):0.023307):0.018059,(10:0.093692,(7:0.085635,(16:0.021438,15:0.044422):0.064147):0.010410):0.015514):0.032809):0.002171,(((26:0.035360,27:0.037202):0.081003,28:0.051618):0.019193,29:0.123329):0.007438):0.004497):0.003828,(8:0.080323,(((23:0.006524,(24:0.011634,25:0.008806):0. [...]
+   tree rep.9260000 = ((2:0.161906,((11:0.025659,12:0.080086):0.054626,(((((20:0.092330,5:0.071279):0.099685,13:0.102589):0.002163,((21:0.038599,((25:0.008324,(24:0.008045,23:0.006829):0.003666):0.026214,22:0.031718):0.033518):0.036362,8:0.081435):0.020329):0.006821,((14:0.095135,19:0.071007):0.032252,(((27:0.036253,26:0.037565):0.078879,28:0.031582):0.015445,29:0.116280):0.022799):0.001844):0.009570,(((7:0.100691,10:0.100105):0.002294,(15:0.023390,16:0.025482):0.056153):0.021244,((9:0.1 [...]
+   tree rep.9280000 = (6:0.108283,(3:0.186742,(((((14:0.105849,19:0.153360):0.024935,(((29:0.098000,((27:0.050541,26:0.012320):0.146235,28:0.062402):0.034717):0.019046,(((7:0.105823,(15:0.033915,16:0.018058):0.051236):0.011274,(4:0.186193,((18:0.067515,17:0.020213):0.080893,9:0.160797):0.035962):0.006690):0.002443,10:0.131213):0.033279):0.000684,(20:0.127897,5:0.145643):0.136123):0.004281):0.016356,(13:0.152374,(8:0.105949,((22:0.029945,(25:0.001820,(24:0.021916,23:0.013140):0.015267):0. [...]
+   tree rep.9300000 = ((2:0.172785,(((((((24:0.017458,23:0.015989):0.003786,25:0.009795):0.027406,22:0.032534):0.013151,21:0.046375):0.049553,8:0.082697):0.032915,((13:0.119538,(5:0.088205,20:0.086896):0.089894):0.004109,(((((((18:0.032492,17:0.022058):0.051670,9:0.130441):0.033736,4:0.115885):0.010154,(7:0.110672,(16:0.028165,15:0.048170):0.058038):0.007336):0.001897,10:0.115304):0.037183,(14:0.097635,19:0.075317):0.034367):0.004207,(((26:0.023250,27:0.055141):0.068512,28:0.061365):0.01 [...]
+   tree rep.9320000 = ((6:0.088891,(2:0.179175,((((5:0.094371,20:0.121961):0.071133,(((21:0.052081,(22:0.044867,((24:0.011151,25:0.009084):0.004139,23:0.006390):0.026748):0.007484):0.026915,8:0.097578):0.025648,13:0.093932):0.010962):0.003857,((7:0.088729,((((18:0.027139,17:0.027947):0.048540,9:0.084064):0.026114,4:0.120879):0.031079,((16:0.014740,15:0.042029):0.049165,10:0.119156):0.011789):0.009617):0.011809,((((26:0.032053,27:0.032673):0.063748,28:0.039606):0.022919,29:0.112195):0.001 [...]
+   tree rep.9340000 = ((3:0.118563,6:0.115807):0.005761,(2:0.173740,((12:0.069169,11:0.019654):0.041436,(((((18:0.039667,17:0.008728):0.030760,9:0.098256):0.029208,4:0.130714):0.014578,(10:0.100469,((15:0.038644,16:0.020984):0.032918,7:0.103292):0.012640):0.007066):0.015554,((20:0.093325,5:0.064410):0.086348,(13:0.097986,((((22:0.049693,(23:0.006949,(24:0.009960,25:0.012897):0.001723):0.023911):0.010068,21:0.033451):0.049729,8:0.094907):0.021338,(((27:0.051778,26:0.023791):0.059832,28:0. [...]
+   tree rep.9360000 = ((3:0.125182,6:0.084603):0.014248,(2:0.143671,((((22:0.042140,(21:0.054498,((25:0.013143,24:0.004145):0.008047,23:0.004915):0.019839):0.004591):0.062904,8:0.097722):0.031367,((((((18:0.034146,17:0.022159):0.033050,9:0.118384):0.021518,4:0.131310):0.014086,(((16:0.017974,15:0.052619):0.065540,7:0.115437):0.008044,10:0.090623):0.003758):0.032765,((((26:0.019319,27:0.047808):0.052313,28:0.052423):0.027524,29:0.115086):0.016421,(14:0.088443,19:0.115354):0.015518):0.0042 [...]
+   tree rep.9380000 = ((3:0.180498,6:0.048690):0.012562,(2:0.153799,(((((29:0.089468,((14:0.099993,19:0.086980):0.042486,(((((9:0.093168,(18:0.031166,17:0.031045):0.048556):0.031861,4:0.114147):0.014975,(16:0.031280,15:0.032695):0.055063):0.001526,10:0.109503):0.020342,7:0.089986):0.025400):0.014650):0.004787,(28:0.036673,(26:0.020573,27:0.028909):0.067642):0.047490):0.008660,((5:0.109723,20:0.129675):0.098736,13:0.086378):0.005499):0.009482,(((21:0.057871,((23:0.007204,24:0.006105):0.00 [...]
+   tree rep.9400000 = ((6:0.097952,3:0.121391):0.007009,((((((5:0.096613,20:0.075189):0.097200,13:0.081832):0.004565,((((25:0.008932,(24:0.009754,23:0.009824):0.004847):0.022822,22:0.024870):0.011558,21:0.024367):0.048315,8:0.108477):0.026973):0.015681,((((26:0.037199,27:0.020434):0.072330,28:0.054675):0.016462,29:0.097855):0.017245,((10:0.121461,((7:0.100250,(16:0.012367,15:0.066494):0.049695):0.007389,(((18:0.029936,17:0.015863):0.032679,9:0.140627):0.031908,4:0.151543):0.007000):0.005 [...]
+   tree rep.9420000 = (6:0.085812,(3:0.134678,(2:0.183319,(((5:0.107884,20:0.068942):0.085195,((13:0.076463,(((22:0.020047,(25:0.004817,(24:0.010568,23:0.013940):0.015155):0.025828):0.010741,21:0.063513):0.028547,8:0.102988):0.027834):0.002802,((((10:0.069236,(7:0.077383,(16:0.022068,15:0.040387):0.042183):0.007268):0.005735,(4:0.129205,((18:0.037915,17:0.014219):0.057000,9:0.083811):0.017125):0.005374):0.030242,(14:0.114192,19:0.069709):0.018072):0.010761,(29:0.095546,((26:0.034520,27:0 [...]
+   tree rep.9440000 = (6:0.078188,(3:0.141070,(2:0.165397,((((13:0.083400,(5:0.142447,20:0.078994):0.081122):0.001381,((21:0.049163,((25:0.006142,(23:0.008122,24:0.014655):0.001137):0.019136,22:0.041629):0.028925):0.015155,8:0.100340):0.029590):0.008135,((29:0.086788,((26:0.031189,27:0.040197):0.056282,28:0.070621):0.021127):0.008208,((14:0.087900,19:0.093405):0.023922,(7:0.101560,(((9:0.123553,(18:0.037403,17:0.020336):0.051526):0.029610,4:0.112634):0.008411,((16:0.015885,15:0.055175):0 [...]
+   tree rep.9460000 = (6:0.096852,(3:0.125821,(2:0.160246,((((((20:0.124401,5:0.077800):0.126002,13:0.087561):0.006855,((21:0.037568,(((24:0.024432,23:0.007135):0.008340,25:0.014176):0.030368,22:0.045985):0.010589):0.026855,8:0.083212):0.028424):0.010326,(((((18:0.044983,17:0.016884):0.024127,9:0.113320):0.017376,4:0.134332):0.022757,(10:0.079234,(15:0.051116,16:0.020486):0.059029):0.002197):0.013825,7:0.110792):0.019919):0.002062,((14:0.135132,19:0.091713):0.019074,((28:0.029747,(27:0.0 [...]
+   tree rep.9480000 = ((3:0.112238,6:0.100844):0.019762,(2:0.158927,((11:0.039092,12:0.057170):0.048677,((21:0.031184,(((23:0.007380,24:0.007096):0.004110,25:0.003773):0.031361,22:0.028061):0.015262):0.055293,(((20:0.106975,5:0.091773):0.081671,((8:0.095690,((19:0.064405,14:0.086830):0.014526,(((27:0.038896,26:0.020284):0.062782,28:0.048507):0.040376,29:0.085940):0.011580):0.011045):0.002074,13:0.106818):0.008955):0.006794,(((10:0.086240,(15:0.031531,16:0.020678):0.038192):0.008469,(((17 [...]
+   tree rep.9500000 = ((6:0.074192,(2:0.236707,((((((14:0.075524,19:0.110787):0.018932,(((26:0.025377,27:0.036143):0.074942,28:0.033252):0.034286,29:0.088996):0.009676):0.006582,(7:0.104596,((((9:0.139155,(18:0.032822,17:0.044193):0.029865):0.034863,4:0.113269):0.007086,10:0.102402):0.001589,(16:0.035535,15:0.032858):0.060323):0.009822):0.030491):0.017846,(13:0.087892,(5:0.098120,20:0.124272):0.077295):0.001352):0.000498,(8:0.104315,((22:0.039892,21:0.056801):0.000954,(25:0.010656,(23:0. [...]
+   tree rep.9520000 = ((3:0.130797,6:0.101744):0.031400,(2:0.138083,((((21:0.036682,(22:0.040574,(25:0.011050,(24:0.018923,23:0.012686):0.007171):0.026984):0.016044):0.029019,8:0.070502):0.038574,(13:0.090166,((20:0.109568,5:0.109051):0.068933,((((((15:0.031835,16:0.034909):0.054205,10:0.102769):0.005717,(4:0.108675,(9:0.092140,(18:0.034813,17:0.022869):0.032980):0.015123):0.005022):0.015541,7:0.089500):0.015132,(14:0.073026,19:0.108641):0.043175):0.008457,((28:0.037926,(27:0.040418,26:0 [...]
+   tree rep.9540000 = (6:0.069201,((2:0.184493,(13:0.087311,(((((25:0.002192,(23:0.005448,24:0.005581):0.011824):0.016570,22:0.038614):0.028676,21:0.039984):0.015729,8:0.141009):0.024121,(((14:0.121021,19:0.094703):0.044360,(((12:0.076782,11:0.036839):0.071357,((4:0.128760,(9:0.066886,(18:0.040693,17:0.023353):0.040682):0.028906):0.005518,(10:0.094598,(7:0.106301,(15:0.029370,16:0.030667):0.044999):0.008622):0.006309):0.024716):0.011961,(((27:0.040257,26:0.038383):0.062637,28:0.051980):0 [...]
+   tree rep.9560000 = ((6:0.083996,((((((((26:0.027908,27:0.026699):0.100639,28:0.060477):0.035190,29:0.090167):0.019111,((10:0.160412,((16:0.023207,15:0.035864):0.044489,7:0.116172):0.004251):0.011958,((9:0.111932,(18:0.032611,17:0.040369):0.046168):0.038483,4:0.127736):0.016664):0.027209):0.002307,(14:0.085322,19:0.068192):0.052593):0.009076,((8:0.084327,(21:0.084810,(22:0.041597,(25:0.009293,(24:0.007860,23:0.009025):0.003834):0.021784):0.020477):0.050256):0.031761,((5:0.161836,20:0.0 [...]
+   tree rep.9580000 = ((3:0.133616,6:0.060016):0.016488,(((12:0.058100,11:0.040140):0.035479,((((22:0.035175,((24:0.018714,23:0.006027):0.019746,25:0.005452):0.041942):0.012451,21:0.072039):0.042246,8:0.068681):0.022701,((((((14:0.102825,19:0.071389):0.047299,(((((18:0.031963,17:0.012042):0.050401,9:0.096827):0.014027,4:0.115452):0.017647,((15:0.031903,16:0.023908):0.037564,7:0.113214):0.007425):0.014876,10:0.096911):0.009584):0.011054,((27:0.051740,26:0.014461):0.083515,28:0.061917):0.0 [...]
+   tree rep.9600000 = ((3:0.085382,6:0.099699):0.018832,(2:0.200531,((((5:0.092718,20:0.075592):0.078965,(((28:0.028896,(26:0.022784,27:0.036340):0.078613):0.011195,29:0.065804):0.041021,((14:0.101673,19:0.074691):0.018216,(((9:0.100535,(18:0.040858,17:0.036765):0.048633):0.015429,4:0.113140):0.032335,(10:0.087939,(7:0.076095,(16:0.027792,15:0.033826):0.050354):0.009942):0.002431):0.023694):0.002091):0.021848):0.002920,(13:0.089783,((21:0.054495,(((23:0.008674,24:0.006470):0.002029,25:0. [...]
+   tree rep.9620000 = (((((((21:0.037990,(22:0.026593,(25:0.003272,(24:0.007749,23:0.011420):0.007651):0.030433):0.024088):0.026182,8:0.086326):0.036654,(13:0.088051,(20:0.072928,5:0.091398):0.090794):0.003592):0.008991,((29:0.091638,(14:0.089501,19:0.085126):0.031808):0.006947,(((7:0.093265,((15:0.052071,16:0.020703):0.051188,(4:0.097266,((18:0.043323,17:0.017939):0.043828,9:0.089693):0.031910):0.017862):0.010253):0.014405,10:0.139291):0.026097,((27:0.028661,26:0.034302):0.050612,28:0.0 [...]
+   tree rep.9640000 = (6:0.072180,((2:0.157318,((12:0.042799,11:0.044720):0.058840,((5:0.099013,20:0.074081):0.085387,(((7:0.108095,((16:0.027707,15:0.048656):0.069950,((4:0.102099,((18:0.027183,17:0.019273):0.034918,9:0.101374):0.009670):0.008612,10:0.095979):0.000583):0.009453):0.046157,((14:0.059282,19:0.074587):0.044498,(29:0.076832,(28:0.021362,(26:0.021255,27:0.039529):0.083571):0.031755):0.021716):0.004364):0.002807,((8:0.071441,((((23:0.007090,24:0.021195):0.002202,25:0.004362):0 [...]
+   tree rep.9660000 = (3:0.126216,(6:0.088668,(2:0.198407,(((((5:0.066109,20:0.128293):0.096090,13:0.089888):0.006825,((21:0.054694,(22:0.043422,(23:0.007862,(24:0.012427,25:0.008810):0.003487):0.026401):0.011543):0.031727,8:0.105544):0.036769):0.009235,((14:0.080361,19:0.089635):0.047954,((29:0.093727,(28:0.067886,(26:0.020287,27:0.037518):0.069581):0.012249):0.015483,(((4:0.159668,((18:0.051195,17:0.033257):0.049813,9:0.094385):0.022783):0.015147,(7:0.098711,10:0.077530):0.002868):0.00 [...]
+   tree rep.9680000 = ((6:0.077359,3:0.128882):0.013161,(((11:0.031684,12:0.071769):0.041119,(((22:0.036035,(21:0.055332,(25:0.006878,(24:0.011391,23:0.012667):0.006237):0.027649):0.002237):0.045608,8:0.112589):0.027979,((((7:0.093471,(10:0.118746,((((18:0.044408,17:0.022649):0.040608,9:0.135448):0.024267,4:0.106340):0.008751,(16:0.031120,15:0.039701):0.051947):0.002102):0.004100):0.021354,(14:0.088187,19:0.080038):0.024817):0.017495,(29:0.104413,(28:0.061860,(26:0.024383,27:0.030872):0. [...]
+   tree rep.9700000 = (3:0.143667,(6:0.088544,(2:0.154673,(((13:0.087599,((20:0.060076,5:0.102327):0.070280,(((19:0.060882,14:0.118091):0.043052,(((7:0.159553,(15:0.038071,16:0.025915):0.047257):0.004415,10:0.086777):0.006666,(((17:0.036348,18:0.027995):0.047536,9:0.085744):0.021326,4:0.146768):0.006538):0.015749):0.004127,(29:0.095215,(28:0.031500,(27:0.038418,26:0.023768):0.063693):0.024679):0.016050):0.006022):0.003035):0.001239,(8:0.082962,(21:0.056215,(((23:0.011093,24:0.021989):0.0 [...]
+   tree rep.9720000 = ((((((19:0.070964,14:0.132360):0.041699,((13:0.115397,(8:0.102691,((22:0.030047,((23:0.014532,24:0.016785):0.009892,25:0.002437):0.031475):0.012308,21:0.050177):0.022625):0.034356):0.006276,(29:0.099075,(28:0.068452,(27:0.047429,26:0.027113):0.064427):0.027294):0.030195):0.006560):0.004480,(((20:0.117166,5:0.085756):0.107247,7:0.096280):0.010309,(((15:0.050002,16:0.017657):0.067070,10:0.097699):0.013418,(((17:0.024297,18:0.048913):0.053208,9:0.124549):0.021526,4:0.1 [...]
+   tree rep.9740000 = (((((11:0.021841,12:0.052017):0.066402,(((13:0.082092,(20:0.122185,5:0.066679):0.099228):0.007873,(((10:0.084448,((4:0.098393,((17:0.019853,18:0.035256):0.055732,9:0.122461):0.022869):0.008074,((15:0.035604,16:0.017875):0.074501,7:0.088717):0.001046):0.009161):0.021000,(19:0.077412,14:0.078350):0.022800):0.014394,(29:0.086009,(28:0.039778,(27:0.035907,26:0.013577):0.079955):0.013263):0.009681):0.010043):0.002542,(8:0.110787,(21:0.048091,(((23:0.010655,24:0.015052):0 [...]
+   tree rep.9760000 = ((((((((22:0.029457,((23:0.012333,24:0.008346):0.006544,25:0.010842):0.022537):0.028725,21:0.054744):0.017471,8:0.100259):0.052743,((14:0.114621,19:0.096219):0.037159,(((26:0.021479,27:0.031482):0.087114,28:0.036877):0.022684,29:0.108400):0.005171):0.012318):0.000450,(((((18:0.045069,17:0.021522):0.041210,9:0.128500):0.037268,4:0.143522):0.008792,((7:0.090557,(16:0.020931,15:0.032494):0.028860):0.020077,10:0.105326):0.020307):0.032185,((5:0.090927,20:0.128743):0.091 [...]
+   tree rep.9780000 = ((2:0.228409,(((((14:0.094776,19:0.096230):0.034939,(((20:0.093376,5:0.114469):0.090798,7:0.080874):0.010787,(((15:0.029140,16:0.027014):0.038231,10:0.105099):0.006516,(4:0.117546,(9:0.082083,(18:0.041023,17:0.016619):0.053585):0.044923):0.019001):0.012798):0.014798):0.001898,(((27:0.033669,26:0.033209):0.072864,28:0.038344):0.045383,29:0.112010):0.006569):0.012075,(13:0.089115,((((25:0.002392,(24:0.005693,23:0.007113):0.014478):0.020561,22:0.040009):0.013977,21:0.0 [...]
+   tree rep.9800000 = ((((12:0.075084,11:0.021915):0.056549,(((14:0.086338,19:0.075081):0.025116,((29:0.117175,((26:0.025955,27:0.041089):0.059466,28:0.041501):0.029394):0.001340,(7:0.107457,((10:0.096106,(4:0.181259,(9:0.105738,(18:0.045505,17:0.027602):0.049235):0.017102):0.003730):0.000055,(16:0.029947,15:0.028785):0.060129):0.009796):0.033262):0.002582):0.006509,((8:0.121712,(((25:0.016921,(24:0.007873,23:0.006767):0.002408):0.028088,22:0.047385):0.024515,21:0.049126):0.025817):0.010 [...]
+   tree rep.9820000 = (((((12:0.062939,11:0.027491):0.050739,(((13:0.107861,(20:0.099948,5:0.106809):0.091706):0.013026,((21:0.060257,(22:0.024556,((24:0.005234,23:0.005348):0.007730,25:0.012034):0.028336):0.010444):0.046630,8:0.099827):0.025655):0.006585,((29:0.094912,((27:0.046438,26:0.019789):0.078886,28:0.050389):0.029091):0.012103,((14:0.086149,19:0.072446):0.034958,(((9:0.091891,(18:0.033409,17:0.014995):0.045869):0.008351,4:0.130094):0.003099,(10:0.109005,(7:0.125301,(15:0.040774, [...]
+   tree rep.9840000 = ((2:0.147754,((12:0.075410,11:0.029049):0.047455,((8:0.119710,(21:0.052416,(((23:0.018688,24:0.011082):0.007805,25:0.005661):0.024769,22:0.027384):0.007266):0.035442):0.026622,(((5:0.131706,20:0.100640):0.078466,13:0.070597):0.011582,((((26:0.027242,27:0.044174):0.056186,28:0.053447):0.020126,(((10:0.090643,((16:0.019170,15:0.037332):0.055445,7:0.105396):0.003348):0.004432,((9:0.087999,(18:0.025751,17:0.022331):0.034332):0.022435,4:0.107803):0.022451):0.020872,(14:0 [...]
+   tree rep.9860000 = (3:0.163594,(6:0.097475,((((8:0.090381,(21:0.045702,(((24:0.008327,23:0.010453):0.008205,25:0.012712):0.016389,22:0.037508):0.021317):0.061677):0.020921,((((7:0.076462,((10:0.083464,((9:0.136315,(17:0.023016,18:0.031695):0.024300):0.020415,4:0.120754):0.016532):0.008856,(15:0.044819,16:0.015871):0.060030):0.011387):0.028019,(19:0.083557,14:0.089563):0.037264):0.014056,((28:0.040087,(27:0.043571,26:0.027967):0.060372):0.020538,29:0.109432):0.011266):0.012643,(13:0.05 [...]
+   tree rep.9880000 = ((6:0.053892,3:0.135246):0.020940,(((((((25:0.005482,(24:0.002937,23:0.008451):0.012371):0.031545,22:0.024359):0.014077,21:0.072511):0.018923,8:0.095876):0.031501,(((((((16:0.028325,15:0.035669):0.052805,(4:0.128469,(9:0.100080,(18:0.050752,17:0.012528):0.050885):0.036589):0.004301):0.003968,10:0.098995):0.005283,7:0.101954):0.033289,(14:0.100593,19:0.088897):0.020213):0.008407,(((26:0.030327,27:0.028390):0.072096,28:0.037427):0.023527,29:0.109319):0.013985):0.00627 [...]
+   tree rep.9900000 = (6:0.100364,(3:0.187520,(2:0.203907,(((20:0.149937,5:0.079504):0.096907,((8:0.120639,((((23:0.014954,24:0.011239):0.004815,25:0.013647):0.038828,22:0.031765):0.016725,21:0.046630):0.028018):0.010407,((((19:0.074353,14:0.114059):0.014193,((4:0.130177,((17:0.030657,18:0.045683):0.045772,9:0.117722):0.027483):0.016622,((7:0.102533,(15:0.033454,16:0.037033):0.037421):0.003223,10:0.134641):0.011652):0.016950):0.016572,(((27:0.028598,26:0.040439):0.068485,28:0.040943):0.0 [...]
+   tree rep.9920000 = ((6:0.100505,((((((28:0.055712,(27:0.022899,26:0.034870):0.063710):0.014398,29:0.117494):0.008845,((19:0.119292,14:0.059991):0.058951,((9:0.107670,(17:0.010509,18:0.041842):0.084909):0.038012,(4:0.162349,(10:0.089508,(7:0.113516,(15:0.047262,16:0.025566):0.033761):0.012049):0.012991):0.002659):0.015562):0.012478):0.009486,((13:0.118991,(20:0.105647,5:0.107312):0.126278):0.007920,(8:0.108361,(((25:0.004676,(23:0.011638,24:0.016938):0.007708):0.016838,22:0.045465):0.0 [...]
+   tree rep.9940000 = ((6:0.051799,3:0.131982):0.018773,(((((8:0.117775,(21:0.038126,(22:0.048039,(25:0.011249,(24:0.020675,23:0.008136):0.013243):0.033440):0.007292):0.033727):0.028604,(13:0.083137,(20:0.054548,5:0.099902):0.105708):0.000517):0.006449,(((7:0.121006,((4:0.142606,((18:0.041026,17:0.026966):0.028278,9:0.091422):0.018302):0.010047,((15:0.052715,16:0.021304):0.065009,10:0.089594):0.005731):0.005312):0.031449,(14:0.075359,19:0.092924):0.022001):0.002151,(29:0.084894,(28:0.035 [...]
+   tree rep.9960000 = (3:0.156443,(6:0.119623,(2:0.149826,((11:0.024835,12:0.077956):0.053271,(((((((23:0.012261,24:0.006360):0.000308,25:0.007684):0.036805,22:0.040370):0.017315,21:0.036354):0.036400,8:0.097656):0.025464,((((9:0.120739,(17:0.014384,18:0.041393):0.044234):0.019291,4:0.110446):0.002847,((7:0.094450,(15:0.050204,16:0.014490):0.044862):0.014648,10:0.111568):0.014529):0.021300,(((20:0.101568,5:0.078607):0.087004,((28:0.049866,(27:0.052690,26:0.018475):0.085377):0.011119,29:0 [...]
+   tree rep.9980000 = (3:0.123478,(6:0.092549,(2:0.155944,((11:0.038353,12:0.053135):0.041358,((((21:0.043190,((25:0.004986,(24:0.015327,23:0.012440):0.013553):0.016699,22:0.028677):0.015863):0.035290,8:0.074432):0.011914,((5:0.096092,20:0.143492):0.073179,13:0.066548):0.009165):0.000604,(((10:0.080744,((4:0.121113,((18:0.036809,17:0.028585):0.049787,9:0.111497):0.022644):0.011850,(7:0.097118,(16:0.026901,15:0.036981):0.038609):0.010177):0.001371):0.033079,(14:0.090364,19:0.096683):0.014 [...]
+   tree rep.10000000 = ((6:0.083822,3:0.127865):0.019346,(((11:0.038370,12:0.050840):0.048096,(((13:0.081538,(20:0.078476,5:0.121380):0.090290):0.004961,((21:0.032927,(((23:0.004039,24:0.012120):0.006320,25:0.003074):0.012649,22:0.042816):0.012833):0.037752,8:0.098844):0.014247):0.003241,((29:0.159147,(19:0.080844,14:0.070849):0.038463):0.000966,(((4:0.122264,(9:0.109032,(17:0.028020,18:0.019084):0.077372):0.017771):0.015809,(((15:0.045150,16:0.023836):0.054437,7:0.084885):0.012480,10:0. [...]
+   tree rep.10020000 = (6:0.055233,((2:0.203178,((11:0.022504,12:0.106811):0.067537,(((13:0.095195,(20:0.081248,5:0.117501):0.107142):0.004411,(((7:0.103602,((4:0.135279,10:0.099498):0.003472,((9:0.080442,(17:0.013411,18:0.045022):0.051923):0.025730,(15:0.046957,16:0.037336):0.060200):0.007589):0.007662):0.009832,(19:0.067206,14:0.086432):0.015122):0.004399,((28:0.039757,(27:0.033179,26:0.017185):0.064690):0.026003,29:0.090319):0.009910):0.007848):0.004674,((((25:0.006433,(23:0.012891,24 [...]
+   tree rep.10040000 = ((3:0.122932,(2:0.135867,(((((7:0.098232,((((9:0.088395,(18:0.032869,17:0.021542):0.061607):0.022903,(15:0.030740,16:0.015986):0.059028):0.006933,4:0.160752):0.006031,10:0.079681):0.005209):0.029896,(14:0.070975,19:0.080892):0.028208):0.002281,(29:0.095622,((27:0.042023,26:0.018012):0.084166,28:0.050786):0.013459):0.009320):0.011022,(((8:0.074339,(21:0.030988,(22:0.013530,((23:0.012503,24:0.005803):0.006137,25:0.016654):0.021968):0.035184):0.040707):0.024551,13:0.0 [...]
+   tree rep.10060000 = ((2:0.215746,((((29:0.105658,(28:0.076952,(26:0.031918,27:0.039940):0.075227):0.016387):0.009046,((14:0.102445,19:0.101869):0.028722,((10:0.110814,(7:0.117456,(16:0.019843,15:0.035849):0.052761):0.006922):0.008066,((9:0.095435,(18:0.041415,17:0.016815):0.057558):0.023821,4:0.111477):0.016234):0.029454):0.008041):0.005642,((13:0.108678,(5:0.114164,20:0.101908):0.079439):0.008021,((((25:0.006839,(23:0.019393,24:0.008849):0.005329):0.021675,22:0.040942):0.027745,21:0. [...]
+   tree rep.10080000 = ((((((22:0.037251,((23:0.006483,24:0.017293):0.010941,25:0.014643):0.033860):0.003900,21:0.085995):0.029098,8:0.097255):0.022897,((((14:0.080905,19:0.077543):0.057418,(((10:0.110405,((16:0.025607,15:0.036903):0.043022,7:0.103449):0.004768):0.009150,(4:0.137843,((18:0.044242,17:0.020001):0.040489,9:0.112839):0.035345):0.013652):0.011500,(12:0.074531,11:0.020413):0.074165):0.024080):0.007341,(((26:0.027538,27:0.036399):0.073164,28:0.049094):0.029972,29:0.095909):0.02 [...]
+   tree rep.10100000 = ((2:0.194132,((((5:0.158030,20:0.046939):0.101634,13:0.082622):0.014634,((((((24:0.017519,23:0.019499):0.006128,25:0.009071):0.040419,22:0.066667):0.016023,21:0.046390):0.039599,8:0.083716):0.034833,((((10:0.126398,(16:0.029193,15:0.036345):0.090731):0.001003,7:0.120695):0.015678,(((18:0.037968,17:0.017789):0.050156,9:0.093965):0.015095,4:0.133723):0.008296):0.051652,((14:0.084920,19:0.076996):0.043879,(((26:0.039196,27:0.032715):0.064195,28:0.077597):0.024246,29:0 [...]
+   tree rep.10120000 = ((6:0.084282,(2:0.181664,(((((21:0.052472,(((24:0.020914,23:0.011373):0.012213,25:0.003546):0.025668,22:0.034578):0.007672):0.029841,8:0.119281):0.019426,(((7:0.094097,(15:0.025485,16:0.028651):0.050851):0.010386,((((18:0.035218,17:0.032208):0.044460,9:0.104253):0.027410,4:0.126418):0.018134,10:0.073557):0.002250):0.031562,((((27:0.029612,26:0.036145):0.069084,28:0.044425):0.036640,29:0.100065):0.013465,(14:0.101211,19:0.076487):0.034467):0.000146):0.021389):0.0001 [...]
+   tree rep.10140000 = (3:0.126227,((((12:0.068515,11:0.029091):0.054335,(((13:0.075674,((21:0.038540,((25:0.011147,(24:0.009280,23:0.010193):0.010179):0.027165,22:0.035000):0.024296):0.040827,8:0.095560):0.013645):0.003191,(20:0.132482,5:0.079318):0.069511):0.005828,(((7:0.098117,(((15:0.036494,16:0.023101):0.057475,(((18:0.038537,17:0.017182):0.038814,9:0.164848):0.029461,4:0.101164):0.011094):0.000174,10:0.106954):0.005581):0.012995,(14:0.101083,19:0.105279):0.029921):0.006437,(((27:0 [...]
+   tree rep.10160000 = (3:0.146389,((((11:0.034415,12:0.083140):0.054930,((((7:0.105608,(((4:0.163031,(9:0.169071,(17:0.033598,18:0.037169):0.065127):0.010868):0.012025,(15:0.055677,16:0.026296):0.045626):0.017121,10:0.112164):0.007878):0.021442,(19:0.083779,14:0.100939):0.042136):0.013014,(29:0.099167,((27:0.043230,26:0.046120):0.093395,28:0.048587):0.031985):0.004212):0.019057,((8:0.078529,(21:0.056957,(((23:0.016643,24:0.026154):0.016888,25:0.004726):0.027616,22:0.027816):0.005690):0. [...]
+   tree rep.10180000 = ((((12:0.066808,11:0.021022):0.031120,((13:0.097298,(20:0.103307,5:0.068662):0.068936):0.008879,((((((4:0.111624,((18:0.042574,17:0.012321):0.034371,9:0.101389):0.015590):0.014745,(15:0.036913,16:0.015688):0.066863):0.008053,10:0.078322):0.009212,7:0.112495):0.009978,((14:0.088173,19:0.066395):0.030242,(29:0.088596,(28:0.053179,(27:0.037235,26:0.030212):0.055296):0.012261):0.001917):0.001356):0.005481,(8:0.125725,((22:0.021290,((24:0.013711,23:0.011904):0.006420,25 [...]
+   tree rep.10200000 = (6:0.082505,((((((29:0.090172,(28:0.048010,(26:0.027081,27:0.024616):0.046294):0.030121):0.010288,((19:0.094350,14:0.071460):0.025714,((((16:0.013354,15:0.046634):0.068580,(4:0.107057,((18:0.043111,17:0.021706):0.027532,9:0.085646):0.026911):0.008794):0.003925,10:0.058698):0.011639,7:0.088719):0.013370):0.009020):0.011666,((13:0.096251,(8:0.078583,((25:0.008826,(24:0.011147,23:0.010617):0.007122):0.018524,(21:0.053248,22:0.040227):0.004294):0.037387):0.031576):0.00 [...]
+   tree rep.10220000 = (((((((19:0.071274,14:0.094608):0.038899,(29:0.100366,(28:0.062653,(26:0.019800,27:0.034002):0.059225):0.025554):0.012941):0.008275,(((((18:0.023220,17:0.022758):0.037019,9:0.116971):0.029603,4:0.118787):0.020358,(10:0.133591,((16:0.018723,15:0.051192):0.041130,7:0.133283):0.005364):0.010155):0.014356,(5:0.092152,20:0.089579):0.070717):0.020261):0.002010,(13:0.109594,((((25:0.014403,(24:0.007558,23:0.009514):0.003229):0.026405,22:0.042959):0.010768,21:0.037956):0.0 [...]
+   tree rep.10240000 = (3:0.122877,(6:0.077880,(2:0.155463,((11:0.038389,12:0.081171):0.069610,((10:0.124620,((7:0.103869,(15:0.040400,16:0.030110):0.055602):0.009669,((9:0.123183,(17:0.050245,18:0.043309):0.043423):0.025636,4:0.168569):0.015087):0.021012):0.023439,(((((27:0.040420,26:0.025895):0.069081,28:0.042652):0.013741,29:0.082256):0.027464,(19:0.096230,14:0.110000):0.031907):0.008364,((8:0.102919,(21:0.051182,((25:0.013289,(24:0.005766,23:0.009687):0.006056):0.032185,22:0.047535): [...]
+   tree rep.10260000 = ((3:0.153304,6:0.056707):0.003901,(2:0.162102,(((((29:0.086009,(5:0.115089,20:0.123237):0.097410):0.004237,(28:0.040991,(26:0.022971,27:0.039825):0.048042):0.025959):0.009436,((19:0.062871,14:0.072554):0.029388,(((10:0.090588,(4:0.120364,((18:0.035736,17:0.022712):0.031490,9:0.104650):0.037411):0.005014):0.003036,(16:0.030956,15:0.034546):0.052929):0.012492,7:0.076875):0.019680):0.006960):0.010296,(((21:0.032270,(22:0.038927,(25:0.008425,(23:0.014039,24:0.010023):0 [...]
+   tree rep.10280000 = (6:0.104315,((((11:0.039139,12:0.074039):0.038069,(((29:0.098310,((26:0.031039,27:0.036356):0.069116,28:0.068096):0.020451):0.012867,((19:0.107197,14:0.114323):0.023724,((((16:0.022089,15:0.044150):0.050067,7:0.096497):0.005491,10:0.106165):0.004190,(((18:0.032956,17:0.009928):0.030326,9:0.107376):0.009098,4:0.115108):0.019616):0.015284):0.004592):0.019747,((5:0.110007,20:0.089539):0.077327,(((21:0.040714,(22:0.031687,((23:0.007475,24:0.018799):0.010603,25:0.006963 [...]
+   tree rep.10300000 = (3:0.119446,((2:0.174067,(((((5:0.118216,20:0.121886):0.053088,13:0.088805):0.006278,(8:0.100099,(21:0.055290,(((23:0.019962,25:0.012844):0.000819,24:0.022692):0.035008,22:0.040917):0.028833):0.021417):0.014597):0.003421,((((((4:0.090222,((18:0.035799,17:0.024587):0.046934,9:0.107953):0.041433):0.019560,(16:0.024527,15:0.032860):0.056729):0.011601,10:0.117439):0.006973,7:0.102663):0.029614,(19:0.070549,14:0.114325):0.048805):0.004888,(29:0.098941,(28:0.052172,(26:0 [...]
+   tree rep.10320000 = ((3:0.112267,6:0.070454):0.006154,(2:0.167726,((11:0.033609,12:0.075211):0.077465,(13:0.081198,(((5:0.086250,20:0.068017):0.089372,(8:0.083327,((22:0.036764,(25:0.007225,(23:0.026594,24:0.009870):0.004751):0.031706):0.011817,21:0.064938):0.024389):0.012125):0.015266,((29:0.084739,((26:0.046974,27:0.025810):0.088144,28:0.035625):0.019074):0.019556,((19:0.073224,14:0.075771):0.031588,(((7:0.079074,(16:0.022445,15:0.034715):0.029111):0.012201,10:0.106727):0.011647,((9 [...]
+   tree rep.10340000 = ((2:0.208990,((11:0.031319,12:0.077379):0.044664,(((((15:0.041413,16:0.030049):0.061424,7:0.117017):0.007278,10:0.103428):0.007151,((9:0.109914,(17:0.020826,18:0.044240):0.063503):0.014840,4:0.165644):0.005652):0.022015,((((19:0.093755,14:0.109691):0.035728,29:0.124287):0.002602,((27:0.016716,26:0.040678):0.057265,28:0.046136):0.030897):0.013062,((20:0.124036,5:0.138549):0.058813,((8:0.112690,(21:0.046756,(22:0.025117,((23:0.011439,24:0.027313):0.008904,25:0.011675 [...]
+   tree rep.10360000 = (3:0.106019,(6:0.083853,(((11:0.032318,12:0.055563):0.038987,((((13:0.082357,(20:0.095628,5:0.114092):0.077424):0.004122,(((25:0.003134,(24:0.015583,23:0.003648):0.006494):0.028186,(21:0.057273,22:0.041362):0.012767):0.040078,8:0.073066):0.019695):0.010220,((29:0.113561,((27:0.036897,26:0.036438):0.063259,28:0.045996):0.013545):0.007948,(19:0.063306,14:0.079010):0.039254):0.006824):0.005067,(10:0.068294,((7:0.089615,(15:0.037871,16:0.023392):0.043657):0.007542,((9: [...]
+   tree rep.10380000 = ((2:0.172215,((11:0.059175,12:0.053609):0.044740,((((20:0.101700,5:0.097465):0.099220,13:0.079175):0.017400,((29:0.103089,((27:0.064009,26:0.037286):0.065688,28:0.038401):0.020233):0.005411,((7:0.106150,(((9:0.113681,(17:0.017429,18:0.028660):0.034621):0.054666,4:0.115699):0.033048,(10:0.109645,(15:0.029061,16:0.048276):0.051343):0.000148):0.013624):0.014486,(19:0.072297,14:0.079536):0.045175):0.025314):0.015430):0.000623,(8:0.116506,(21:0.054724,(((23:0.009753,24: [...]
+   tree rep.10400000 = ((2:0.221789,((((((14:0.087822,19:0.062726):0.059848,((10:0.125551,(7:0.089992,(15:0.033984,16:0.030671):0.032884):0.010165):0.008300,(4:0.098041,(9:0.107526,(18:0.044380,17:0.014976):0.032671):0.036127):0.022165):0.011091):0.000881,((28:0.053401,(27:0.017868,26:0.032136):0.078525):0.023791,29:0.108710):0.029144):0.016385,((20:0.161187,5:0.086712):0.063739,13:0.088208):0.007225):0.013367,(8:0.098808,(((25:0.007458,(23:0.014369,24:0.006489):0.012600):0.016683,22:0.0 [...]
+   tree rep.10420000 = ((((12:0.055148,11:0.030260):0.057180,(((((5:0.092522,20:0.091634):0.058707,7:0.098295):0.026916,(10:0.099308,((9:0.123124,(18:0.060670,17:0.028879):0.039986):0.039012,((16:0.034639,15:0.035021):0.082592,4:0.139521):0.005917):0.011154):0.018920):0.021529,((29:0.098080,(28:0.048926,(26:0.022509,27:0.049248):0.071242):0.012788):0.013187,(19:0.115187,14:0.104624):0.023778):0.000527):0.010962,((((22:0.021201,((23:0.006314,24:0.020539):0.007108,25:0.008702):0.033346):0. [...]
+   tree rep.10440000 = ((2:0.199730,(((((21:0.051898,(22:0.016701,((25:0.006034,24:0.008378):0.005162,23:0.009407):0.029262):0.026565):0.017775,8:0.064206):0.032231,(13:0.065498,(20:0.069731,5:0.083339):0.127535):0.000584):0.004995,(((10:0.150248,(4:0.124631,(9:0.104927,(18:0.014644,17:0.039106):0.042940):0.014569):0.013168):0.000064,(7:0.087536,(15:0.045560,16:0.026494):0.036317):0.018687):0.026901,(((28:0.052675,(27:0.031908,26:0.023636):0.057575):0.015335,29:0.101919):0.016865,(14:0.0 [...]
+   tree rep.10460000 = ((6:0.066719,3:0.125413):0.010428,(2:0.144665,((11:0.034745,12:0.059005):0.058931,(((29:0.077723,(28:0.036284,(27:0.024009,26:0.019076):0.079384):0.012003):0.026252,((19:0.085785,14:0.084129):0.025072,(((4:0.121351,(9:0.108156,(17:0.038353,18:0.038015):0.038977):0.016144):0.007917,(7:0.133180,(15:0.025541,16:0.024380):0.045537):0.015069):0.017819,10:0.097983):0.021259):0.013185):0.007195,((20:0.114070,5:0.108541):0.119637,(((21:0.051153,(22:0.026813,(25:0.002509,(2 [...]
+   tree rep.10480000 = ((3:0.111336,6:0.095567):0.015241,(2:0.159337,(((((28:0.043324,(26:0.026701,27:0.030020):0.067661):0.031603,((10:0.102030,7:0.122280):0.005778,((4:0.137386,(9:0.094320,(18:0.021473,17:0.027800):0.047332):0.013730):0.028566,(16:0.029534,15:0.041137):0.037728):0.012341):0.040376):0.004280,(29:0.116716,(14:0.083470,19:0.101237):0.027760):0.009053):0.003690,(((5:0.095223,20:0.087263):0.067440,(((22:0.023191,((23:0.005323,24:0.007828):0.010934,25:0.006211):0.021477):0.0 [...]
+   tree rep.10500000 = ((3:0.167836,(((11:0.027254,12:0.067732):0.043061,((((((28:0.045686,(27:0.037819,26:0.027431):0.065746):0.021993,29:0.078418):0.013873,(19:0.060871,14:0.126057):0.065262):0.010417,8:0.135940):0.006114,(((((25:0.012300,(24:0.012616,23:0.006805):0.006978):0.024976,22:0.025970):0.011551,21:0.039594):0.041876,(20:0.093807,5:0.119733):0.097926):0.012687,13:0.097657):0.012997):0.000980,(((9:0.083073,(17:0.021124,18:0.023585):0.059650):0.021585,4:0.124827):0.015688,(10:0. [...]
+   tree rep.10520000 = (6:0.072848,((((11:0.019983,12:0.048859):0.049775,((((((((9:0.110834,(17:0.021394,18:0.046614):0.031919):0.027778,4:0.109553):0.003254,10:0.090384):0.002465,(15:0.029668,16:0.028946):0.037170):0.006265,7:0.087491):0.011268,(19:0.067249,14:0.106037):0.019430):0.006140,(29:0.114560,((27:0.031187,26:0.037477):0.059503,28:0.040339):0.021552):0.010021):0.008818,(((21:0.040075,((25:0.011506,(23:0.014595,24:0.010419):0.007203):0.018891,22:0.029380):0.013907):0.057772,8:0. [...]
+   tree rep.10540000 = ((3:0.158956,(((12:0.067094,11:0.028260):0.052144,((((14:0.085857,19:0.066531):0.019413,(29:0.085893,((27:0.039563,26:0.013988):0.066772,28:0.047358):0.012294):0.014321):0.011330,(((20:0.092650,5:0.080518):0.086583,13:0.085338):0.007623,(8:0.095276,(21:0.033389,(22:0.026489,((24:0.008280,23:0.010527):0.009480,25:0.008941):0.030837):0.011615):0.018475):0.020115):0.003525):0.007086,((((18:0.035488,17:0.028190):0.029110,9:0.114781):0.022794,4:0.105989):0.010684,(10:0. [...]
+   tree rep.10560000 = (((((11:0.040725,12:0.069397):0.040333,((13:0.092188,((21:0.047151,(22:0.038051,(25:0.003597,(24:0.014230,23:0.006598):0.011123):0.023018):0.007620):0.028290,8:0.063754):0.032063):0.010124,((20:0.064264,5:0.089340):0.086439,((29:0.097698,((27:0.037079,26:0.013796):0.059909,28:0.038872):0.017435):0.025972,((19:0.086463,14:0.079482):0.034802,(((7:0.087559,(15:0.044594,16:0.032390):0.046607):0.021814,((9:0.071476,(17:0.012180,18:0.042113):0.060944):0.014567,4:0.093299 [...]
+   tree rep.10580000 = ((6:0.097130,3:0.175162):0.003242,(((12:0.059832,11:0.036507):0.043208,(((((8:0.073426,(((25:0.006853,(23:0.013011,24:0.019189):0.005257):0.020975,22:0.053617):0.008089,21:0.026105):0.029898):0.042389,(5:0.088588,20:0.101087):0.079978):0.020169,13:0.080316):0.011240,((19:0.101893,14:0.072021):0.042977,(29:0.097942,((26:0.030512,27:0.029645):0.066503,28:0.043822):0.022327):0.025754):0.008943):0.016694,(4:0.150282,((((16:0.018122,15:0.035438):0.057884,7:0.095277):0.0 [...]
+   tree rep.10600000 = ((2:0.201007,((12:0.050295,11:0.031545):0.078749,(((((26:0.022032,27:0.027905):0.072185,28:0.034375):0.031536,29:0.074874):0.023418,((((9:0.089514,(18:0.028164,17:0.024617):0.060305):0.025238,4:0.158870):0.011876,((7:0.105357,(16:0.018006,15:0.040029):0.060061):0.004319,10:0.076352):0.013088):0.024462,(19:0.096768,14:0.069180):0.020935):0.002219):0.018543,((13:0.092457,((21:0.036115,(((24:0.007675,23:0.013570):0.006694,25:0.004708):0.022721,22:0.037281):0.015121):0 [...]
+   tree rep.10620000 = (6:0.084201,(3:0.145175,(2:0.192107,((12:0.064516,11:0.033112):0.063308,(((((21:0.064285,((25:0.007731,(23:0.015164,24:0.012085):0.007167):0.026756,22:0.043149):0.011273):0.035010,8:0.088622):0.034967,13:0.101704):0.003617,(5:0.085786,20:0.098958):0.084800):0.004661,((((10:0.095497,((4:0.176160,(9:0.087973,(18:0.052366,17:0.014168):0.039751):0.020935):0.011790,(16:0.017345,15:0.050522):0.071053):0.010904):0.020222,7:0.093826):0.029852,(19:0.096453,14:0.101909):0.02 [...]
+   tree rep.10640000 = (((2:0.177285,(((((8:0.081841,(((25:0.001939,(23:0.007186,24:0.011282):0.007975):0.031532,22:0.042443):0.011316,21:0.043944):0.037846):0.005762,13:0.110447):0.004444,(5:0.101464,20:0.083429):0.096010):0.008477,(((28:0.053752,(26:0.031521,27:0.042780):0.060401):0.025278,29:0.085058):0.016764,((14:0.093695,19:0.079954):0.040921,((4:0.114055,((18:0.037562,17:0.019514):0.030930,9:0.100977):0.020489):0.038083,(((16:0.027642,15:0.028176):0.063433,7:0.096124):0.004146,10: [...]
+   tree rep.10660000 = (3:0.125004,(6:0.097121,(2:0.158619,((12:0.058004,11:0.033250):0.048027,((((21:0.048881,(((24:0.012932,23:0.008746):0.001125,25:0.005706):0.027267,22:0.028272):0.026569):0.025387,8:0.066198):0.022013,(13:0.089857,(5:0.069940,20:0.088801):0.109061):0.002542):0.002998,(((28:0.037795,(26:0.030132,27:0.028230):0.082798):0.024222,29:0.080703):0.005756,((19:0.099808,14:0.118569):0.019440,(((10:0.102140,(16:0.026217,15:0.042684):0.047748):0.002384,7:0.093678):0.015629,((9 [...]
+   tree rep.10680000 = ((2:0.130685,((12:0.056245,11:0.051181):0.045343,(((13:0.080213,((21:0.040857,(22:0.044361,(25:0.004782,(23:0.008126,24:0.006291):0.001425):0.031945):0.015931):0.032719,8:0.087053):0.008935):0.000867,(5:0.105295,20:0.101075):0.056516):0.012010,((29:0.090989,((26:0.028044,27:0.030288):0.045502,28:0.052913):0.022499):0.012307,((19:0.067357,14:0.086344):0.019210,(7:0.123885,(10:0.086191,((((18:0.040839,17:0.012259):0.053298,9:0.068692):0.027280,4:0.105976):0.012434,(1 [...]
+   tree rep.10700000 = ((2:0.185589,((12:0.071532,11:0.023890):0.044187,((((((21:0.041317,((25:0.004025,(23:0.005758,24:0.013046):0.014481):0.025409,22:0.037738):0.011883):0.043282,8:0.075841):0.048863,13:0.091231):0.016278,(20:0.124935,5:0.092198):0.069044):0.012539,((14:0.068285,19:0.089173):0.041387,(((27:0.026863,26:0.020193):0.069293,28:0.057161):0.019265,29:0.085529):0.010075):0.012099):0.006304,((((15:0.025182,16:0.029123):0.058902,7:0.089499):0.011257,10:0.130766):0.006278,((9:0. [...]
+   tree rep.10720000 = ((((12:0.074652,11:0.024605):0.053615,(((((28:0.068905,(26:0.036685,27:0.022673):0.086107):0.020670,29:0.093205):0.021174,((7:0.103127,(10:0.091859,(16:0.029660,15:0.044267):0.055389):0.003377):0.018561,(4:0.119585,((18:0.050072,17:0.023625):0.048126,9:0.108271):0.034668):0.027063):0.022409):0.002160,(19:0.078171,14:0.077646):0.039680):0.014471,(((5:0.087086,20:0.107265):0.111330,13:0.117725):0.010289,(8:0.106696,(21:0.050369,(22:0.019754,(25:0.003142,(23:0.007050, [...]
+   tree rep.10740000 = (6:0.057527,((2:0.175083,((12:0.065283,11:0.041098):0.030326,(((13:0.080787,(20:0.083707,5:0.086010):0.085989):0.003878,((((25:0.014293,(24:0.010716,23:0.010107):0.006035):0.027964,22:0.024385):0.036585,21:0.047696):0.016380,8:0.078995):0.031696):0.006704,(((19:0.051517,14:0.081124):0.015310,(7:0.112250,(((((18:0.037967,17:0.026374):0.041342,9:0.090942):0.023402,4:0.128134):0.015933,10:0.095762):0.005169,(15:0.035513,16:0.014217):0.055907):0.007779):0.035926):0.011 [...]
+   tree rep.10760000 = ((3:0.164446,6:0.060341):0.010221,(((12:0.044269,11:0.038348):0.040070,(((19:0.063859,14:0.079533):0.035826,(((28:0.040893,(26:0.029545,27:0.038223):0.056448):0.009878,29:0.075003):0.010820,((7:0.086209,(5:0.114030,20:0.096856):0.074785):0.003783,((10:0.078666,(16:0.018580,15:0.040426):0.050856):0.000149,(4:0.121141,((18:0.044017,17:0.011882):0.067582,9:0.084243):0.029696):0.013098):0.013882):0.015152):0.001237):0.003409,((8:0.078810,(21:0.057652,(22:0.025526,(25:0 [...]
+   tree rep.10780000 = ((((((((5:0.089972,20:0.094183):0.082041,(8:0.094983,((((24:0.015438,23:0.009793):0.005259,25:0.008994):0.025144,22:0.040925):0.021763,21:0.051992):0.015928):0.025566):0.030075,(((((16:0.015164,15:0.050341):0.040727,7:0.117876):0.009525,10:0.087182):0.001859,(((18:0.066739,17:0.013421):0.063809,9:0.087568):0.015198,4:0.115651):0.014740):0.025890,((19:0.066616,14:0.088886):0.044457,(29:0.115236,(28:0.054585,(26:0.020953,27:0.034583):0.058971):0.030052):0.013853):0.0 [...]
+   tree rep.10800000 = (6:0.086622,(3:0.160165,(2:0.200739,(((((19:0.084190,14:0.082490):0.021781,(29:0.076150,(28:0.049874,(26:0.034342,27:0.027248):0.068099):0.028923):0.018275):0.008200,((5:0.092097,20:0.057282):0.068878,(13:0.101062,(8:0.084967,((22:0.033784,((24:0.007254,23:0.015254):0.006856,25:0.001118):0.026109):0.036653,21:0.039595):0.010855):0.027467):0.007663):0.007871):0.013795,((4:0.124743,((18:0.048625,17:0.018291):0.058167,9:0.081512):0.020686):0.016565,(10:0.110852,(7:0.0 [...]
+   tree rep.10820000 = (6:0.103026,(3:0.177054,((((5:0.089401,20:0.110699):0.080562,((((19:0.098969,14:0.105560):0.032845,((28:0.062373,(26:0.008482,27:0.044017):0.095365):0.018124,29:0.116964):0.016158):0.004647,(7:0.136685,(10:0.122596,(((9:0.107413,(18:0.042064,17:0.024378):0.058973):0.016590,4:0.182798):0.024304,(16:0.027283,15:0.039059):0.061161):0.010022):0.008755):0.028092):0.005540,((8:0.115796,(21:0.062070,(22:0.038375,((23:0.009280,24:0.013458):0.003820,25:0.007305):0.024095):0 [...]
+   tree rep.10840000 = (6:0.081028,((((12:0.068826,11:0.056077):0.046578,(((29:0.092395,((27:0.035675,26:0.030262):0.041735,28:0.044262):0.011657):0.025377,((19:0.062374,14:0.093336):0.026967,((4:0.165955,((15:0.058248,16:0.024081):0.067436,((9:0.102947,(18:0.030409,17:0.031234):0.041324):0.055244,10:0.106969):0.008304):0.002567):0.010199,7:0.094117):0.021534):0.010722):0.008403,((20:0.095288,5:0.108843):0.096529,(13:0.073544,(8:0.101666,((22:0.045959,((24:0.015795,23:0.012837):0.003035, [...]
+   tree rep.10860000 = ((3:0.169465,6:0.101052):0.000081,(((11:0.016370,12:0.071076):0.049502,(((((17:0.022566,18:0.034867):0.038566,9:0.158546):0.015167,4:0.120501):0.021706,(10:0.137461,((15:0.037303,16:0.032363):0.036898,7:0.091763):0.007243):0.002304):0.014763,(13:0.091550,((20:0.081896,5:0.108158):0.080395,((8:0.125711,((22:0.013811,((24:0.024251,23:0.005535):0.001617,25:0.012174):0.035199):0.023027,21:0.069439):0.032285):0.038520,(((28:0.046217,(27:0.046767,26:0.013382):0.067279):0 [...]
+   tree rep.10880000 = (6:0.088739,(3:0.104667,(((12:0.047595,11:0.028510):0.042178,(((((27:0.025976,26:0.021950):0.094257,28:0.054313):0.027596,29:0.092152):0.005119,((7:0.096640,(10:0.097581,(((15:0.033637,16:0.028713):0.063945,((18:0.040572,17:0.013334):0.037958,9:0.076683):0.042725):0.000703,4:0.136850):0.015894):0.008547):0.007724,(19:0.101916,14:0.120679):0.023975):0.002639):0.001446,((13:0.081985,(8:0.072784,(21:0.045945,((25:0.005017,(23:0.012970,24:0.011716):0.006539):0.017148,2 [...]
+   tree rep.10900000 = (6:0.085166,((2:0.147698,((12:0.076964,11:0.031187):0.037221,((((22:0.025938,(25:0.008576,(24:0.003820,23:0.016928):0.001252):0.019470):0.016772,21:0.032865):0.035160,8:0.091580):0.011587,(13:0.088663,(((29:0.090689,((26:0.027959,27:0.032475):0.066793,28:0.042561):0.004737):0.012994,((19:0.061374,14:0.086216):0.024927,((7:0.082084,(16:0.035982,15:0.035893):0.034820):0.011142,(((9:0.090413,(18:0.051637,17:0.015876):0.069266):0.017016,4:0.087637):0.015274,10:0.082072 [...]
+   tree rep.10920000 = ((6:0.095836,3:0.136389):0.004078,(((11:0.050610,12:0.057098):0.050621,(((((28:0.025701,(27:0.036922,26:0.022609):0.081567):0.021548,29:0.104762):0.020493,(7:0.091206,(((4:0.153259,(9:0.099338,(17:0.018332,18:0.025508):0.041184):0.032871):0.013376,(15:0.028043,16:0.028642):0.052776):0.011633,10:0.091048):0.008457):0.027222):0.002472,(19:0.067685,14:0.069198):0.014785):0.003009,((8:0.081409,(21:0.049518,(22:0.023538,((24:0.013175,23:0.013298):0.003375,25:0.012475):0 [...]
+   tree rep.10940000 = ((6:0.098980,3:0.117612):0.017185,(2:0.198373,((11:0.032704,12:0.075532):0.031861,((((26:0.025423,27:0.036725):0.059955,28:0.059155):0.017426,(((19:0.111914,14:0.096384):0.051974,29:0.075450):0.008888,((((16:0.035381,15:0.040814):0.071485,(4:0.119335,(9:0.129655,(18:0.045852,17:0.023454):0.076702):0.013279):0.006800):0.010974,10:0.106608):0.014597,7:0.123702):0.019238):0.000123):0.006465,((5:0.094946,20:0.063980):0.121234,(13:0.080602,(8:0.089836,(21:0.047311,(22:0 [...]
+   tree rep.10960000 = (6:0.096269,(3:0.145242,(2:0.202813,((12:0.090364,11:0.014761):0.082244,(((8:0.100683,((((24:0.008441,23:0.006268):0.005290,25:0.011032):0.018368,22:0.037002):0.010073,21:0.049749):0.024197):0.007788,(13:0.079172,(5:0.072452,20:0.105985):0.063409):0.007356):0.007558,(((19:0.096413,14:0.076500):0.035137,((((16:0.030734,15:0.036713):0.049165,7:0.088327):0.016348,10:0.101329):0.006281,(4:0.136293,(9:0.123464,(18:0.042986,17:0.024716):0.043809):0.024283):0.005499):0.01 [...]
+   tree rep.10980000 = ((6:0.074625,3:0.112329):0.005026,(2:0.122718,((12:0.063771,11:0.020297):0.048883,((((14:0.092170,19:0.069593):0.036251,(7:0.101396,((10:0.078201,(16:0.018205,15:0.044951):0.054652):0.001210,((9:0.078840,(18:0.031555,17:0.033680):0.034047):0.021819,4:0.127928):0.010654):0.018254):0.013877):0.000952,(29:0.085322,((26:0.024870,27:0.026160):0.073789,28:0.052848):0.032122):0.024747):0.017877,((13:0.079587,(5:0.145553,20:0.086089):0.049760):0.009709,(8:0.120208,(22:0.02 [...]
+   tree rep.11000000 = ((3:0.116725,6:0.143034):0.023813,(2:0.148061,(((((29:0.114468,(28:0.073000,(26:0.030266,27:0.036919):0.073976):0.021815):0.023086,(5:0.089332,20:0.086184):0.112379):0.006353,((10:0.101950,((4:0.158029,(9:0.109008,(18:0.040482,17:0.011731):0.033932):0.031685):0.013055,(7:0.086568,(16:0.032197,15:0.039517):0.069461):0.004986):0.001299):0.036747,(19:0.057680,14:0.095253):0.027224):0.018567):0.007111,((8:0.056920,((22:0.065603,(23:0.007421,(24:0.009454,25:0.025048):0. [...]
+   tree rep.11020000 = (3:0.134109,(6:0.053592,(((11:0.029309,12:0.054377):0.033404,(((((21:0.024640,(22:0.043551,(25:0.008524,(23:0.012509,24:0.015285):0.006945):0.025532):0.015306):0.054584,8:0.112622):0.008497,13:0.064184):0.002157,(20:0.068659,5:0.091788):0.098304):0.014590,((((((15:0.031000,16:0.026931):0.049364,7:0.097212):0.000818,(4:0.135031,(9:0.103043,(17:0.015286,18:0.025553):0.069000):0.011291):0.005576):0.011486,10:0.079098):0.017970,(19:0.067873,14:0.097622):0.019304):0.001 [...]
+   tree rep.11040000 = ((6:0.093599,(2:0.256927,((((((((16:0.020094,15:0.038610):0.067611,7:0.085664):0.024556,((9:0.115777,(18:0.030461,17:0.018998):0.052434):0.023316,4:0.126576):0.020590):0.006200,10:0.083515):0.018032,(14:0.113272,19:0.093973):0.029184):0.002706,(((26:0.023229,27:0.029991):0.049191,28:0.091806):0.022082,29:0.133533):0.012368):0.009286,(((5:0.110319,20:0.130743):0.125419,13:0.081704):0.010787,((21:0.063384,(22:0.030887,((23:0.013154,24:0.010441):0.001944,25:0.008286): [...]
+   tree rep.11060000 = (3:0.096549,(6:0.079095,(((((((26:0.030162,27:0.040483):0.054739,28:0.044906):0.020327,29:0.094430):0.010299,((14:0.066171,19:0.096422):0.039821,(7:0.114752,((16:0.023361,15:0.024042):0.050077,((((18:0.030296,17:0.026107):0.039696,9:0.084565):0.021804,4:0.092220):0.032093,10:0.104073):0.000320):0.009229):0.015953):0.011537):0.010721,((13:0.091420,(5:0.123718,20:0.085021):0.061479):0.000933,(((22:0.027505,(25:0.002505,(23:0.007694,24:0.006909):0.007085):0.021029):0. [...]
+   tree rep.11080000 = ((2:0.168481,((13:0.119287,((((22:0.051621,((24:0.018795,23:0.006182):0.019412,25:0.001362):0.022716):0.005794,21:0.039352):0.018386,8:0.087758):0.019484,((5:0.047478,20:0.102635):0.095143,((29:0.111257,((26:0.035619,27:0.034666):0.052679,28:0.051941):0.014634):0.019229,((14:0.111245,19:0.074785):0.020942,((((16:0.018680,15:0.024272):0.052718,7:0.072236):0.009565,10:0.090143):0.008637,(4:0.107333,(9:0.102620,(18:0.044538,17:0.017453):0.049136):0.039692):0.003280):0 [...]
+   tree rep.11100000 = (((2:0.158881,(((((5:0.060166,20:0.119033):0.079220,13:0.116448):0.010985,(8:0.069148,(21:0.043501,(22:0.052644,((23:0.007743,24:0.011472):0.002273,25:0.019558):0.018365):0.014233):0.048710):0.052570):0.003378,((10:0.116488,(7:0.105312,((16:0.030519,15:0.033232):0.062602,(((18:0.038742,17:0.025959):0.067827,9:0.074457):0.015665,4:0.137561):0.028144):0.016580):0.008758):0.023871,((19:0.103250,14:0.119009):0.018784,((28:0.053676,(26:0.024057,27:0.035156):0.072832):0. [...]
+   tree rep.11120000 = ((6:0.075687,(((11:0.025965,12:0.081611):0.031596,((((20:0.095410,5:0.104776):0.117945,(29:0.112312,((27:0.020934,26:0.038706):0.053263,28:0.045538):0.028277):0.014532):0.004765,((((7:0.069107,(15:0.031539,16:0.029289):0.042626):0.006886,(4:0.131426,(9:0.108921,(17:0.020653,18:0.052727):0.036908):0.012246):0.012168):0.005288,10:0.091303):0.013598,(19:0.079970,14:0.088823):0.039347):0.009358):0.006105,(((21:0.055757,(22:0.018108,((24:0.015857,25:0.009580):0.001734,2 [...]
+   tree rep.11140000 = ((6:0.064403,3:0.105969):0.021758,(2:0.152402,((12:0.054637,11:0.042902):0.044797,(((((23:0.007203,(25:0.002382,24:0.016691):0.003464):0.015620,22:0.047030):0.017798,21:0.054563):0.029839,8:0.080694):0.024982,((13:0.097427,(20:0.071585,5:0.089566):0.090926):0.007070,(((29:0.058663,((27:0.020080,26:0.019903):0.066095,28:0.054022):0.014791):0.022447,((((18:0.053827,17:0.017447):0.047728,9:0.070296):0.018858,4:0.115180):0.021522,(10:0.070642,((15:0.040057,16:0.019882) [...]
+   tree rep.11160000 = ((3:0.121947,(((11:0.023351,12:0.071821):0.030042,(((8:0.105787,(21:0.039031,(22:0.050159,(25:0.007135,(24:0.015832,23:0.016079):0.008508):0.027804):0.008853):0.058758):0.023149,(13:0.081161,(20:0.106743,5:0.094251):0.092663):0.016730):0.002028,((29:0.097684,((27:0.035000,26:0.023403):0.076506,28:0.053219):0.017824):0.015824,((19:0.064840,14:0.074325):0.042138,((10:0.112071,((15:0.035687,16:0.016138):0.056989,7:0.110565):0.005617):0.002671,(4:0.170838,(9:0.103858,( [...]
+   tree rep.11180000 = ((3:0.131144,6:0.134141):0.003584,(((12:0.079442,11:0.029726):0.049785,((8:0.096651,((22:0.055787,(25:0.005903,(23:0.006399,24:0.010815):0.008173):0.019267):0.023973,21:0.045837):0.016897):0.032023,((((14:0.095833,19:0.077335):0.039916,(20:0.105820,5:0.094717):0.105696):0.005110,(29:0.100004,((27:0.024719,26:0.024883):0.066995,28:0.048331):0.023959):0.031010):0.001977,(((10:0.084409,((15:0.037905,16:0.033145):0.044417,7:0.082806):0.007877):0.003158,((9:0.132385,(18 [...]
+   tree rep.11200000 = (((((((((23:0.012048,24:0.006744):0.006762,25:0.003312):0.017170,22:0.020877):0.006315,21:0.048302):0.029102,8:0.088195):0.031827,((13:0.096729,(5:0.110184,20:0.105105):0.085666):0.005745,(((29:0.111920,(28:0.045525,(26:0.035777,27:0.034674):0.073750):0.029124):0.007429,(((7:0.104882,(16:0.019460,15:0.030410):0.041482):0.005445,(4:0.173662,(9:0.094660,(18:0.039118,17:0.022969):0.038688):0.036037):0.009241):0.004949,10:0.117630):0.024728):0.007158,(19:0.065947,14:0. [...]
+   tree rep.11220000 = (6:0.076864,(3:0.166642,(((11:0.040106,12:0.088710):0.068545,(((((10:0.099575,((15:0.035015,16:0.024072):0.050551,(((17:0.021041,18:0.046871):0.055020,9:0.114811):0.023414,4:0.104504):0.017867):0.000075):0.009508,7:0.073831):0.032856,((29:0.118918,((27:0.047306,26:0.019738):0.071688,28:0.052743):0.019190):0.023636,(19:0.061713,14:0.060573):0.022898):0.000427):0.007254,13:0.096425):0.000653,((8:0.121771,(((25:0.001146,(24:0.010453,23:0.011375):0.007582):0.023419,22: [...]
+   tree rep.11240000 = ((2:0.174323,(((((((21:0.049972,((25:0.003235,(23:0.024688,24:0.010301):0.009049):0.029257,22:0.034748):0.003864):0.031552,8:0.077528):0.043621,(5:0.110558,20:0.091360):0.079289):0.003689,13:0.095962):0.012523,((19:0.083873,14:0.070980):0.017954,(29:0.066230,((26:0.027332,27:0.051485):0.072657,28:0.033664):0.034635):0.012716):0.016968):0.001170,((4:0.111889,(9:0.098081,(18:0.055537,17:0.008954):0.050459):0.006105):0.020446,((7:0.125081,(16:0.019060,15:0.038830):0.0 [...]
+   tree rep.11260000 = ((2:0.186818,((12:0.058959,11:0.029014):0.035506,(((29:0.117470,(28:0.064372,(26:0.030540,27:0.030694):0.087715):0.033317):0.012190,((19:0.110786,14:0.116885):0.023998,((((18:0.030640,17:0.021599):0.051632,9:0.107172):0.029519,4:0.117251):0.001406,(10:0.082352,((16:0.024982,15:0.029667):0.059391,7:0.083164):0.007854):0.006831):0.040617):0.009697):0.007722,((5:0.128452,20:0.116016):0.079565,(13:0.086523,(((((23:0.008294,24:0.006209):0.007180,25:0.002278):0.059867,22 [...]
+   tree rep.11280000 = ((2:0.204347,((12:0.063790,11:0.049094):0.024501,((((((25:0.005361,(23:0.006678,24:0.006105):0.001666):0.015892,22:0.036468):0.024373,21:0.045402):0.046081,8:0.088557):0.027289,((5:0.105871,20:0.092950):0.110903,13:0.084498):0.001776):0.004723,((29:0.125017,(28:0.069635,(26:0.034313,27:0.029228):0.067005):0.022219):0.022111,((19:0.116794,14:0.080151):0.021319,((((4:0.114936,((18:0.050501,17:0.022485):0.040241,9:0.121148):0.030662):0.005490,(16:0.026870,15:0.053109) [...]
+   tree rep.11300000 = (((((12:0.082592,11:0.024774):0.067398,((((29:0.105891,(19:0.081745,14:0.083045):0.046600):0.003513,(((((18:0.035232,17:0.019935):0.054874,9:0.101129):0.027217,4:0.121779):0.004787,((16:0.017298,15:0.053203):0.045587,7:0.098596):0.013298):0.002379,10:0.084672):0.010989):0.008444,(28:0.036715,(26:0.023593,27:0.039406):0.093395):0.022625):0.007364,(((21:0.029101,(((23:0.003062,24:0.008734):0.006704,25:0.001664):0.023589,22:0.035603):0.018909):0.030737,8:0.108950):0.0 [...]
+   tree rep.11320000 = ((6:0.100865,((((((9:0.105213,(17:0.017064,18:0.035028):0.075210):0.016865,4:0.116238):0.007353,(((15:0.017955,16:0.047734):0.042930,7:0.088322):0.014442,10:0.114200):0.005938):0.016497,(((8:0.107484,(21:0.052305,(((24:0.018831,23:0.010813):0.002980,25:0.003962):0.034307,22:0.029130):0.016030):0.033386):0.015863,13:0.120863):0.002927,(((19:0.096442,14:0.061693):0.050377,((28:0.030204,(27:0.037775,26:0.027785):0.088317):0.012206,29:0.099251):0.009627):0.014055,(20:0 [...]
+   tree rep.11340000 = ((2:0.243441,(((((((22:0.039854,((25:0.018775,24:0.014776):0.002225,23:0.004559):0.014649):0.011412,21:0.050488):0.002610,8:0.109439):0.025176,(5:0.087816,20:0.113348):0.094174):0.019588,13:0.098591):0.009854,((7:0.112443,(((4:0.122515,(9:0.103582,(18:0.035872,17:0.012595):0.055485):0.015000):0.010128,(16:0.016380,15:0.050740):0.069171):0.001137,10:0.085746):0.011366):0.019495,((29:0.115717,((26:0.026710,27:0.031569):0.060395,28:0.045894):0.007807):0.015869,(19:0.0 [...]
+   tree rep.11360000 = ((((((13:0.072003,((20:0.088842,5:0.082331):0.082122,(29:0.125920,(((10:0.083440,((((18:0.036860,17:0.019263):0.054708,9:0.108920):0.020100,4:0.128923):0.010760,((15:0.041150,16:0.016880):0.069207,7:0.069889):0.007460):0.003207):0.024771,(14:0.088306,19:0.099380):0.047377):0.014273,(28:0.065206,(27:0.043678,26:0.021578):0.059646):0.033188):0.000825):0.008388):0.003923):0.005069,((21:0.048684,((25:0.013323,(23:0.010513,24:0.011381):0.006463):0.035435,22:0.038510):0. [...]
+   tree rep.11380000 = ((((((((20:0.097718,5:0.084978):0.095887,13:0.084623):0.004176,(((7:0.078032,(((15:0.042387,16:0.031114):0.068089,10:0.086270):0.006634,(((18:0.039955,17:0.027835):0.039831,9:0.103799):0.029956,4:0.120270):0.010399):0.003874):0.015545,((28:0.049008,(27:0.030542,26:0.028053):0.053202):0.035317,29:0.091666):0.020341):0.003610,(14:0.076516,19:0.081311):0.037246):0.008998):0.007184,((21:0.032221,((25:0.012832,(24:0.008299,23:0.007953):0.001690):0.019679,22:0.031796):0. [...]
+   tree rep.11400000 = (6:0.089163,(((((((((25:0.007171,(23:0.005678,24:0.012931):0.014338):0.036458,22:0.024496):0.027879,21:0.053218):0.023009,8:0.088279):0.019740,(5:0.091821,20:0.097712):0.114496):0.000064,(13:0.102450,((10:0.108301,(((16:0.021761,15:0.035888):0.068759,((9:0.107975,(18:0.052373,17:0.019172):0.035590):0.018516,4:0.151133):0.008830):0.004078,7:0.089903):0.004806):0.032149,((19:0.097579,14:0.070424):0.030582,(29:0.094492,(28:0.051910,(26:0.022642,27:0.032862):0.094413): [...]
+   tree rep.11420000 = ((2:0.146620,((12:0.051656,11:0.023967):0.041181,((10:0.096751,(((16:0.013957,15:0.042001):0.051747,7:0.128242):0.022497,(((18:0.034736,17:0.024594):0.022411,9:0.152513):0.048728,4:0.148357):0.010773):0.007092):0.018860,(((8:0.103695,(21:0.051823,((25:0.001745,(23:0.012583,24:0.015571):0.007757):0.018450,22:0.041066):0.018930):0.043394):0.017234,((5:0.099769,20:0.077214):0.093632,13:0.094864):0.006203):0.012996,((19:0.098187,14:0.110209):0.041233,((28:0.046277,(26: [...]
+   tree rep.11440000 = ((2:0.133735,((13:0.104114,((((((15:0.036217,16:0.024207):0.035641,7:0.104341):0.015291,(4:0.159968,(9:0.078493,(17:0.024398,18:0.045044):0.062569):0.029307):0.013152):0.002415,10:0.077897):0.026371,(((28:0.048219,(27:0.025036,26:0.027074):0.066720):0.038439,29:0.091608):0.000086,(19:0.053751,14:0.107761):0.056209):0.002407):0.007745,((20:0.081707,5:0.136615):0.094508,(8:0.102254,((((23:0.009305,24:0.008853):0.008587,25:0.005605):0.032846,22:0.023801):0.030976,21:0 [...]
+   tree rep.11460000 = (6:0.097952,(3:0.099531,(2:0.156576,((12:0.064099,11:0.023002):0.060526,((((19:0.066233,14:0.094543):0.023702,(((29:0.089544,((26:0.036303,27:0.022834):0.066281,28:0.046963):0.033337):0.006706,(((7:0.107687,(16:0.019009,15:0.045995):0.044536):0.008510,(4:0.116301,((18:0.041118,17:0.020862):0.046013,9:0.099066):0.030728):0.008870):0.002619,10:0.075535):0.017729):0.005545,(5:0.070773,20:0.095267):0.087802):0.000851):0.015678,(8:0.081416,((22:0.045583,(25:0.007611,(23 [...]
+   tree rep.11480000 = (6:0.089859,(3:0.143183,(((12:0.072858,11:0.045221):0.041725,((13:0.062350,((21:0.032600,(22:0.018996,(25:0.015269,(23:0.009501,24:0.009121):0.001305):0.029802):0.021130):0.024193,8:0.065691):0.012482):0.006970,(((((18:0.025386,17:0.034017):0.052103,9:0.106986):0.011039,4:0.133968):0.003554,(10:0.104158,((15:0.034684,16:0.032800):0.052090,7:0.088333):0.010758):0.002566):0.037108,(((29:0.086185,((27:0.037297,26:0.022844):0.066430,28:0.044161):0.042119):0.000632,(20: [...]
+   tree rep.11500000 = ((6:0.121552,(((12:0.069520,11:0.015395):0.036174,((((21:0.053551,((25:0.005244,(23:0.007574,24:0.011062):0.006207):0.026189,22:0.016986):0.014282):0.041492,8:0.110360):0.026628,((5:0.111274,20:0.083190):0.111132,13:0.086144):0.004797):0.007039,((29:0.104961,((26:0.018152,27:0.041402):0.088657,28:0.047070):0.028134):0.022464,(((((16:0.024242,15:0.038184):0.044950,7:0.132156):0.013383,(4:0.140811,((18:0.031539,17:0.021849):0.056672,9:0.133536):0.013205):0.007334):0. [...]
+   tree rep.11520000 = ((6:0.076359,3:0.125311):0.004015,(2:0.171177,((12:0.064211,11:0.023221):0.041299,(((13:0.067485,(5:0.101612,20:0.114167):0.069998):0.005144,(((((23:0.004806,24:0.010126):0.008048,25:0.002567):0.027425,22:0.056603):0.024304,21:0.038076):0.037632,8:0.079024):0.015873):0.005459,(((28:0.047720,(26:0.013744,27:0.033307):0.069216):0.017969,29:0.070052):0.006079,((14:0.104403,19:0.063092):0.022031,(7:0.115014,(((16:0.022109,15:0.035939):0.066688,10:0.097727):0.001298,(4: [...]
+   tree rep.11540000 = ((2:0.154424,((12:0.092170,11:0.009965):0.044994,((((22:0.025215,((24:0.012400,23:0.008823):0.005685,25:0.002799):0.038622):0.026490,21:0.035760):0.009530,8:0.097658):0.032683,((13:0.108568,((29:0.094873,(28:0.071857,(26:0.014262,27:0.041610):0.058107):0.017700):0.025013,((7:0.092989,(((9:0.118467,(18:0.030790,17:0.011961):0.033835):0.034819,4:0.128585):0.008046,(10:0.118086,(16:0.033788,15:0.035260):0.058659):0.004381):0.009434):0.016395,(14:0.086811,19:0.077403): [...]
+   tree rep.11560000 = ((6:0.088554,3:0.113540):0.008282,((((((29:0.066652,(28:0.060955,(27:0.030708,26:0.016596):0.050672):0.016535):0.009130,(19:0.076151,14:0.084195):0.023494):0.008317,((((((24:0.008159,23:0.010402):0.004164,25:0.004175):0.024246,22:0.029378):0.010395,21:0.031130):0.046090,8:0.092896):0.022826,(13:0.083850,(20:0.096128,5:0.130640):0.095152):0.003358):0.005655):0.017343,((10:0.101732,((15:0.028340,16:0.039489):0.042058,7:0.092350):0.015347):0.007905,(((18:0.027310,17:0 [...]
+   tree rep.11580000 = ((3:0.116856,6:0.097459):0.034718,(2:0.172217,(((((((14:0.078251,19:0.080420):0.041845,((((16:0.033162,15:0.034643):0.047375,(4:0.120324,((18:0.051513,17:0.009728):0.054036,9:0.127087):0.018095):0.021482):0.020951,10:0.097347):0.014648,7:0.099130):0.021845):0.014526,(29:0.074603,((26:0.035097,27:0.036191):0.090791,28:0.055777):0.019397):0.021705):0.010265,13:0.136965):0.001908,(5:0.101266,20:0.088063):0.112993):0.012389,(8:0.090226,(((25:0.003604,(23:0.010918,24:0. [...]
+   tree rep.11600000 = ((2:0.155270,(((((((26:0.010696,27:0.057995):0.082353,28:0.050428):0.014113,29:0.082283):0.028715,(((((16:0.026838,15:0.035434):0.062293,(4:0.146034,(9:0.098965,(18:0.034726,17:0.024004):0.043486):0.015673):0.012627):0.002192,10:0.084669):0.019743,7:0.094102):0.008862,(14:0.113350,19:0.059645):0.036766):0.007656):0.020626,(13:0.108575,(5:0.070305,20:0.092347):0.109906):0.010467):0.002444,(8:0.093089,(((23:0.007392,24:0.008616):0.008343,25:0.013316):0.016309,(22:0.0 [...]
+   tree rep.11620000 = ((3:0.114444,6:0.100272):0.007594,(2:0.144791,((((((4:0.162211,(9:0.095287,(18:0.038147,17:0.007705):0.041554):0.026654):0.006655,((7:0.097377,(16:0.050192,15:0.022273):0.047343):0.014485,10:0.109774):0.004014):0.029139,(14:0.083650,19:0.065002):0.020682):0.004031,(29:0.117682,((26:0.020716,27:0.031614):0.074663,28:0.049657):0.022056):0.013131):0.010074,(((5:0.075286,20:0.097598):0.099478,13:0.082576):0.003952,((21:0.048915,((25:0.004344,(23:0.008455,24:0.010235):0 [...]
+   tree rep.11640000 = ((6:0.090387,((((((28:0.057638,(27:0.023932,26:0.036644):0.057915):0.024140,29:0.101301):0.028368,((19:0.086826,14:0.113476):0.017355,(((7:0.121865,(15:0.036653,16:0.028806):0.047487):0.011029,10:0.108590):0.005192,(((18:0.042710,17:0.018301):0.046670,9:0.078296):0.026530,4:0.132161):0.013910):0.030185):0.007327):0.012162,((((((24:0.019753,23:0.007563):0.004608,25:0.002884):0.020988,22:0.044387):0.013205,21:0.048104):0.030885,8:0.135559):0.028407,(13:0.092788,(20:0 [...]
+   tree rep.11660000 = ((((11:0.026392,12:0.061740):0.042925,((8:0.113712,(22:0.033035,(((23:0.015035,24:0.006649):0.018873,25:0.003188):0.029250,21:0.092747):0.006007):0.053520):0.042051,(13:0.076403,((((((27:0.039616,26:0.017266):0.070465,28:0.046176):0.018569,29:0.109223):0.017677,(14:0.096085,19:0.089835):0.037060):0.008103,((10:0.102350,((15:0.059152,16:0.019906):0.069335,7:0.086461):0.012168):0.008602,(((18:0.043536,17:0.007769):0.041505,9:0.107663):0.014670,4:0.135415):0.006599):0 [...]
+   tree rep.11680000 = (((((((16:0.024089,15:0.035658):0.051046,7:0.081471):0.006256,(((9:0.145857,(18:0.028520,17:0.019131):0.052679):0.009795,4:0.161934):0.014031,10:0.094249):0.006950):0.028127,((((((22:0.031195,((23:0.009466,24:0.013705):0.004420,25:0.007735):0.023487):0.019921,21:0.052484):0.030207,8:0.100260):0.031914,13:0.100407):0.012428,((((26:0.028199,27:0.032011):0.091467,28:0.035907):0.030140,29:0.087530):0.007366,(14:0.083260,19:0.097409):0.041015):0.009931):0.001878,(5:0.10 [...]
+   tree rep.11700000 = ((6:0.093396,((((((((28:0.046071,(27:0.029988,26:0.021512):0.054454):0.012335,29:0.096857):0.009928,((((9:0.110632,(18:0.036083,17:0.010278):0.036867):0.022206,4:0.152198):0.008848,((15:0.031500,16:0.029339):0.041736,7:0.108632):0.001861):0.015891,10:0.094850):0.027653):0.000470,(19:0.073927,14:0.092508):0.028121):0.007837,(8:0.121296,(21:0.043306,((25:0.013176,(24:0.007770,23:0.014808):0.001856):0.031438,22:0.022613):0.010894):0.051066):0.023387):0.001883,((20:0.0 [...]
+   tree rep.11720000 = (((2:0.174788,((12:0.064931,11:0.027536):0.046808,((((19:0.059266,14:0.121300):0.011953,(((10:0.131712,(15:0.037939,16:0.035304):0.046186):0.010380,(4:0.113856,(9:0.099540,(18:0.037655,17:0.010101):0.058554):0.021744):0.014606):0.018367,7:0.096736):0.013051):0.006272,(((27:0.029689,26:0.021314):0.080365,28:0.049125):0.013715,29:0.092924):0.009631):0.012882,((8:0.068255,(21:0.047234,(22:0.027647,((24:0.013306,23:0.008854):0.006293,25:0.009853):0.014822):0.009559):0. [...]
+   tree rep.11740000 = (((2:0.210595,(((((8:0.083452,(21:0.051704,((25:0.006845,(23:0.007267,24:0.006430):0.011787):0.021179,22:0.033384):0.011199):0.055842):0.033950,13:0.107019):0.006727,(5:0.109113,20:0.083586):0.118027):0.000488,(((((9:0.127923,(18:0.042049,17:0.025849):0.028391):0.011885,4:0.114352):0.009496,(((16:0.056132,15:0.017080):0.073576,7:0.093133):0.010032,10:0.145603):0.002707):0.017647,(14:0.106782,19:0.091004):0.028996):0.011731,(29:0.127109,(28:0.046658,(26:0.030208,27: [...]
+   tree rep.11760000 = ((((12:0.078987,11:0.031824):0.074371,((((9:0.162549,(18:0.044919,17:0.026771):0.045010):0.027679,4:0.111990):0.012685,((7:0.100520,(15:0.031636,16:0.024444):0.053139):0.007822,10:0.130626):0.014200):0.023919,((((28:0.035242,(27:0.032555,26:0.035350):0.071817):0.024639,29:0.104351):0.013658,(19:0.095241,14:0.128137):0.012235):0.007745,(((20:0.067953,5:0.107348):0.099498,(8:0.090204,(21:0.042204,(22:0.038949,(25:0.008015,(23:0.016934,24:0.012669):0.014360):0.050048) [...]
+   tree rep.11780000 = (((((((5:0.113662,20:0.086730):0.060782,13:0.086737):0.001202,((((((16:0.016362,15:0.040533):0.034258,7:0.126370):0.009789,10:0.085993):0.008810,((9:0.123848,(18:0.042555,17:0.018663):0.053072):0.018993,4:0.139721):0.007057):0.015902,(14:0.096563,19:0.109195):0.021100):0.012007,((28:0.039226,(26:0.021823,27:0.046923):0.095355):0.020745,29:0.087840):0.018009):0.009176):0.010698,(((22:0.034309,((24:0.006164,23:0.004995):0.005715,25:0.013057):0.023891):0.014273,21:0.0 [...]
+   tree rep.11800000 = ((3:0.149006,6:0.054737):0.010077,(2:0.190071,((((13:0.095039,(20:0.090461,5:0.100834):0.095340):0.005192,(((((24:0.017614,25:0.015412):0.004219,23:0.009533):0.037271,22:0.029423):0.017307,21:0.043243):0.041678,8:0.073499):0.030310):0.004491,((((27:0.021678,26:0.033048):0.076696,28:0.038854):0.030117,29:0.108955):0.009211,(((10:0.104211,(7:0.120051,(15:0.041157,16:0.025222):0.042081):0.006557):0.004158,(4:0.121168,(9:0.105106,(18:0.043751,17:0.029154):0.040693):0.0 [...]
+   tree rep.11820000 = ((2:0.191458,(((((((24:0.014584,23:0.009969):0.007961,25:0.002591):0.031025,22:0.042619):0.012461,21:0.046888):0.046637,8:0.076360):0.018916,((13:0.096222,(20:0.073707,5:0.099064):0.093690):0.006470,(((((15:0.051445,16:0.020910):0.055071,7:0.123053):0.007286,((9:0.110011,(18:0.031426,17:0.019692):0.038628):0.036496,4:0.146372):0.010055):0.001763,10:0.092000):0.019601,((29:0.097259,((27:0.036684,26:0.014945):0.060780,28:0.060867):0.024017):0.008341,(19:0.053601,14:0 [...]
+   tree rep.11840000 = ((2:0.160738,((((13:0.082989,(20:0.117203,5:0.110232):0.062757):0.002040,(((((23:0.011071,24:0.005180):0.013113,25:0.011880):0.029079,22:0.037472):0.015783,21:0.057125):0.025274,8:0.081668):0.013893):0.009141,(((((10:0.084400,(15:0.041778,16:0.017127):0.098779):0.015508,((9:0.110963,(18:0.036971,17:0.017279):0.044998):0.035655,4:0.118392):0.010813):0.013899,7:0.087370):0.022257,(29:0.104149,((27:0.032424,26:0.046228):0.064430,28:0.043648):0.050381):0.007006):0.0042 [...]
+   tree rep.11860000 = (6:0.080872,(3:0.149673,(2:0.247025,((((8:0.134662,((((23:0.006466,24:0.018690):0.008694,25:0.006864):0.032395,22:0.028328):0.010567,21:0.070725):0.020371):0.032355,((5:0.106678,20:0.145795):0.092587,13:0.084003):0.000242):0.001268,(((29:0.088526,(28:0.060964,(26:0.031193,27:0.032102):0.047400):0.024869):0.001889,((((16:0.026161,15:0.035851):0.068019,((9:0.095805,(18:0.038009,17:0.020803):0.042794):0.020493,4:0.164919):0.018960):0.006210,10:0.105349):0.005783,7:0.0 [...]
+   tree rep.11880000 = ((2:0.200807,((11:0.024864,12:0.062160):0.038728,((((19:0.112149,14:0.089694):0.042191,(((26:0.018427,27:0.036705):0.059034,28:0.038240):0.011084,29:0.093063):0.016844):0.010740,(10:0.108594,((7:0.116522,(16:0.013280,15:0.052701):0.060785):0.011262,(4:0.134078,(9:0.139339,(18:0.036573,17:0.038236):0.020174):0.022748):0.048504):0.010494):0.026969):0.011556,((13:0.132477,(5:0.085346,20:0.136824):0.108732):0.003175,(((22:0.026446,21:0.074375):0.002257,(25:0.009334,(23 [...]
+   tree rep.11900000 = ((3:0.101007,6:0.084779):0.017974,(2:0.140452,(((((21:0.042957,(22:0.053276,((24:0.008301,23:0.003509):0.008097,25:0.004626):0.022095):0.004067):0.018318,8:0.068574):0.024571,(20:0.089114,5:0.092941):0.117317):0.001246,(((((((9:0.068502,(18:0.045838,17:0.012269):0.050857):0.018656,4:0.121117):0.019608,(7:0.112502,(15:0.026056,16:0.018382):0.053988):0.008944):0.005712,10:0.055839):0.023612,(19:0.102068,14:0.094834):0.017304):0.003323,(29:0.073071,((27:0.037934,26:0. [...]
+   tree rep.11920000 = (((((((((25:0.001027,(23:0.005444,24:0.027943):0.012801):0.022422,22:0.046438):0.035055,21:0.048959):0.035482,8:0.120277):0.014261,13:0.099893):0.005193,((20:0.086237,5:0.106346):0.081636,(((19:0.073908,14:0.091051):0.043464,(((4:0.102654,((18:0.037120,17:0.018861):0.047523,9:0.150810):0.025981):0.006507,((15:0.034268,16:0.026208):0.047121,10:0.094331):0.006891):0.012441,7:0.123477):0.010253):0.003155,(((27:0.032358,26:0.028256):0.060720,28:0.042718):0.019892,29:0. [...]
+   tree rep.11940000 = ((6:0.095205,(((((((26:0.015398,27:0.055302):0.060209,28:0.036255):0.028570,29:0.117089):0.022082,((19:0.090940,14:0.092135):0.033116,((10:0.086731,((((18:0.045710,17:0.041404):0.031866,9:0.117960):0.015852,4:0.143577):0.018225,(16:0.030524,15:0.039974):0.041210):0.014661):0.009296,7:0.117516):0.014351):0.002228):0.009069,(((21:0.060854,(22:0.020847,(25:0.010275,(24:0.014080,23:0.009208):0.015198):0.022188):0.013490):0.032910,8:0.103117):0.072452,((5:0.075850,20:0. [...]
+   tree rep.11960000 = ((6:0.080226,3:0.112653):0.009986,(((11:0.046213,12:0.078473):0.072583,((13:0.088048,((21:0.031242,(((23:0.005865,24:0.008003):0.015462,25:0.007900):0.010639,22:0.024289):0.013547):0.038001,8:0.109248):0.011259):0.003266,(((19:0.086668,14:0.076333):0.035126,((29:0.105585,(28:0.051843,(27:0.026007,26:0.039356):0.047943):0.015053):0.010775,(7:0.090945,((((9:0.112728,(18:0.042883,17:0.013805):0.038106):0.018864,4:0.145187):0.005304,10:0.089940):0.004699,(15:0.037427,1 [...]
+   tree rep.11980000 = ((6:0.087520,(2:0.113588,((((13:0.092025,(5:0.099726,20:0.096491):0.083581):0.006209,((((9:0.131490,(18:0.033575,17:0.019776):0.039718):0.033952,4:0.128604):0.010227,((7:0.115644,(16:0.017078,15:0.034810):0.071375):0.003750,10:0.092383):0.000788):0.031751,((((26:0.017640,27:0.042461):0.077128,28:0.043626):0.017983,29:0.102746):0.001880,(14:0.087212,19:0.101049):0.032312):0.011615):0.020447):0.003335,((21:0.033608,((25:0.004197,(23:0.006791,24:0.008346):0.005357):0. [...]
+   tree rep.12000000 = ((6:0.059208,3:0.143650):0.006973,(2:0.174779,((12:0.061399,11:0.019223):0.050893,((((13:0.108507,(20:0.116383,5:0.129412):0.078993):0.000730,(((22:0.026046,((24:0.003387,23:0.022213):0.005862,25:0.002284):0.018591):0.019148,21:0.034497):0.052543,8:0.089982):0.034706):0.000484,((19:0.080386,14:0.084397):0.023408,((28:0.041347,(27:0.041995,26:0.005390):0.069948):0.010299,29:0.120191):0.012266):0.007242):0.011157,(((10:0.086905,(15:0.048059,16:0.013166):0.054790):0.0 [...]
+   tree rep.12020000 = (6:0.106339,(3:0.154367,((((((5:0.113526,20:0.123402):0.099991,((10:0.124808,(((9:0.136580,(18:0.039612,17:0.024809):0.060239):0.039412,4:0.134277):0.009736,((16:0.029123,15:0.050780):0.042022,7:0.103804):0.009460):0.006059):0.019479,((29:0.118969,(28:0.034053,(26:0.022775,27:0.044063):0.045900):0.030888):0.043547,(14:0.102143,19:0.088491):0.052616):0.005303):0.022100):0.000315,13:0.103121):0.003103,(8:0.091347,(((25:0.006378,(23:0.012087,24:0.011110):0.000924):0.0 [...]
+   tree rep.12040000 = ((6:0.096941,3:0.148429):0.018534,(((11:0.023102,12:0.066363):0.070827,((29:0.111189,(((19:0.075666,14:0.072534):0.036495,((((((18:0.026624,17:0.026320):0.044391,9:0.103525):0.022477,4:0.124405):0.011327,(15:0.043046,16:0.016299):0.053374):0.004253,10:0.105820):0.019265,7:0.087595):0.016531):0.008252,(28:0.043087,(27:0.040131,26:0.022268):0.056766):0.024689):0.000507):0.009585,((8:0.094797,((((24:0.007463,23:0.009458):0.006963,25:0.002848):0.056711,22:0.021770):0.0 [...]
+   tree rep.12060000 = ((6:0.082228,3:0.139728):0.014981,(2:0.196549,((11:0.034417,12:0.079299):0.059503,((((7:0.098255,((((9:0.091635,(18:0.040352,17:0.022614):0.048099):0.031748,4:0.119777):0.029643,(15:0.044261,16:0.018913):0.051407):0.004581,10:0.099353):0.008389):0.035951,(14:0.079823,19:0.077390):0.025527):0.007820,((28:0.046056,(27:0.034099,26:0.012181):0.074684):0.028161,29:0.091204):0.004047):0.032620,(13:0.126083,((8:0.083114,(((25:0.009741,(24:0.006859,23:0.012663):0.007574):0 [...]
+   tree rep.12080000 = (((2:0.229712,(((((21:0.054327,(22:0.034684,(25:0.008627,(24:0.014963,23:0.016400):0.006846):0.024769):0.012768):0.024057,8:0.133748):0.026862,((5:0.082135,20:0.072816):0.077519,13:0.077890):0.009948):0.003122,(((28:0.028448,(26:0.038971,27:0.025083):0.077567):0.043866,29:0.092674):0.022270,((14:0.085043,19:0.086156):0.028364,((((9:0.110387,(18:0.044099,17:0.023256):0.041054):0.021978,4:0.125886):0.009878,((16:0.028662,15:0.028233):0.058675,10:0.093800):0.016375):0 [...]
+   tree rep.12100000 = ((((11:0.021594,12:0.078759):0.054127,((13:0.097928,((14:0.112231,19:0.087657):0.020008,(((20:0.087621,5:0.104680):0.117477,(10:0.119687,((7:0.112183,(15:0.028501,16:0.023767):0.037286):0.009811,((9:0.101551,(18:0.045507,17:0.018497):0.046110):0.017114,4:0.109785):0.024478):0.009129):0.020315):0.008338,(29:0.076143,(28:0.049853,(27:0.032914,26:0.021123):0.066890):0.019310):0.011263):0.002896):0.003306):0.001755,(8:0.085883,((22:0.024217,((24:0.013666,23:0.012252):0 [...]
+   tree rep.12120000 = ((3:0.130694,6:0.064931):0.005073,(2:0.210323,((12:0.083149,11:0.044622):0.036878,((((29:0.100611,(28:0.057266,(27:0.051067,26:0.009820):0.068869):0.027404):0.003913,(((((15:0.029953,16:0.027195):0.064599,10:0.099111):0.011705,(4:0.141650,((18:0.040139,17:0.015299):0.039165,9:0.117899):0.023430):0.018418):0.015490,7:0.085343):0.022381,(19:0.088553,14:0.079280):0.035281):0.022939):0.020169,(13:0.099045,(20:0.079795,5:0.079745):0.061107):0.002026):0.000412,(8:0.09225 [...]
+   tree rep.12140000 = (6:0.133895,(3:0.090498,(2:0.147583,((((29:0.084438,((26:0.034562,27:0.028421):0.075841,28:0.053160):0.005951):0.011739,((14:0.062201,19:0.067758):0.021695,(7:0.096650,(10:0.084765,((4:0.125255,(9:0.072786,(18:0.032989,17:0.027649):0.038083):0.028840):0.016778,(16:0.034617,15:0.040096):0.050723):0.004498):0.013050):0.017501):0.001099):0.004233,(((5:0.121321,20:0.085507):0.067863,13:0.080844):0.012595,(((22:0.040041,(25:0.007101,(23:0.014846,24:0.007154):0.002510):0 [...]
+   tree rep.12160000 = (((2:0.237367,((12:0.099834,11:0.006596):0.080046,(((8:0.071428,(21:0.046028,(22:0.024649,(25:0.008977,(24:0.010637,23:0.008343):0.002718):0.023833):0.044371):0.056388):0.078442,((5:0.106675,20:0.156612):0.093626,13:0.108666):0.010572):0.002927,(((((4:0.133251,(9:0.127330,(18:0.055751,17:0.013173):0.039653):0.025735):0.011075,10:0.101565):0.005243,((16:0.025925,15:0.020817):0.051759,7:0.137069):0.017720):0.026379,(14:0.080447,19:0.105821):0.040028):0.004207,((28:0. [...]
+   tree rep.12180000 = ((6:0.078543,(((12:0.052524,11:0.042131):0.033082,((13:0.095121,(((22:0.033255,((24:0.010192,23:0.016952):0.006572,25:0.011917):0.015582):0.025746,21:0.057253):0.027995,8:0.089530):0.041120):0.007230,(((19:0.065872,14:0.104037):0.017974,(((4:0.159958,((18:0.035459,17:0.022360):0.054886,9:0.115166):0.022629):0.015080,((15:0.041956,16:0.021861):0.054480,10:0.087870):0.004249):0.003919,7:0.116207):0.022354):0.003010,((((27:0.024444,26:0.027334):0.074400,28:0.053460):0 [...]
+   tree rep.12200000 = ((6:0.093925,3:0.142271):0.005157,(((((10:0.103083,7:0.102490):0.017298,((4:0.132304,((18:0.045648,17:0.019396):0.050502,9:0.100642):0.038342):0.007365,(15:0.032834,16:0.017967):0.047751):0.007790):0.023586,((13:0.100208,((20:0.122720,5:0.115418):0.092185,(((22:0.030809,((24:0.011873,23:0.009534):0.004102,25:0.003149):0.017392):0.021151,21:0.035575):0.029577,8:0.108288):0.013242):0.000297):0.009703,((14:0.102219,19:0.074213):0.024822,(29:0.103276,(28:0.036336,(27:0 [...]
+   tree rep.12220000 = (3:0.118047,(6:0.072807,((((((21:0.049951,((25:0.011786,(23:0.007414,24:0.007745):0.005206):0.039358,22:0.031399):0.020327):0.036133,8:0.092346):0.014904,(13:0.097394,(5:0.125785,20:0.134426):0.098901):0.014068):0.001154,((29:0.097531,((26:0.010755,27:0.050420):0.070357,28:0.036830):0.028935):0.014434,((7:0.109947,(((16:0.028878,15:0.043326):0.063527,10:0.085378):0.001493,(4:0.145969,(9:0.132491,(18:0.037836,17:0.017711):0.059207):0.023940):0.005131):0.006045):0.02 [...]
+   tree rep.12240000 = ((3:0.109609,6:0.100838):0.015660,((((((21:0.046951,((25:0.003013,(23:0.017974,24:0.019364):0.007528):0.033080,22:0.029601):0.037450):0.020023,8:0.101339):0.029612,(13:0.078763,(5:0.086422,20:0.111992):0.116134):0.008676):0.015136,((29:0.099597,((26:0.012859,27:0.032583):0.075459,28:0.032887):0.026353):0.014195,(((10:0.097390,((16:0.033328,15:0.033756):0.049806,(4:0.151721,(9:0.105604,(18:0.035901,17:0.013306):0.043682):0.030191):0.023419):0.000533):0.012414,7:0.11 [...]
+   tree rep.12260000 = ((6:0.104616,3:0.112530):0.014140,(2:0.148182,((((20:0.071783,5:0.081855):0.084653,(13:0.112895,(8:0.081948,((22:0.028037,((23:0.034561,24:0.011643):0.008221,25:0.011356):0.027754):0.017456,21:0.041642):0.037687):0.024173):0.004649):0.003546,((((((9:0.202455,(18:0.047412,17:0.019351):0.029517):0.040234,((15:0.043505,16:0.011335):0.055271,4:0.109290):0.000073):0.006363,10:0.101148):0.014844,7:0.104241):0.020537,(29:0.092841,(28:0.038503,(27:0.029732,26:0.023979):0.0 [...]
+   tree rep.12280000 = (((2:0.146311,((11:0.036677,12:0.061325):0.049188,(((((10:0.092534,(7:0.099843,(15:0.045160,16:0.017352):0.041458):0.013612):0.003155,((9:0.122738,(18:0.039830,17:0.011144):0.053970):0.019455,4:0.139163):0.009785):0.028803,(14:0.107857,19:0.068634):0.021212):0.002688,(29:0.099596,((27:0.040939,26:0.010722):0.091007,28:0.032869):0.027340):0.008150):0.016566,((8:0.121205,(((25:0.001572,(24:0.007361,23:0.011886):0.009297):0.023850,22:0.025376):0.020215,21:0.054695):0. [...]
+   tree rep.12300000 = ((6:0.085766,3:0.097677):0.015152,(2:0.186303,((11:0.035465,12:0.080307):0.044366,((((5:0.103700,20:0.095840):0.084677,((22:0.026542,((24:0.009780,23:0.003978):0.011528,25:0.006533):0.034323):0.010075,21:0.058996):0.058533):0.029189,(((14:0.088461,19:0.049816):0.040462,(((((9:0.108220,(18:0.035870,17:0.018090):0.041507):0.029917,4:0.124333):0.032768,10:0.104322):0.001761,(16:0.026660,15:0.034376):0.037375):0.017987,7:0.088523):0.028843):0.014446,(29:0.099747,(28:0. [...]
+   tree rep.12320000 = (6:0.074415,(3:0.135510,((((((20:0.075965,5:0.095363):0.092876,(((((23:0.008579,24:0.011854):0.001106,25:0.009034):0.022622,22:0.037465):0.025831,21:0.050421):0.021736,8:0.082392):0.028197):0.019532,(((28:0.043581,(27:0.043305,26:0.011895):0.043174):0.020008,29:0.084938):0.020771,((10:0.090083,(((15:0.061998,16:0.026144):0.056836,7:0.134921):0.012014,(4:0.139623,((18:0.034775,17:0.015991):0.071103,9:0.102380):0.022160):0.010102):0.012719):0.030666,(19:0.066989,14:0 [...]
+   tree rep.12340000 = ((3:0.132018,6:0.091494):0.000804,(((11:0.031773,12:0.072790):0.031038,(((5:0.116655,20:0.105669):0.136998,(13:0.088713,(8:0.090752,(21:0.054226,((25:0.004405,(23:0.009253,24:0.006463):0.004331):0.034170,22:0.041753):0.013510):0.018396):0.048237):0.017705):0.003860,(((29:0.081874,(28:0.054185,(26:0.042999,27:0.023095):0.075244):0.023093):0.008332,(14:0.082401,19:0.114144):0.019822):0.000224,(4:0.132498,((((16:0.008548,15:0.045992):0.065424,7:0.135649):0.000376,10:0 [...]
+   tree rep.12360000 = ((3:0.118470,6:0.074535):0.006164,(2:0.154213,(((((21:0.069511,(22:0.025455,(25:0.008436,(24:0.010546,23:0.010160):0.016140):0.018938):0.009943):0.021540,8:0.087519):0.007157,((29:0.084758,((27:0.034414,26:0.029201):0.078449,28:0.054360):0.019327):0.009491,((20:0.058496,5:0.143641):0.110286,13:0.063693):0.008902):0.002822):0.004298,((19:0.093414,14:0.080379):0.029973,(((10:0.089826,(15:0.033517,16:0.028270):0.042256):0.001313,7:0.083495):0.002221,(((18:0.039434,17: [...]
+   tree rep.12380000 = ((6:0.092375,3:0.088164):0.007970,(2:0.204134,(((((19:0.077258,14:0.072566):0.042066,((7:0.087619,10:0.100999):0.003417,((15:0.036420,16:0.016239):0.064651,(4:0.126963,((18:0.038513,17:0.016575):0.048764,9:0.111110):0.014341):0.016875):0.004691):0.040343):0.004581,((28:0.033995,(27:0.029841,26:0.023839):0.070937):0.018012,29:0.124655):0.013980):0.020195,(((8:0.068406,((((23:0.008907,24:0.009377):0.012482,25:0.002937):0.023026,22:0.037494):0.014271,21:0.041453):0.02 [...]
+   tree rep.12400000 = (3:0.136912,((2:0.215358,((11:0.029996,12:0.072495):0.052221,(((((7:0.098387,(((4:0.136272,((18:0.022861,17:0.027866):0.063541,9:0.125318):0.020359):0.012977,(16:0.031336,15:0.034056):0.036521):0.003849,10:0.094186):0.008756):0.034609,(14:0.118609,19:0.101129):0.031076):0.008899,(((26:0.023584,27:0.024832):0.096788,28:0.035668):0.027886,29:0.092093):0.011169):0.007183,(13:0.100874,(5:0.130499,20:0.079599):0.065182):0.008408):0.007470,((21:0.046582,(22:0.023012,(25: [...]
+   tree rep.12420000 = ((((((((24:0.013877,23:0.009797):0.002380,25:0.012636):0.016379,(22:0.027577,21:0.055138):0.002266):0.063524,8:0.124187):0.028249,((13:0.097345,(20:0.070874,5:0.101056):0.157017):0.012085,((10:0.094552,((7:0.117307,(15:0.039501,16:0.032114):0.034567):0.013561,(((18:0.046955,17:0.031351):0.040671,9:0.109067):0.028319,4:0.136945):0.005824):0.009488):0.018696,((((27:0.022235,26:0.029018):0.053559,28:0.062218):0.021718,29:0.143091):0.003111,(19:0.086445,14:0.097902):0. [...]
+   tree rep.12440000 = ((2:0.191866,(((((20:0.072502,5:0.107537):0.115069,(8:0.075892,(21:0.033255,(22:0.035629,(23:0.006794,(25:0.009185,24:0.008231):0.001945):0.017708):0.009297):0.051383):0.011688):0.001787,13:0.127293):0.000925,(((19:0.061631,14:0.093889):0.028086,((7:0.091372,(15:0.035377,16:0.026585):0.046716):0.012869,((4:0.116469,((18:0.026684,17:0.026249):0.038706,9:0.082945):0.034275):0.014588,10:0.087060):0.005846):0.024316):0.005386,(29:0.080176,(28:0.049258,(27:0.030911,26:0 [...]
+   tree rep.12460000 = ((6:0.047612,3:0.190952):0.002173,(((12:0.066836,11:0.049904):0.051069,(((21:0.040380,(((24:0.007699,23:0.012559):0.003511,25:0.004799):0.029891,22:0.022391):0.024527):0.028443,8:0.090594):0.025320,((13:0.087310,(5:0.119074,20:0.071663):0.130919):0.000281,((29:0.095574,((26:0.032562,27:0.025561):0.050878,28:0.035204):0.039700):0.009644,(((10:0.090056,((16:0.017590,15:0.043985):0.051442,7:0.087376):0.001448):0.014125,((9:0.125318,(18:0.049334,17:0.023446):0.035399): [...]
+   tree rep.12480000 = ((((((29:0.078112,(28:0.030360,(27:0.037377,26:0.022881):0.097957):0.034639):0.029368,(((10:0.073845,((9:0.090572,(18:0.053139,17:0.007691):0.061704):0.019234,4:0.141071):0.028884):0.001180,(7:0.098595,(15:0.038806,16:0.022263):0.041126):0.001424):0.031725,(19:0.068940,14:0.118499):0.020838):0.012144):0.003660,((8:0.108844,(((25:0.005890,(24:0.012897,23:0.004802):0.003658):0.012325,22:0.042301):0.014089,21:0.059129):0.027137):0.015722,((20:0.114281,5:0.131130):0.08 [...]
+   tree rep.12500000 = ((((11:0.029962,12:0.075824):0.049944,(((((26:0.030919,27:0.027705):0.056616,28:0.039072):0.031578,29:0.097849):0.002907,((19:0.085113,14:0.081148):0.019505,(7:0.079395,(10:0.083867,((16:0.023661,15:0.024304):0.057986,(9:0.098370,((18:0.040186,17:0.009311):0.071448,4:0.122690):0.003175):0.024934):0.006667):0.009563):0.011419):0.013456):0.005383,((8:0.103909,((22:0.029849,(25:0.009684,(23:0.013493,24:0.014348):0.002444):0.015587):0.007691,21:0.046008):0.053779):0.05 [...]
+   tree rep.12520000 = (((((((29:0.129686,((27:0.033230,26:0.036515):0.058598,28:0.067437):0.016706):0.002401,((((9:0.120360,(18:0.032568,17:0.017991):0.030735):0.027036,4:0.136580):0.008692,((7:0.111666,(15:0.043917,16:0.029823):0.043152):0.007778,10:0.104794):0.000271):0.020336,(19:0.051552,14:0.106563):0.043172):0.002043):0.005760,((20:0.127375,5:0.059879):0.064225,13:0.106601):0.007203):0.003139,(8:0.152990,((((24:0.011089,23:0.004471):0.002859,25:0.021408):0.031373,22:0.054538):0.00 [...]
+   tree rep.12540000 = (((((((8:0.094002,(21:0.030297,(22:0.020064,(25:0.015393,(23:0.005795,24:0.021004):0.011961):0.033692):0.018697):0.014067):0.027050,((20:0.119084,5:0.081909):0.084727,13:0.095844):0.012627):0.005722,(((28:0.054888,(27:0.032509,26:0.038259):0.064184):0.036667,29:0.099731):0.012250,(((10:0.096150,7:0.096271):0.011681,((4:0.175602,((18:0.030781,17:0.019973):0.062857,9:0.120233):0.026783):0.008869,(15:0.039416,16:0.027439):0.074369):0.006192):0.017363,(19:0.090015,14:0 [...]
+   tree rep.12560000 = ((((12:0.044065,11:0.061137):0.035622,((((21:0.038058,(22:0.030348,(25:0.006623,(23:0.012782,24:0.011468):0.008026):0.013420):0.017954):0.076959,8:0.107414):0.023147,((20:0.087948,5:0.131163):0.081138,13:0.078880):0.007601):0.009684,((((27:0.031279,26:0.032124):0.078286,28:0.045150):0.016603,29:0.092700):0.005789,((19:0.125723,14:0.095522):0.032054,(((7:0.123964,(15:0.044556,16:0.024548):0.052158):0.002230,10:0.100739):0.002164,(4:0.129112,(9:0.112280,(18:0.044618, [...]
+   tree rep.12580000 = ((3:0.115805,6:0.097989):0.011697,(((11:0.037907,12:0.073759):0.043998,((((19:0.054812,14:0.101794):0.029929,(((4:0.125684,((18:0.051945,17:0.015644):0.058829,9:0.098918):0.031946):0.017590,(7:0.066432,(15:0.036285,16:0.017952):0.058166):0.008492):0.006824,10:0.083687):0.022647):0.007298,((29:0.083624,(28:0.033067,(27:0.029712,26:0.021106):0.063878):0.008019):0.003493,(20:0.093758,5:0.068299):0.061813):0.011289):0.003816,((8:0.063274,(21:0.039070,(((24:0.007938,23: [...]
+   tree rep.12600000 = ((6:0.103884,(((12:0.053459,11:0.038978):0.055562,((8:0.089553,(21:0.035051,(22:0.039178,(25:0.003816,(24:0.017080,23:0.008435):0.003227):0.021460):0.007542):0.044355):0.022769,(((((28:0.024043,(27:0.034543,26:0.015941):0.083927):0.025585,29:0.081977):0.011225,(((((15:0.036223,16:0.030102):0.051218,10:0.073343):0.003126,((9:0.106913,(18:0.045408,17:0.018690):0.036457):0.028581,4:0.104332):0.005019):0.010093,7:0.081158):0.025944,(19:0.085748,14:0.085905):0.034840):0 [...]
+   tree rep.12620000 = (3:0.116468,(6:0.056348,(((11:0.037564,12:0.058181):0.045533,((((7:0.092137,((4:0.153025,((18:0.028624,17:0.026632):0.037614,9:0.149857):0.029196):0.009140,(10:0.101210,(15:0.027864,16:0.036149):0.050875):0.001714):0.025821):0.014701,(19:0.077528,14:0.083764):0.043612):0.004947,(((27:0.031421,26:0.019503):0.062352,28:0.050505):0.018203,29:0.125260):0.005126):0.009057,(((20:0.084023,5:0.094214):0.079245,13:0.110267):0.001832,(8:0.100704,((22:0.025442,((23:0.008065,2 [...]
+   tree rep.12640000 = ((2:0.138253,((11:0.016817,12:0.074127):0.048943,((20:0.076616,5:0.088876):0.070476,(((8:0.074542,(((25:0.006544,(24:0.010511,23:0.013736):0.003681):0.041698,22:0.029195):0.011316,21:0.056518):0.038358):0.031423,13:0.070873):0.012670,(((19:0.086890,14:0.073513):0.029875,(((27:0.038551,26:0.038616):0.072977,28:0.044773):0.029582,29:0.096350):0.013109):0.008982,((10:0.065352,((15:0.039135,16:0.023230):0.061904,(((18:0.062864,17:0.016488):0.047460,9:0.113473):0.023746 [...]
+   tree rep.12660000 = ((6:0.068180,3:0.163562):0.022656,(((12:0.043776,11:0.039442):0.037375,(((((4:0.151435,((18:0.046539,17:0.028113):0.048503,9:0.102232):0.024275):0.000496,(((16:0.024801,15:0.021817):0.041244,7:0.110535):0.014679,10:0.103565):0.008989):0.025854,(14:0.108256,19:0.069847):0.031816):0.008587,((28:0.045547,(26:0.022444,27:0.038241):0.062448):0.031720,29:0.106133):0.013726):0.006435,((((((24:0.014703,23:0.015957):0.008755,25:0.011912):0.017254,22:0.032223):0.011816,21:0. [...]
+   tree rep.12680000 = ((6:0.044286,(2:0.202380,((((20:0.101898,5:0.108710):0.090830,((8:0.093497,(21:0.034855,(22:0.032762,((23:0.007085,24:0.011600):0.005477,25:0.012171):0.036251):0.024466):0.016263):0.024986,13:0.086560):0.006578):0.015652,(((((18:0.049128,17:0.019587):0.080980,9:0.099022):0.009034,4:0.112189):0.017240,(10:0.093024,((15:0.040099,16:0.027490):0.066764,7:0.067782):0.014420):0.009721):0.025073,((29:0.094158,((27:0.026138,26:0.029958):0.062164,28:0.037304):0.021812):0.02 [...]
+   tree rep.12700000 = ((((12:0.085320,11:0.019735):0.057507,(((5:0.123478,20:0.090813):0.100535,(((29:0.062345,((26:0.012032,27:0.038952):0.052168,28:0.059956):0.037819):0.017800,(14:0.101716,19:0.067434):0.032789):0.008365,((((22:0.034462,21:0.061122):0.002066,((23:0.014681,24:0.005480):0.005322,25:0.014793):0.015657):0.062367,8:0.073697):0.020445,13:0.077054):0.014114):0.002971):0.007420,((((18:0.028336,17:0.017266):0.036761,9:0.120063):0.029700,4:0.092806):0.017410,(10:0.064892,(7:0. [...]
+   tree rep.12720000 = ((3:0.113668,(((12:0.053184,11:0.042333):0.066536,(((5:0.120277,20:0.133027):0.093592,((((26:0.044196,27:0.018570):0.079055,28:0.047372):0.026862,29:0.113411):0.006692,((((4:0.129548,((18:0.044452,17:0.017436):0.048766,9:0.104962):0.029496):0.016899,((16:0.019240,15:0.033621):0.029352,7:0.107377):0.019106):0.006396,10:0.082115):0.047702,(14:0.098254,19:0.076490):0.026630):0.003171):0.000598):0.005009,((((22:0.042873,(25:0.004394,(23:0.020614,24:0.011635):0.006067): [...]
+   tree rep.12740000 = (6:0.071111,(((((((20:0.095505,5:0.120927):0.104154,(8:0.089995,((((23:0.006490,24:0.010095):0.007912,25:0.005472):0.016967,22:0.054090):0.009506,21:0.032657):0.051209):0.024438):0.001265,13:0.121245):0.003855,(((29:0.090568,((27:0.033122,26:0.034567):0.075146,28:0.052841):0.018713):0.009857,((10:0.106316,((4:0.137032,((18:0.033745,17:0.015199):0.046241,9:0.139656):0.037556):0.017804,(15:0.046906,16:0.018591):0.072725):0.000240):0.009811,7:0.103506):0.030446):0.000 [...]
+   tree rep.12760000 = (6:0.100603,(3:0.129993,(2:0.127004,(((((28:0.051788,(26:0.024553,27:0.040127):0.039884):0.032773,29:0.118095):0.026908,((19:0.084211,14:0.102049):0.015653,(10:0.082863,((7:0.084019,(16:0.023143,15:0.027077):0.043639):0.008337,(((18:0.031181,17:0.022191):0.045535,9:0.089344):0.026427,4:0.131169):0.010411):0.006548):0.024250):0.002613):0.009106,(((8:0.092798,(21:0.041802,(22:0.019337,(25:0.003829,(23:0.006288,24:0.010409):0.007904):0.020937):0.006277):0.036458):0.03 [...]
+   tree rep.12780000 = ((2:0.204674,((((10:0.083151,(((16:0.026659,15:0.028892):0.030631,7:0.112605):0.015214,(((18:0.041073,17:0.029095):0.051343,9:0.123131):0.021723,4:0.124339):0.018099):0.003936):0.022237,(((28:0.057739,(26:0.029268,27:0.038212):0.088039):0.039205,29:0.100794):0.017878,(19:0.091121,14:0.115544):0.014524):0.003774):0.001069,(13:0.111992,((5:0.071939,20:0.131643):0.049386,(8:0.074102,((22:0.029034,(25:0.011469,(24:0.012594,23:0.006928):0.002037):0.028404):0.024860,21:0 [...]
+   tree rep.12800000 = ((3:0.133547,6:0.086550):0.012245,(((11:0.014702,12:0.065833):0.072175,((((28:0.045992,(27:0.049190,26:0.034426):0.084513):0.017239,29:0.088963):0.001577,((19:0.089115,14:0.082428):0.042947,(10:0.078900,(4:0.111476,((7:0.111307,(15:0.033420,16:0.029951):0.053574):0.001262,((18:0.039649,17:0.017290):0.039989,9:0.086821):0.034834):0.006464):0.008464):0.015003):0.002840):0.010140,((20:0.099057,5:0.098300):0.084354,((((25:0.003932,(24:0.010231,23:0.017129):0.008680):0. [...]
+   tree rep.12820000 = ((6:0.093206,(((11:0.031103,12:0.066511):0.042030,((((29:0.077472,(28:0.041891,(27:0.042643,26:0.016324):0.079140):0.016014):0.018198,(19:0.068971,14:0.102844):0.047531):0.006732,(13:0.089298,((20:0.073097,5:0.132496):0.083211,(((22:0.032933,((24:0.008076,23:0.010747):0.017391,25:0.003701):0.031235):0.006649,21:0.057938):0.040603,8:0.060920):0.019469):0.005504):0.013222):0.008534,((10:0.104331,(((9:0.105277,(18:0.033782,17:0.021448):0.040878):0.031889,4:0.158786):0 [...]
+   tree rep.12840000 = (((((11:0.022186,12:0.060206):0.037866,((((29:0.075787,(28:0.035085,(26:0.033812,27:0.031968):0.077631):0.019551):0.015313,((7:0.086237,(10:0.080280,((16:0.019599,15:0.032691):0.055024,(4:0.150678,((18:0.018399,17:0.041972):0.081869,9:0.136986):0.026416):0.008961):0.000186):0.009223):0.021059,(19:0.081186,14:0.057118):0.036007):0.003459):0.017679,(5:0.112177,20:0.106989):0.108268):0.003002,(13:0.103260,((21:0.053203,((25:0.003607,(23:0.005600,24:0.018096):0.010363) [...]
+   tree rep.12860000 = (2:0.223431,((6:0.094550,3:0.128806):0.026893,((12:0.091242,11:0.030424):0.065490,(((5:0.089886,20:0.116855):0.070059,(((28:0.085250,(26:0.031335,27:0.036787):0.043331):0.013627,29:0.102552):0.008667,((10:0.082359,((4:0.132020,(9:0.107872,(18:0.037764,17:0.023516):0.031942):0.041910):0.011428,(7:0.115737,(16:0.019895,15:0.044509):0.035261):0.007379):0.001526):0.030586,(19:0.099761,14:0.103373):0.057945):0.002086):0.005246):0.003169,((8:0.130255,((((23:0.016941,24:0 [...]
+   tree rep.12880000 = ((((12:0.075252,11:0.024445):0.043933,(((13:0.073637,(5:0.088132,20:0.090503):0.085586):0.003133,((7:0.086973,((10:0.080811,(16:0.017425,15:0.035579):0.055382):0.000362,(4:0.143364,(9:0.093394,(18:0.043829,17:0.018701):0.037168):0.021491):0.007129):0.012077):0.021708,((19:0.098511,14:0.077628):0.020608,((28:0.039016,(26:0.022291,27:0.048842):0.060527):0.023780,29:0.098535):0.016950):0.002868):0.020681):0.004223,(8:0.093622,(21:0.028934,((25:0.002581,(24:0.003710,23 [...]
+   tree rep.12900000 = ((2:0.198379,(((8:0.130465,(21:0.040883,(22:0.028831,((24:0.018605,23:0.013866):0.005991,25:0.005084):0.015310):0.014323):0.034006):0.050331,((13:0.078883,(5:0.083642,20:0.107364):0.094808):0.005016,((19:0.094721,14:0.113993):0.006288,((29:0.097639,((26:0.023795,27:0.043012):0.093574,28:0.034604):0.013984):0.021889,(10:0.084747,(((16:0.024793,15:0.040258):0.033563,7:0.138326):0.013373,(((18:0.048080,17:0.017945):0.046599,9:0.088178):0.022970,4:0.098274):0.022758):0 [...]
+   tree rep.12920000 = (3:0.115793,((2:0.132746,(((((21:0.069561,(22:0.030356,((23:0.006952,24:0.012110):0.008368,25:0.002085):0.023476):0.002727):0.027519,8:0.090102):0.026733,13:0.094463):0.012716,((20:0.089365,5:0.081860):0.087933,(((7:0.087702,((15:0.029685,16:0.026889):0.051884,(10:0.068046,(((18:0.027902,17:0.024298):0.039682,9:0.088445):0.021171,4:0.136260):0.014925):0.000987):0.013358):0.016134,(19:0.047544,14:0.072135):0.033649):0.003796,(((27:0.034023,26:0.030575):0.070058,28:0 [...]
+   tree rep.12940000 = ((((((((19:0.082024,14:0.086303):0.026506,((10:0.076012,((4:0.140522,((18:0.054255,17:0.025113):0.036862,9:0.111334):0.022652):0.015017,(15:0.053819,16:0.017053):0.050196):0.001906):0.005118,7:0.082739):0.024412):0.008440,((28:0.058119,(27:0.029397,26:0.032138):0.093688):0.032729,29:0.118927):0.002117):0.004137,((8:0.085377,((((24:0.016601,23:0.016071):0.008144,25:0.008068):0.023271,22:0.031915):0.004708,21:0.025314):0.032913):0.027947,(13:0.074291,(20:0.118724,5:0 [...]
+   tree rep.12960000 = ((3:0.124328,6:0.089792):0.008795,(2:0.173113,(((((29:0.090957,(28:0.027573,(26:0.009456,27:0.045307):0.084395):0.033637):0.011388,(19:0.083502,14:0.099023):0.041887):0.008356,(((4:0.092991,(9:0.095529,(18:0.036837,17:0.017574):0.055150):0.023026):0.000692,((16:0.027770,15:0.029294):0.042412,10:0.112959):0.018860):0.005114,7:0.138711):0.023472):0.002950,((5:0.082843,20:0.086486):0.082052,(((21:0.055292,(22:0.024709,(25:0.007370,(24:0.008026,23:0.017563):0.004731):0 [...]
+   tree rep.12980000 = ((2:0.193393,((12:0.054898,11:0.039572):0.036717,((((29:0.122654,(28:0.039833,(27:0.029351,26:0.025770):0.060108):0.034782):0.024580,((((7:0.151927,(15:0.043115,16:0.023173):0.058276):0.005628,10:0.114701):0.004239,(4:0.120029,(9:0.115278,(18:0.030592,17:0.015033):0.059427):0.021755):0.011369):0.008920,(19:0.073697,14:0.106912):0.022255):0.007327):0.005859,((((25:0.003087,(23:0.013002,24:0.009482):0.009477):0.027167,22:0.041384):0.020035,21:0.045415):0.047165,8:0.1 [...]
+   tree rep.13000000 = ((3:0.121524,((((8:0.091031,(((25:0.001076,(23:0.007493,24:0.010641):0.009200):0.025936,22:0.023385):0.008636,21:0.059333):0.021573):0.022726,((((26:0.021901,27:0.045532):0.096725,28:0.047924):0.046767,(((14:0.111518,19:0.062836):0.027266,(7:0.106489,(10:0.103801,((16:0.021027,15:0.054429):0.055805,(((18:0.029882,17:0.026816):0.067642,9:0.081685):0.013664,4:0.108848):0.006312):0.005761):0.015307):0.020632):0.006471,29:0.125014):0.000096):0.008842,(13:0.097477,(5:0. [...]
+   tree rep.13020000 = (3:0.125018,(((((((5:0.124508,20:0.102897):0.069661,13:0.092650):0.004077,((((26:0.032075,27:0.029894):0.061556,28:0.041545):0.021383,29:0.101138):0.011203,((14:0.107680,19:0.087693):0.021266,(10:0.088222,(((16:0.018226,15:0.031323):0.043502,7:0.086045):0.006277,(((18:0.039127,17:0.037617):0.040628,9:0.139220):0.032285,4:0.116791):0.019056):0.005118):0.022089):0.003705):0.022307):0.002078,((21:0.072236,(((24:0.022201,23:0.010100):0.002511,25:0.005935):0.034163,22:0 [...]
+   tree rep.13040000 = ((2:0.176395,(((((((((15:0.031058,16:0.021492):0.045213,7:0.147497):0.012907,10:0.101414):0.003785,(((18:0.052719,17:0.013859):0.047632,9:0.100223):0.054938,4:0.141706):0.009303):0.018886,((27:0.055229,26:0.026687):0.071067,28:0.052581):0.023949):0.005779,(29:0.083641,(19:0.064742,14:0.088237):0.055823):0.011803):0.013722,(13:0.119959,(20:0.099502,5:0.097688):0.114855):0.000453):0.002748,(8:0.078957,((22:0.048515,(25:0.006073,(24:0.011398,23:0.007786):0.003719):0.0 [...]
+   tree rep.13060000 = (6:0.060239,(3:0.140832,((((5:0.050892,20:0.097335):0.080633,(((((7:0.099484,(16:0.028012,15:0.046817):0.038288):0.022335,((((18:0.044244,17:0.026424):0.036218,9:0.107244):0.011389,4:0.133208):0.013691,10:0.092381):0.002790):0.025377,(14:0.097520,19:0.087600):0.011564):0.003840,(((26:0.033461,27:0.030253):0.057231,28:0.041807):0.009145,29:0.125054):0.014930):0.007650,(13:0.084083,((21:0.037475,((25:0.008059,(24:0.006166,23:0.017952):0.002313):0.022286,22:0.028832): [...]
+   tree rep.13080000 = (3:0.138924,(6:0.074729,(((((29:0.078648,((27:0.039848,26:0.015185):0.046174,28:0.043898):0.026599):0.003842,((14:0.085534,19:0.085900):0.018334,(7:0.111343,(((9:0.098503,(18:0.033114,17:0.011978):0.042496):0.024481,4:0.125950):0.007789,((15:0.024146,16:0.027931):0.054635,10:0.100679):0.015922):0.014597):0.008222):0.006182):0.018262,((8:0.083704,(21:0.027542,((25:0.005846,(23:0.012415,24:0.003945):0.004570):0.016384,22:0.028564):0.018254):0.019090):0.045303,((20:0. [...]
+   tree rep.13100000 = ((6:0.107871,(((((((26:0.029693,27:0.027973):0.055214,28:0.075567):0.017121,29:0.101090):0.016568,((10:0.078331,((4:0.151005,((18:0.030757,17:0.018015):0.043688,9:0.094457):0.039264):0.012192,(7:0.094233,(16:0.013619,15:0.029011):0.064448):0.021464):0.005331):0.021222,(14:0.074688,19:0.066935):0.026304):0.007921):0.001729,((13:0.083097,(5:0.095862,20:0.062212):0.069946):0.004852,((21:0.032632,(22:0.033970,(25:0.003365,(23:0.012846,24:0.016139):0.004898):0.023804):0 [...]
+   tree rep.13120000 = ((3:0.141386,(((((((29:0.092127,((27:0.037768,26:0.036366):0.061510,28:0.068223):0.015819):0.006315,((7:0.104876,(10:0.069603,(15:0.043192,16:0.016854):0.065091):0.003952):0.007513,(4:0.110342,((18:0.049202,17:0.018886):0.035135,9:0.125558):0.020561):0.010377):0.024866):0.003010,(14:0.099511,19:0.088890):0.026351):0.008033,(13:0.091344,(8:0.090424,((22:0.043513,((23:0.022979,24:0.011590):0.007705,25:0.005623):0.014967):0.022676,21:0.042105):0.042076):0.016786):0.00 [...]
+   tree rep.13140000 = ((((((14:0.093912,19:0.097478):0.019675,((7:0.106214,((4:0.121603,((18:0.033052,17:0.018608):0.049497,9:0.087943):0.043756):0.009028,((15:0.029088,16:0.023962):0.048704,10:0.116262):0.009002):0.026558):0.009954,(((27:0.035473,26:0.017222):0.052103,28:0.049818):0.013014,29:0.068812):0.016832):0.002575):0.002043,((8:0.086848,((22:0.026276,((24:0.009324,23:0.009990):0.004092,25:0.002441):0.039421):0.016382,21:0.031675):0.043881):0.027468,((20:0.100491,5:0.078810):0.09 [...]
+   tree rep.13160000 = ((6:0.059597,3:0.138239):0.010451,(((((20:0.067376,5:0.143821):0.126116,(8:0.100089,((22:0.034040,((24:0.013555,23:0.007426):0.012912,25:0.007668):0.035289):0.014395,21:0.049478):0.051391):0.016764):0.001162,(13:0.103394,(((14:0.072789,19:0.093961):0.025840,(((7:0.101083,(15:0.069282,16:0.017798):0.058937):0.018127,(4:0.157565,((18:0.031177,17:0.019221):0.037910,9:0.103635):0.014759):0.013695):0.010338,10:0.113696):0.025530):0.005488,(((27:0.026149,26:0.043701):0.0 [...]
+   tree rep.13180000 = (6:0.111364,(3:0.173360,(2:0.215781,(((((20:0.073141,5:0.136214):0.100012,13:0.088322):0.007776,(((19:0.082838,14:0.090402):0.020116,(10:0.110914,(((15:0.033368,16:0.021986):0.078613,7:0.104175):0.020428,(((18:0.052915,17:0.017390):0.055998,9:0.114095):0.028930,4:0.164428):0.010703):0.002868):0.016716):0.012088,(29:0.116781,((27:0.026295,26:0.036366):0.085313,28:0.041227):0.015128):0.005487):0.011129):0.001338,(8:0.115787,(((25:0.008031,(24:0.009593,23:0.014084):0. [...]
+   tree rep.13200000 = ((3:0.167356,6:0.114928):0.007504,(((11:0.037547,12:0.062921):0.032883,(((8:0.125936,((22:0.041629,(25:0.006843,(23:0.005934,24:0.007146):0.018370):0.011882):0.025795,21:0.052842):0.041677):0.046101,(13:0.090037,(20:0.088445,5:0.108815):0.084791):0.001966):0.016740,(((28:0.037342,(27:0.048991,26:0.019410):0.070128):0.040167,((14:0.110076,19:0.102468):0.044142,(((7:0.094328,(15:0.037964,16:0.025688):0.039572):0.013716,((9:0.115557,(18:0.042341,17:0.021321):0.072426) [...]
+   tree rep.13220000 = ((6:0.091398,3:0.095214):0.004381,((((13:0.072309,(5:0.138682,20:0.070228):0.059653):0.001627,((((28:0.040860,(26:0.011203,27:0.045034):0.087248):0.030839,29:0.085753):0.024191,(8:0.109429,((22:0.049215,((24:0.012975,23:0.004022):0.006305,25:0.003115):0.025958):0.031315,21:0.039959):0.035723):0.013696):0.011444,((19:0.088895,14:0.086923):0.050461,(((4:0.115115,(9:0.099395,(18:0.052415,17:0.018611):0.037624):0.017162):0.022040,((16:0.023817,15:0.038156):0.048914,7:0 [...]
+   tree rep.13240000 = ((6:0.089562,3:0.095630):0.021451,(2:0.134938,((((((((15:0.061638,16:0.025620):0.066853,(((18:0.030508,17:0.019930):0.032625,9:0.099374):0.028308,10:0.089052):0.007697):0.008007,4:0.131739):0.012239,7:0.073563):0.014515,(14:0.075335,19:0.048628):0.027091):0.012509,(((27:0.029927,26:0.020828):0.070384,28:0.049803):0.015820,29:0.113377):0.018265):0.013569,((13:0.105399,(20:0.106387,5:0.074816):0.061722):0.003911,(8:0.093375,((22:0.034693,((23:0.006003,24:0.007591):0. [...]
+   tree rep.13260000 = ((2:0.280570,((12:0.051553,11:0.028100):0.049392,(((8:0.101407,((((23:0.010846,24:0.013607):0.009095,25:0.004437):0.012406,22:0.024684):0.007910,21:0.040003):0.027883):0.021890,((((((((18:0.048166,17:0.016963):0.022893,9:0.097902):0.038834,4:0.099424):0.012908,(16:0.017881,15:0.018832):0.040246):0.000912,10:0.091448):0.012150,7:0.091231):0.015327,(14:0.088546,19:0.057710):0.037854):0.011604,(29:0.081843,((26:0.023999,27:0.021614):0.077649,28:0.029446):0.019573):0.0 [...]
+   tree rep.13280000 = ((((12:0.062198,11:0.030550):0.052174,((13:0.107968,((20:0.093285,5:0.090312):0.093190,(8:0.097446,((((23:0.007871,24:0.017531):0.005213,25:0.001287):0.028105,21:0.054441):0.001451,22:0.032899):0.059082):0.014269):0.006566):0.005910,(((19:0.092942,14:0.100273):0.031952,(((7:0.064294,((15:0.055635,16:0.017513):0.055903,(9:0.132394,(18:0.021619,17:0.024011):0.043124):0.021582):0.005795):0.011027,10:0.079931):0.003297,4:0.117278):0.026419):0.009847,(29:0.076861,((27:0 [...]
+   tree rep.13300000 = (3:0.113247,(6:0.087040,(2:0.228612,((12:0.066853,11:0.035348):0.059004,((8:0.072084,(21:0.047505,(22:0.029108,((23:0.031107,24:0.014841):0.003202,25:0.007911):0.019047):0.008763):0.055956):0.060350,(13:0.083260,((20:0.092803,5:0.111486):0.106835,(((((9:0.126204,(18:0.039894,17:0.016338):0.028759):0.029870,4:0.116705):0.012482,((7:0.097233,(15:0.038920,16:0.028481):0.055265):0.012402,10:0.123212):0.001963):0.036662,(29:0.091804,(19:0.091273,14:0.108474):0.039349):0 [...]
+   tree rep.13320000 = (6:0.117688,(3:0.136847,(((12:0.043633,11:0.038271):0.059883,(((8:0.107245,(21:0.032921,(22:0.038105,(25:0.008926,(23:0.011026,24:0.004448):0.006248):0.034657):0.014452):0.019401):0.025491,((5:0.102530,20:0.139529):0.084948,13:0.069691):0.014278):0.014055,(((14:0.075333,19:0.080333):0.032386,((28:0.057639,(26:0.030695,27:0.024972):0.066557):0.028661,29:0.076890):0.029309):0.002609,((10:0.106646,(7:0.106435,(16:0.024814,15:0.038192):0.038140):0.011747):0.005597,(4:0 [...]
+   tree rep.13340000 = (6:0.092372,((2:0.150636,((12:0.068281,11:0.027016):0.061098,(((5:0.109643,20:0.100064):0.103067,((((((24:0.007087,23:0.009042):0.002829,25:0.004988):0.018776,22:0.032368):0.023434,21:0.026259):0.034939,8:0.090363):0.013217,13:0.085773):0.007706):0.016065,(((14:0.101937,19:0.081866):0.024884,(29:0.083329,(28:0.049572,(26:0.019944,27:0.042377):0.058150):0.018525):0.008413):0.008337,((4:0.089020,((18:0.032889,17:0.025812):0.049617,9:0.091748):0.013895):0.010748,(10:0 [...]
+   tree rep.13360000 = ((6:0.081300,3:0.123880):0.014558,(2:0.149695,((((29:0.120342,(28:0.062719,(26:0.026391,27:0.041343):0.080067):0.014632):0.019655,((19:0.061768,14:0.120156):0.054347,(7:0.091165,((10:0.086109,(16:0.028469,15:0.041282):0.064607):0.003493,(4:0.107889,(9:0.139783,(18:0.048930,17:0.021579):0.048444):0.022437):0.027595):0.009935):0.023509):0.003695):0.009932,((8:0.077383,((((23:0.015870,24:0.023773):0.002676,25:0.007581):0.028615,22:0.035153):0.012259,21:0.083732):0.038 [...]
+   tree rep.13380000 = ((((((13:0.101806,((5:0.146417,20:0.118156):0.121303,((22:0.029764,((24:0.003226,23:0.028607):0.016007,25:0.008671):0.025677):0.005800,21:0.051230):0.020237):0.018093):0.038211,8:0.127548):0.010249,(((19:0.093472,14:0.070667):0.053497,(10:0.105129,(((9:0.124302,(18:0.057792,17:0.017163):0.057226):0.010086,4:0.128420):0.027204,(7:0.125094,(16:0.013185,15:0.046001):0.036473):0.005316):0.015794):0.033988):0.013694,(29:0.100363,(28:0.057183,(26:0.034279,27:0.051782):0. [...]
+   tree rep.13400000 = ((2:0.170020,((((19:0.090190,14:0.114256):0.035701,((((7:0.081644,10:0.108643):0.007937,(15:0.025113,16:0.024515):0.039308):0.003439,((9:0.096287,(18:0.046882,17:0.018443):0.032562):0.028552,4:0.133686):0.008188):0.022234,(29:0.074656,(28:0.024523,(27:0.037503,26:0.015838):0.061581):0.012113):0.019639):0.005133):0.004120,((13:0.071802,(20:0.086117,5:0.102000):0.072301):0.008809,(8:0.075005,(((25:0.009648,(24:0.013942,23:0.009355):0.002485):0.038159,22:0.016860):0.0 [...]
+   tree rep.13420000 = ((6:0.062313,3:0.127960):0.022823,(2:0.143688,((((8:0.090809,(21:0.033797,(22:0.027282,((23:0.010002,24:0.011770):0.005674,25:0.011489):0.018213):0.011188):0.046747):0.018481,(13:0.104105,(20:0.110965,5:0.100370):0.098296):0.013524):0.003178,(((((((9:0.087268,(18:0.034480,17:0.013725):0.045949):0.018249,4:0.120504):0.013201,(15:0.040567,16:0.023979):0.049001):0.003179,10:0.089962):0.008803,7:0.101852):0.029450,(((27:0.018430,26:0.020713):0.063862,28:0.056341):0.021 [...]
+   tree rep.13440000 = (6:0.112734,((2:0.183460,(((((((21:0.059776,22:0.030112):0.018093,((24:0.015929,23:0.008711):0.006056,25:0.021455):0.029936):0.040684,(5:0.148105,20:0.101583):0.126766):0.046291,(((26:0.054552,27:0.017893):0.087854,28:0.044685):0.013686,29:0.114844):0.024681):0.001141,((19:0.093127,14:0.111343):0.034805,(8:0.132051,13:0.122385):0.020403):0.010857):0.014605,(((4:0.189070,(7:0.107432,(16:0.026676,15:0.036446):0.039176):0.036298):0.020844,((18:0.054908,17:0.016166):0. [...]
+   tree rep.13460000 = ((2:0.154274,(((((((25:0.010567,(24:0.013083,23:0.007272):0.013434):0.023270,22:0.026459):0.005394,21:0.061653):0.021055,8:0.086311):0.027307,13:0.076979):0.006104,((5:0.056493,20:0.073819):0.064554,(((14:0.096478,19:0.070589):0.036431,(10:0.082158,((4:0.098906,((18:0.023982,17:0.019026):0.035344,9:0.083107):0.027662):0.009979,(7:0.130470,(16:0.029517,15:0.045485):0.045306):0.007171):0.009789):0.032095):0.001685,(29:0.078789,((26:0.031522,27:0.037605):0.058484,28:0 [...]
+   tree rep.13480000 = ((3:0.106959,(2:0.152983,((((29:0.086235,((26:0.023875,27:0.035338):0.080952,28:0.046746):0.021693):0.022487,((19:0.067561,14:0.077446):0.018060,((4:0.117294,((18:0.033365,17:0.034329):0.046013,9:0.088972):0.026707):0.015975,(10:0.080101,(7:0.114089,(16:0.014187,15:0.059249):0.039785):0.014861):0.000475):0.034958):0.015580):0.004839,((8:0.092155,(21:0.045924,(22:0.037078,((23:0.004304,24:0.006634):0.002120,25:0.007714):0.024595):0.042036):0.027782):0.032385,(13:0.0 [...]
+   tree rep.13500000 = ((3:0.142434,6:0.094577):0.015010,(((((((26:0.028682,27:0.024656):0.062259,28:0.092990):0.005617,29:0.092581):0.015885,(((4:0.117573,((18:0.030706,17:0.023112):0.042168,9:0.103899):0.028031):0.020328,(10:0.102696,(7:0.095814,(16:0.028007,15:0.043259):0.053201):0.000784):0.003042):0.029415,(19:0.073423,14:0.107434):0.007091):0.015410):0.014196,(((5:0.103598,20:0.075239):0.064968,(8:0.075705,((22:0.050079,((24:0.007658,23:0.015958):0.001366,25:0.007921):0.025865):0.0 [...]
+   tree rep.13520000 = ((6:0.075706,3:0.180204):0.030337,(((12:0.065318,11:0.037045):0.048730,((((14:0.081130,19:0.079988):0.029695,((10:0.088288,((16:0.016452,15:0.034031):0.084964,7:0.094360):0.003097):0.005127,((9:0.098206,(18:0.040853,17:0.011852):0.054926):0.029798,4:0.129146):0.014687):0.021968):0.002438,((28:0.071710,(26:0.034448,27:0.027936):0.078809):0.031256,29:0.100923):0.014936):0.010814,(((5:0.084838,20:0.080548):0.090865,13:0.081898):0.002867,((21:0.056056,(22:0.030964,((23 [...]
+   tree rep.13540000 = ((((12:0.051060,11:0.036807):0.046768,((13:0.120839,(5:0.131647,20:0.073581):0.080643):0.012319,(((((26:0.032969,27:0.036054):0.052329,28:0.057090):0.022063,29:0.114494):0.022399,(((((16:0.021227,15:0.040674):0.063380,7:0.093252):0.001734,(4:0.122964,(9:0.110437,(18:0.044148,17:0.026478):0.061420):0.028937):0.029001):0.002287,10:0.118120):0.031545,(14:0.092417,19:0.083827):0.023609):0.011010):0.013203,(8:0.101480,(21:0.046938,(22:0.028223,(25:0.013613,(23:0.010460, [...]
+   tree rep.13560000 = ((((12:0.068540,11:0.023421):0.055242,(8:0.127040,((((10:0.094509,((16:0.027455,15:0.034072):0.037239,7:0.110941):0.004417):0.009811,(4:0.133792,(9:0.092569,(18:0.031933,17:0.016687):0.062473):0.022050):0.007327):0.032402,((((26:0.028755,27:0.031334):0.068704,28:0.038735):0.030050,29:0.097537):0.002976,(14:0.062337,19:0.081093):0.043783):0.006907):0.005775,(13:0.099749,((5:0.123039,20:0.095223):0.054782,((((23:0.006532,24:0.015554):0.004170,25:0.004705):0.024756,22 [...]
+   tree rep.13580000 = ((6:0.068400,3:0.120971):0.004201,(2:0.184454,((((((19:0.082105,14:0.106949):0.021538,29:0.083188):0.005108,((28:0.051544,(27:0.028964,26:0.010488):0.052694):0.032556,(10:0.088971,(((15:0.036346,16:0.015677):0.044984,7:0.103635):0.000093,(((18:0.030823,17:0.025587):0.046316,9:0.114874):0.036571,4:0.120702):0.008579):0.011246):0.013796):0.010087):0.013286,((21:0.046539,((25:0.010226,(24:0.006504,23:0.004322):0.007405):0.027743,22:0.023967):0.010837):0.035970,8:0.082 [...]
+   tree rep.13600000 = (((((13:0.086193,(((((27:0.020343,26:0.031031):0.053781,28:0.059505):0.022980,29:0.081804):0.021769,((19:0.086151,14:0.068892):0.023325,(((7:0.102380,(15:0.036923,16:0.034666):0.043260):0.003214,(4:0.120586,((18:0.041097,17:0.019102):0.035567,9:0.103633):0.037319):0.012122):0.012383,10:0.097145):0.035998):0.000723):0.013683,(8:0.098307,((((23:0.018107,24:0.010534):0.007034,25:0.005388):0.033834,22:0.036070):0.010305,21:0.046581):0.029562):0.020302):0.002680):0.0022 [...]
+   tree rep.13620000 = (((((((((4:0.117647,((18:0.034180,17:0.023425):0.033338,9:0.125778):0.017591):0.025576,((15:0.046579,16:0.017950):0.054145,10:0.074901):0.009764):0.011507,7:0.092180):0.024389,(19:0.081880,14:0.100700):0.023807):0.016305,(29:0.094008,((27:0.023073,26:0.024474):0.059670,28:0.055707):0.012339):0.009252):0.007484,((13:0.081458,(8:0.096974,(21:0.042715,(22:0.022211,((24:0.013913,23:0.004406):0.009770,25:0.005724):0.023591):0.015030):0.014710):0.010235):0.011020,(20:0.0 [...]
+   tree rep.13640000 = (3:0.131127,(6:0.096971,((((((20:0.088539,5:0.125758):0.137882,((29:0.070699,((27:0.030404,26:0.023937):0.086704,28:0.034503):0.043844):0.007726,(19:0.084660,14:0.075770):0.029076):0.008963):0.000297,(13:0.118237,(8:0.121581,((22:0.033001,21:0.086036):0.001455,(25:0.002352,(23:0.019103,24:0.006356):0.014175):0.024193):0.045551):0.020617):0.006483):0.003581,(((9:0.121945,(18:0.031765,17:0.020115):0.024642):0.030114,4:0.105135):0.029491,((7:0.091230,(15:0.036151,16:0 [...]
+   tree rep.13660000 = (((((((((4:0.110592,((18:0.031161,17:0.013724):0.053384,9:0.090026):0.017702):0.019173,(7:0.056938,(16:0.022910,15:0.043297):0.029509):0.009107):0.008416,10:0.090645):0.022631,(29:0.078321,(28:0.036277,(26:0.029920,27:0.036950):0.085555):0.026735):0.028119):0.002222,(14:0.109791,19:0.059172):0.024519):0.013748,((5:0.088393,20:0.084198):0.097991,((8:0.092016,((22:0.031212,21:0.050359):0.000551,(25:0.007374,(23:0.007182,24:0.011995):0.003644):0.029554):0.035311):0.01 [...]
+   tree rep.13680000 = ((((((19:0.102325,14:0.068398):0.040331,((((27:0.025751,26:0.036199):0.062886,28:0.074123):0.024867,29:0.092296):0.013670,(((20:0.093618,5:0.101728):0.073858,13:0.113793):0.014130,((((25:0.018833,(23:0.008574,24:0.021675):0.015846):0.028178,22:0.042678):0.012411,21:0.046823):0.011264,8:0.094740):0.038975):0.007503):0.010093):0.001942,((4:0.144180,((18:0.034843,17:0.034849):0.077085,9:0.113066):0.029674):0.006040,((7:0.083167,(15:0.039024,16:0.011536):0.039839):0.01 [...]
+   tree rep.13700000 = (6:0.103470,(3:0.121508,(2:0.177217,((12:0.085018,11:0.031181):0.051252,((((10:0.105409,(((16:0.028849,15:0.054493):0.052804,7:0.097471):0.012539,(((18:0.031951,17:0.024335):0.046623,9:0.089751):0.029372,4:0.151066):0.025423):0.006984):0.015497,(14:0.112412,19:0.083813):0.030693):0.021474,(((26:0.035127,27:0.028880):0.059449,28:0.044742):0.026198,29:0.107138):0.007977):0.005322,((((21:0.047679,(22:0.034225,((25:0.008044,24:0.004425):0.024600,23:0.003384):0.008315): [...]
+   tree rep.13720000 = ((6:0.100983,(2:0.181696,(((13:0.132338,((20:0.076794,5:0.124746):0.111853,((((27:0.026067,26:0.032178):0.060412,28:0.063557):0.024544,29:0.084776):0.009192,((19:0.087050,14:0.078896):0.031517,(10:0.114170,((7:0.126929,(((18:0.058249,17:0.010172):0.057446,9:0.123238):0.024233,4:0.125016):0.018969):0.000635,(15:0.035098,16:0.043408):0.036024):0.000955):0.025853):0.001047):0.009155):0.000236):0.000665,(8:0.094182,(21:0.041737,((25:0.013473,(24:0.018374,23:0.011634):0 [...]
+   tree rep.13740000 = (6:0.073006,(3:0.172748,(2:0.210379,(((((14:0.085073,19:0.094095):0.020010,((28:0.048831,(26:0.032663,27:0.030566):0.096476):0.021892,29:0.086824):0.035835):0.016566,((13:0.107158,(5:0.132637,20:0.096367):0.115338):0.005103,(8:0.096371,(21:0.058329,((23:0.007337,(24:0.008367,25:0.009955):0.008722):0.010510,22:0.035159):0.018574):0.064522):0.024809):0.002525):0.003231,(((4:0.094981,((18:0.024020,17:0.019764):0.044979,9:0.118487):0.016326):0.012469,((16:0.020174,15:0 [...]
+   tree rep.13760000 = (((2:0.204258,(((((20:0.086490,5:0.087574):0.100452,(((22:0.016432,((23:0.015415,24:0.007503):0.010203,25:0.006213):0.022226):0.008551,21:0.060209):0.059701,8:0.113510):0.015476):0.009183,((19:0.067550,14:0.118358):0.039612,(((10:0.097566,((15:0.036813,16:0.005207):0.078565,7:0.092243):0.017780):0.007465,(4:0.122878,((18:0.023820,17:0.030318):0.035372,9:0.104011):0.025729):0.012262):0.014021,(((27:0.027704,26:0.018445):0.050866,28:0.032952):0.037950,29:0.089304):0. [...]
+   tree rep.13780000 = (3:0.136514,(6:0.099232,((((((14:0.070902,19:0.079559):0.050583,((((4:0.130351,((18:0.028560,17:0.024363):0.042677,9:0.112611):0.024617):0.014219,(16:0.022683,15:0.054213):0.064559):0.006247,10:0.118196):0.009453,7:0.097931):0.016575):0.005139,((5:0.106504,20:0.120196):0.081209,(29:0.101551,(28:0.032156,(26:0.033517,27:0.028597):0.062478):0.032865):0.009941):0.002309):0.014383,((8:0.104277,((((23:0.014044,24:0.005841):0.005443,25:0.005233):0.009429,22:0.073990):0.0 [...]
+   tree rep.13800000 = (6:0.107603,(3:0.164976,(((((29:0.104080,(28:0.049156,(26:0.018878,27:0.025160):0.060348):0.032291):0.010413,((14:0.081498,19:0.066247):0.054216,(((4:0.114655,((18:0.022247,17:0.039058):0.035870,9:0.116382):0.027241):0.008907,((16:0.034587,15:0.023725):0.067882,7:0.156475):0.013745):0.002472,10:0.116454):0.031560):0.002304):0.009856,((13:0.076029,(5:0.110025,20:0.117904):0.098585):0.016313,(8:0.150079,(((25:0.022124,(24:0.012849,23:0.022215):0.001726):0.020869,22:0 [...]
+   tree rep.13820000 = ((2:0.194591,((((((27:0.033714,26:0.043531):0.066709,28:0.038460):0.032538,29:0.065247):0.013188,((10:0.118063,((7:0.119670,(15:0.047880,16:0.026337):0.046272):0.009942,(((18:0.044450,17:0.024378):0.069983,9:0.075376):0.024288,4:0.122935):0.021019):0.007444):0.019775,(19:0.049340,14:0.094558):0.036348):0.014964):0.023739,((20:0.098648,5:0.092204):0.136947,(13:0.095283,((21:0.047241,(22:0.041558,(25:0.005538,(23:0.007385,24:0.013866):0.005553):0.012480):0.022992):0. [...]
+   tree rep.13840000 = (6:0.049953,((2:0.167491,(((((10:0.121205,((7:0.083334,(15:0.051124,16:0.034321):0.054242):0.006525,(4:0.113310,(9:0.100143,(18:0.050213,17:0.014359):0.047344):0.015803):0.014692):0.018739):0.015079,(19:0.070222,14:0.103718):0.022797):0.002484,(29:0.091821,((27:0.025034,26:0.021712):0.073943,28:0.035710):0.025300):0.019405):0.004807,((20:0.091647,5:0.079971):0.070925,(13:0.103172,(((((24:0.017889,23:0.007832):0.003492,25:0.007773):0.036682,22:0.026152):0.012992,21: [...]
+   tree rep.13860000 = (3:0.136744,((((((((7:0.065614,(15:0.019356,16:0.037879):0.042890):0.011299,(4:0.118268,(9:0.110268,(18:0.028194,17:0.022895):0.037918):0.024977):0.027361):0.008984,10:0.094176):0.033762,((((27:0.018096,26:0.023044):0.061733,28:0.066541):0.028976,(20:0.085884,5:0.084666):0.085591):0.004769,(29:0.109155,(19:0.111450,14:0.079912):0.014317):0.002493):0.008739):0.008744,((8:0.097952,(21:0.057054,(((23:0.005016,24:0.008844):0.010979,25:0.005354):0.021135,22:0.046332):0. [...]
+   tree rep.13880000 = ((6:0.075570,(2:0.182709,(((13:0.085133,((((25:0.009232,(23:0.010133,24:0.008090):0.010255):0.022244,22:0.033334):0.024253,21:0.039989):0.013696,8:0.087639):0.009882):0.015924,((20:0.100982,5:0.117323):0.071441,(((19:0.081714,14:0.097174):0.032023,((((15:0.035341,16:0.020041):0.034057,10:0.109152):0.010804,(4:0.106968,((18:0.052080,17:0.020162):0.037173,9:0.079864):0.017319):0.016371):0.012342,7:0.110111):0.019413):0.013225,(29:0.095084,((27:0.036021,26:0.026265):0 [...]
+   tree rep.13900000 = ((((11:0.033862,12:0.068340):0.048490,((13:0.105661,((20:0.092156,5:0.077130):0.085003,(((((((18:0.036975,17:0.012333):0.070178,9:0.093166):0.015387,4:0.113024):0.014411,((15:0.050159,16:0.026513):0.040908,10:0.114558):0.015395):0.003603,7:0.102616):0.021038,(19:0.074676,14:0.096945):0.017494):0.024907,(29:0.090101,((27:0.025060,26:0.025759):0.079581,28:0.054133):0.024185):0.014127):0.008984):0.003568):0.001984,(8:0.088991,((22:0.018668,((23:0.008813,24:0.008897):0 [...]
+   tree rep.13920000 = (3:0.123826,((((11:0.010050,12:0.070839):0.070386,((((19:0.074741,14:0.080045):0.036334,((((15:0.030498,16:0.023483):0.065951,(((18:0.043682,17:0.013650):0.064834,9:0.105097):0.015597,4:0.149093):0.020270):0.003316,10:0.106196):0.013748,7:0.090080):0.017382):0.009351,(((27:0.026413,26:0.029235):0.073662,28:0.038404):0.017618,29:0.132846):0.002225):0.008456,((8:0.100718,((22:0.029462,(25:0.008355,(23:0.007976,24:0.004469):0.005016):0.025574):0.012185,21:0.042962):0. [...]
+   tree rep.13940000 = ((6:0.092894,3:0.184987):0.010489,(2:0.217613,((11:0.031395,12:0.060894):0.055865,(((8:0.086163,((22:0.040991,((24:0.020070,23:0.015078):0.011068,25:0.007336):0.016167):0.012743,21:0.070663):0.057414):0.050478,(13:0.086507,(5:0.094425,20:0.127380):0.093191):0.011002):0.004142,((((4:0.120904,((18:0.030062,17:0.017625):0.046809,9:0.117713):0.035488):0.006147,((16:0.026522,15:0.028354):0.044096,10:0.094896):0.003153):0.010223,7:0.100419):0.018737,((19:0.096463,14:0.08 [...]
+   tree rep.13960000 = (((2:0.221424,((11:0.024306,12:0.086340):0.061313,((((19:0.078390,14:0.082110):0.039383,(((((18:0.032155,17:0.019642):0.075552,9:0.111896):0.020319,4:0.100168):0.006034,(7:0.088610,(15:0.029952,16:0.025195):0.054484):0.006458):0.020347,10:0.083881):0.015260):0.003871,((28:0.047912,(27:0.037714,26:0.027339):0.067031):0.020964,29:0.110862):0.021098):0.007870,(13:0.092573,((20:0.092674,5:0.134439):0.073802,((((25:0.003004,(24:0.018772,23:0.005823):0.005164):0.019183,2 [...]
+   tree rep.13980000 = (((2:0.212817,((11:0.034563,12:0.045501):0.066806,(((21:0.047225,(22:0.036674,((24:0.014259,23:0.005046):0.010859,25:0.008867):0.026392):0.012161):0.021516,8:0.086271):0.023365,(((19:0.079645,14:0.105997):0.030601,((29:0.078868,((26:0.034319,27:0.032990):0.066701,28:0.033916):0.023944):0.011999,(10:0.073787,((7:0.096388,(16:0.018042,15:0.036304):0.055525):0.009056,(4:0.172032,(9:0.097697,(18:0.041866,17:0.015127):0.044610):0.025764):0.012161):0.014374):0.032090):0. [...]
+   tree rep.14000000 = (6:0.121620,(3:0.126833,(((11:0.069652,12:0.077800):0.055093,((20:0.100026,5:0.076344):0.068563,(((((14:0.072734,19:0.076614):0.029069,((10:0.094355,((((18:0.046106,17:0.021821):0.045168,9:0.091756):0.027579,4:0.097052):0.009338,(15:0.031803,16:0.051664):0.038423):0.002991):0.002024,7:0.087101):0.008927):0.007430,((28:0.038375,(27:0.042867,26:0.030067):0.061060):0.023575,29:0.078684):0.026175):0.006721,13:0.108417):0.003617,(8:0.077409,(((25:0.006262,(23:0.007798,2 [...]
+   tree rep.14020000 = ((2:0.137582,((11:0.023985,12:0.070194):0.055428,((8:0.097367,(((25:0.008329,(24:0.004392,23:0.006185):0.008290):0.041579,22:0.027986):0.010358,21:0.047942):0.047491):0.016262,(((29:0.110248,((26:0.017362,27:0.030149):0.066293,28:0.047817):0.023764):0.011529,((19:0.063549,14:0.107804):0.031674,((10:0.090776,((4:0.098444,(9:0.088266,(18:0.043791,17:0.021404):0.036974):0.013382):0.010640,(16:0.010458,15:0.043105):0.055049):0.009419):0.004639,7:0.103193):0.014400):0.0 [...]
+   tree rep.14040000 = (6:0.106191,(3:0.137760,(((11:0.033969,12:0.071736):0.051668,((((20:0.106119,5:0.103357):0.101652,13:0.106297):0.003464,(((28:0.047437,(27:0.039551,26:0.045894):0.086981):0.037650,29:0.102139):0.013775,((14:0.116257,19:0.069023):0.055487,(((7:0.111033,(15:0.029309,16:0.025419):0.041188):0.016262,((9:0.125727,(18:0.051558,17:0.024678):0.020750):0.022692,4:0.142793):0.013508):0.016910,10:0.113403):0.025371):0.001038):0.013704):0.011882,(8:0.095700,((((24:0.011256,23: [...]
+   tree rep.14060000 = (((((12:0.076276,11:0.027319):0.058281,(((((14:0.100722,19:0.064228):0.051928,(((26:0.028182,27:0.023610):0.057231,28:0.044328):0.024893,29:0.067766):0.014119):0.004683,(5:0.082940,20:0.137550):0.125886):0.000646,(13:0.075480,(8:0.088405,(21:0.038061,(22:0.043540,((24:0.012846,23:0.014373):0.009244,25:0.013107):0.023611):0.022070):0.042801):0.028382):0.001747):0.002344,((((16:0.024068,15:0.031416):0.057042,7:0.092941):0.004033,((9:0.120343,(18:0.038935,17:0.024661) [...]
+   tree rep.14080000 = (2:0.137350,((6:0.110432,3:0.115946):0.006252,((11:0.015930,12:0.080364):0.076003,(((20:0.104253,5:0.118271):0.083481,((8:0.076463,(21:0.061468,(22:0.036936,((24:0.016140,23:0.007587):0.006244,25:0.002974):0.045889):0.007630):0.015705):0.030095,13:0.103947):0.011022):0.000436,((((((15:0.040226,16:0.019074):0.063613,(4:0.109197,(9:0.082678,(18:0.032057,17:0.023034):0.048917):0.023861):0.009125):0.008429,10:0.095530):0.007573,7:0.085657):0.023321,((28:0.051903,(27:0. [...]
+   tree rep.14100000 = ((((11:0.016619,12:0.057491):0.054783,((8:0.110314,((((23:0.004605,24:0.004725):0.009030,25:0.002090):0.027674,22:0.026194):0.020634,21:0.045262):0.033363):0.025685,((((10:0.085457,(4:0.124634,(9:0.087786,(18:0.045013,17:0.026335):0.046059):0.032279):0.009362):0.002823,((15:0.032521,16:0.017312):0.054164,7:0.085054):0.003088):0.030502,(((14:0.079096,19:0.055015):0.039905,(20:0.096164,5:0.079161):0.067624):0.005864,((28:0.050262,(27:0.032340,26:0.029600):0.076920):0 [...]
+   tree rep.14120000 = (3:0.156137,(6:0.127181,(((12:0.062785,11:0.016359):0.043561,((13:0.079127,((20:0.088696,5:0.106604):0.089255,(8:0.090132,(((25:0.007206,(24:0.004385,23:0.020107):0.000088):0.020085,22:0.017548):0.026718,21:0.038499):0.030990):0.034196):0.005694):0.007361,((29:0.098492,((27:0.025255,26:0.020511):0.055060,28:0.050917):0.015049):0.030706,((7:0.089614,((15:0.044252,16:0.023015):0.051655,((4:0.123555,(9:0.106954,(18:0.032828,17:0.033599):0.045748):0.028486):0.005869,10 [...]
+   tree rep.14140000 = (3:0.115104,(6:0.068832,(((((((((15:0.051422,16:0.015635):0.055218,7:0.081958):0.013597,10:0.092965):0.007442,(((18:0.030731,17:0.027876):0.045583,9:0.119940):0.019617,4:0.158736):0.037003):0.037045,(14:0.101292,19:0.102021):0.039752):0.009821,(29:0.092076,(28:0.034098,(27:0.022400,26:0.026845):0.058948):0.013937):0.019948):0.012568,((13:0.078086,(8:0.114486,(21:0.036605,(22:0.070450,(25:0.005547,(23:0.006818,24:0.014915):0.004265):0.023884):0.014374):0.034633):0.0 [...]
+   tree rep.14160000 = (6:0.092080,(3:0.114547,(((((29:0.095609,(28:0.060102,(27:0.043620,26:0.016867):0.058596):0.016112):0.018052,(((((15:0.040193,16:0.027777):0.058052,(4:0.168737,((18:0.043208,17:0.022372):0.050386,9:0.124826):0.023504):0.018233):0.000479,10:0.099214):0.004667,7:0.095055):0.018183,(14:0.092927,19:0.090473):0.026299):0.026680):0.012010,(((21:0.053997,(22:0.038059,((23:0.010981,24:0.006512):0.005489,25:0.005463):0.028238):0.005442):0.019467,8:0.114783):0.033707,(13:0.0 [...]
+   tree rep.14180000 = (((((((5:0.098003,20:0.093994):0.075393,(((14:0.093447,19:0.088191):0.037978,(((7:0.136101,(16:0.022344,15:0.048648):0.061870):0.001109,(4:0.156201,(9:0.128003,(18:0.045105,17:0.031432):0.028437):0.020964):0.020669):0.006613,10:0.111591):0.018464):0.009947,(((26:0.033130,27:0.026649):0.074726,28:0.055755):0.011774,29:0.086790):0.014280):0.021878):0.011068,(13:0.109827,(((22:0.061207,((24:0.010487,23:0.010533):0.007171,25:0.007259):0.019450):0.008659,21:0.030908):0. [...]
+   tree rep.14200000 = ((6:0.071145,3:0.085173):0.017527,(((((((10:0.107201,((16:0.039344,15:0.027203):0.054392,7:0.097777):0.014047):0.007206,(((18:0.033833,17:0.029689):0.049573,9:0.102038):0.030053,4:0.137424):0.015242):0.030100,(14:0.115056,19:0.086562):0.028413):0.005216,((28:0.045363,(26:0.024469,27:0.041001):0.072359):0.015433,29:0.072022):0.004469):0.011371,(((8:0.089323,(21:0.056382,(22:0.037095,((24:0.005563,25:0.008117):0.004321,23:0.007925):0.017454):0.010982):0.035601):0.022 [...]
+   tree rep.14220000 = ((6:0.080980,3:0.116570):0.004411,(((((((19:0.074340,14:0.092269):0.024568,29:0.071982):0.009097,(((27:0.048186,26:0.012942):0.092708,28:0.048547):0.027351,(7:0.088214,((((18:0.029615,17:0.022752):0.042224,9:0.093992):0.012194,4:0.126903):0.006045,(10:0.092112,(15:0.044749,16:0.024822):0.044496):0.005706):0.008609):0.026058):0.001767):0.005949,(20:0.077944,5:0.103161):0.083094):0.001775,((((25:0.005886,(24:0.004864,23:0.007547):0.004220):0.044939,(22:0.021725,21:0. [...]
+   tree rep.14240000 = (6:0.075767,(3:0.154230,(2:0.186006,((((9:0.111814,(18:0.037662,17:0.021731):0.034945):0.025816,((7:0.119053,((15:0.043941,16:0.039754):0.049586,10:0.087245):0.002529):0.011420,4:0.112542):0.007501):0.030558,((((8:0.070849,((22:0.020041,(25:0.004154,(23:0.014247,24:0.004979):0.006517):0.026332):0.022777,21:0.043851):0.032212):0.024133,13:0.072922):0.010556,(20:0.091791,5:0.101059):0.098898):0.001057,((29:0.076494,((27:0.034171,26:0.023092):0.053980,28:0.049849):0.0 [...]
+   tree rep.14260000 = ((6:0.081026,(2:0.211894,((((5:0.105582,20:0.093265):0.085642,(13:0.107986,(8:0.086399,(21:0.049552,(22:0.038408,((23:0.014578,24:0.004917):0.010642,25:0.018661):0.030823):0.013572):0.054991):0.036547):0.000880):0.003943,((((28:0.062142,(26:0.036922,27:0.035720):0.051021):0.022224,29:0.096744):0.027843,(7:0.088812,(((16:0.031195,15:0.043427):0.045888,(4:0.160037,(9:0.108933,(18:0.066281,17:0.015853):0.038709):0.027913):0.004655):0.011347,10:0.101662):0.012108):0.02 [...]
+   tree rep.14280000 = ((((((5:0.145348,20:0.094691):0.095511,((((28:0.052403,(26:0.034551,27:0.024764):0.055517):0.020009,29:0.106329):0.016677,((7:0.076261,(((16:0.040669,15:0.046193):0.070219,10:0.075746):0.005288,((9:0.102947,(18:0.041676,17:0.025028):0.045900):0.021853,4:0.203531):0.010512):0.000185):0.032575,(19:0.073962,14:0.083016):0.051732):0.003267):0.004670,(13:0.078911,(8:0.111345,((22:0.028306,(25:0.003669,(23:0.008580,24:0.003994):0.005152):0.027437):0.023716,21:0.036203):0 [...]
+   tree rep.14300000 = ((2:0.197297,((((((4:0.120820,(9:0.095197,(18:0.041428,17:0.015818):0.061398):0.017896):0.004989,(10:0.113263,((16:0.038318,15:0.043885):0.029874,7:0.092842):0.012108):0.014220):0.021723,(19:0.099404,14:0.099051):0.025603):0.007237,(29:0.078344,(28:0.037268,(26:0.031743,27:0.034239):0.059426):0.010736):0.022385):0.006425,((13:0.075579,(((22:0.021284,((24:0.018461,23:0.012408):0.003164,25:0.030544):0.041784):0.025439,21:0.051087):0.023462,8:0.092459):0.010060):0.002 [...]
+   tree rep.14320000 = ((6:0.095807,3:0.079535):0.011903,((((((19:0.086772,14:0.093520):0.025963,(((27:0.026309,26:0.025614):0.080765,28:0.039563):0.038677,29:0.063196):0.008931):0.000842,((7:0.105030,((15:0.038536,16:0.027796):0.067886,10:0.088900):0.000403):0.012691,(4:0.104714,((18:0.039717,17:0.016315):0.063001,9:0.119497):0.019896):0.005591):0.029214):0.009690,((8:0.084660,((22:0.061830,((23:0.006918,24:0.018140):0.003372,25:0.005134):0.015418):0.026933,21:0.066010):0.018595):0.0475 [...]
+   tree rep.14340000 = (((((((5:0.093585,20:0.088784):0.122535,13:0.063321):0.019326,((8:0.083754,(((25:0.014735,(24:0.005560,23:0.013425):0.006175):0.020413,22:0.038811):0.004929,21:0.044991):0.018572):0.060321,((29:0.123394,(28:0.063781,(26:0.021188,27:0.027509):0.072114):0.021717):0.006677,((14:0.083448,19:0.099654):0.036617,(((7:0.106952,(16:0.011448,15:0.040508):0.049158):0.016181,(((18:0.051742,17:0.020941):0.013937,9:0.152473):0.032463,4:0.180910):0.025729):0.005740,10:0.089356):0 [...]
+   tree rep.14360000 = ((((((13:0.082279,(8:0.080600,(21:0.039152,(22:0.029154,((23:0.009204,24:0.013178):0.004352,25:0.007928):0.022018):0.019835):0.034694):0.026954):0.000359,(5:0.099226,20:0.080336):0.103325):0.000166,((((28:0.031693,(26:0.018676,27:0.043489):0.064340):0.011942,29:0.090570):0.007426,(((4:0.123130,((18:0.032696,17:0.017318):0.054678,9:0.133004):0.030652):0.008577,(7:0.108388,(16:0.015328,15:0.042876):0.049616):0.008982):0.006852,10:0.077154):0.032379):0.004000,(14:0.09 [...]
+   tree rep.14380000 = ((3:0.161460,(((12:0.064123,11:0.051177):0.057477,((13:0.097005,(8:0.117979,((((24:0.007041,23:0.004805):0.009300,25:0.006699):0.021100,22:0.037570):0.021306,21:0.035016):0.018641):0.027237):0.000471,(((20:0.093183,5:0.092578):0.117122,((19:0.115319,14:0.087951):0.030451,(29:0.102726,((27:0.041716,26:0.027781):0.055089,28:0.038444):0.024474):0.011407):0.002059):0.006271,(((7:0.152134,(15:0.037355,16:0.018386):0.053173):0.008422,10:0.121825):0.015566,(4:0.154633,(9: [...]
+   tree rep.14400000 = (((2:0.182385,((((8:0.100526,(21:0.051880,((25:0.008924,(23:0.018404,24:0.007340):0.004255):0.025612,22:0.026206):0.009396):0.067431):0.064997,((((16:0.025452,15:0.027526):0.069229,(10:0.095866,7:0.087125):0.011234):0.003655,((9:0.109984,(18:0.038870,17:0.010620):0.047729):0.030429,4:0.146657):0.001860):0.039571,(((19:0.069995,14:0.103147):0.030160,29:0.113829):0.022598,(28:0.055335,(26:0.024974,27:0.034259):0.053663):0.018596):0.001999):0.017714):0.000258,(13:0.07 [...]
+   tree rep.14420000 = ((6:0.118975,(((12:0.073058,11:0.028996):0.043081,(((13:0.105889,(8:0.074037,((22:0.034386,(25:0.004400,(23:0.008141,24:0.012951):0.010736):0.027715):0.017763,21:0.029287):0.013108):0.046936):0.003868,(5:0.126609,20:0.095677):0.106997):0.000365,(((10:0.078730,(((9:0.093869,(18:0.030316,17:0.019799):0.050520):0.031460,4:0.139378):0.009258,((16:0.034416,15:0.034210):0.063128,7:0.093926):0.008622):0.009365):0.028676,(19:0.077486,14:0.079473):0.030785):0.000857,(29:0.1 [...]
+   tree rep.14440000 = (2:0.144574,(((11:0.030124,12:0.094769):0.054869,((((20:0.100846,5:0.089995):0.121170,13:0.073179):0.006107,(8:0.101970,(21:0.037557,(22:0.059594,((23:0.022751,24:0.012959):0.009672,25:0.012736):0.016366):0.030159):0.046434):0.026969):0.006800,((19:0.096372,14:0.090025):0.043098,(((28:0.064258,(27:0.036330,26:0.027497):0.057822):0.028356,29:0.095392):0.012262,(((4:0.130393,(9:0.117242,(18:0.049745,17:0.021656):0.037866):0.043269):0.019074,(10:0.091895,(15:0.040993, [...]
+   tree rep.14460000 = ((6:0.096093,(((12:0.094901,11:0.017764):0.065316,((((22:0.042956,21:0.067586):0.005885,(23:0.009733,(25:0.014910,24:0.006742):0.003640):0.028335):0.053738,8:0.087925):0.011709,((20:0.070635,5:0.086703):0.079683,((((19:0.079877,14:0.084180):0.037157,((28:0.044942,(27:0.047180,26:0.018611):0.076416):0.030044,29:0.111024):0.012511):0.005397,(((9:0.115394,(18:0.036856,17:0.022787):0.041823):0.017919,4:0.122201):0.022512,((7:0.093915,10:0.079163):0.004668,(15:0.035275, [...]
+   tree rep.14480000 = (6:0.076804,(3:0.131985,(2:0.175797,((12:0.056970,11:0.014497):0.061079,((((28:0.071815,(27:0.036400,26:0.036213):0.070471):0.008914,29:0.086441):0.001921,((19:0.053581,14:0.109095):0.037484,(((10:0.095131,(15:0.023717,16:0.022341):0.043499):0.000497,((9:0.116382,(18:0.046916,17:0.014446):0.045406):0.020083,4:0.129287):0.003964):0.002427,7:0.105601):0.028446):0.000916):0.011150,(((21:0.042405,(22:0.045847,((24:0.015331,23:0.017413):0.001413,25:0.005903):0.019928):0 [...]
+   tree rep.14500000 = ((((11:0.018297,12:0.080051):0.047845,(((((27:0.043992,26:0.018108):0.071024,28:0.026291):0.025729,29:0.120020):0.017142,((10:0.100030,((4:0.160242,(9:0.102193,(18:0.043024,17:0.035537):0.030354):0.028238):0.006614,(7:0.094682,(15:0.032203,16:0.018649):0.057108):0.004923):0.008185):0.013191,(19:0.073228,14:0.099684):0.029431):0.011377):0.006531,(((((22:0.026252,((24:0.014597,25:0.013620):0.007544,23:0.023304):0.019942):0.007705,21:0.029318):0.030661,8:0.081721):0.0 [...]
+   tree rep.14520000 = ((3:0.113746,6:0.059317):0.016682,(2:0.178560,((12:0.064105,11:0.044953):0.050830,((((21:0.041725,(22:0.024816,((24:0.015388,23:0.008307):0.008930,25:0.003320):0.026442):0.007439):0.024342,8:0.090055):0.034229,(20:0.100807,5:0.098142):0.094531):0.001326,(13:0.098177,(((10:0.093148,(((15:0.033541,16:0.034825):0.033327,7:0.072312):0.003505,(((18:0.027196,17:0.018674):0.037362,9:0.109414):0.018489,4:0.113940):0.014468):0.000868):0.032819,(19:0.072184,14:0.070719):0.01 [...]
+   tree rep.14540000 = (((((12:0.046254,11:0.026087):0.047404,((((((((23:0.018302,24:0.006231):0.001678,25:0.024077):0.026603,22:0.032296):0.021016,21:0.048543):0.042350,8:0.111247):0.012288,13:0.089956):0.001381,(20:0.094367,5:0.104479):0.075631):0.000815,(((19:0.083949,14:0.095685):0.022912,((10:0.072374,((15:0.044920,16:0.019874):0.034504,7:0.102263):0.007305):0.007046,(4:0.108679,((18:0.039308,17:0.021718):0.038554,9:0.105166):0.022527):0.017253):0.017719):0.016304,((28:0.041030,(27: [...]
+   tree rep.14560000 = (((((((((7:0.097140,(10:0.102396,((16:0.022168,15:0.038716):0.056339,(4:0.126187,(9:0.114752,(18:0.035421,17:0.018483):0.034806):0.033388):0.009983):0.007985):0.002263):0.049354,(19:0.098187,14:0.095292):0.024960):0.012342,(29:0.071038,(28:0.042836,(26:0.033786,27:0.036382):0.072083):0.018878):0.013233):0.013214,(5:0.094292,20:0.077047):0.115692):0.000575,13:0.080213):0.000620,((((25:0.012132,(23:0.009062,24:0.004384):0.001028):0.028770,22:0.034851):0.010128,21:0.0 [...]
+   tree rep.14580000 = ((((12:0.079634,11:0.029859):0.037428,((((8:0.077540,(21:0.041897,((25:0.002396,(23:0.013115,24:0.008542):0.007138):0.016472,22:0.030626):0.013511):0.021982):0.014737,(20:0.104956,5:0.110339):0.089261):0.004381,13:0.099766):0.010469,(((29:0.101025,(28:0.045330,(27:0.026036,26:0.032456):0.067149):0.022235):0.000549,(((7:0.074047,(15:0.038566,16:0.026670):0.042416):0.005141,(4:0.138242,(9:0.089116,(18:0.030830,17:0.032023):0.050450):0.022636):0.004874):0.000683,10:0. [...]
+   tree rep.14600000 = ((6:0.129336,3:0.140670):0.007301,(2:0.162211,((12:0.057380,11:0.020723):0.054218,(((20:0.087970,5:0.101376):0.101492,(13:0.062819,(8:0.119562,((22:0.025604,(25:0.011605,(24:0.006910,23:0.010304):0.003519):0.047901):0.019913,21:0.081167):0.037159):0.006419):0.001737):0.000835,(((7:0.082827,((10:0.102982,(15:0.035003,16:0.041997):0.039516):0.002175,(4:0.099513,(9:0.091338,(18:0.041176,17:0.029249):0.069544):0.029735):0.012196):0.010982):0.022516,(19:0.066805,14:0.11 [...]
+   tree rep.14620000 = ((2:0.189761,(((13:0.080830,(((((23:0.012261,24:0.016584):0.006595,25:0.007070):0.026213,22:0.027958):0.014021,21:0.035618):0.029212,8:0.087826):0.022699):0.015449,((((10:0.077393,((((18:0.027888,17:0.030157):0.036752,9:0.083130):0.027838,4:0.137787):0.022183,((15:0.034883,16:0.031421):0.050143,7:0.094247):0.012836):0.001313):0.026844,((28:0.042356,(27:0.041389,26:0.019302):0.072279):0.016291,29:0.095475):0.015122):0.001128,(19:0.064036,14:0.091034):0.028495):0.012 [...]
+   tree rep.14640000 = ((3:0.121275,6:0.086111):0.014742,(2:0.149928,((12:0.073851,11:0.041551):0.034521,((20:0.141020,5:0.101288):0.096611,((13:0.113530,(8:0.102680,((((24:0.013000,23:0.011456):0.010713,25:0.011044):0.035962,22:0.027023):0.017410,21:0.050472):0.010086):0.039270):0.004349,((29:0.106097,((27:0.037634,26:0.023628):0.082746,28:0.025744):0.024668):0.010787,((7:0.076127,(10:0.086947,((15:0.042316,16:0.020140):0.059829,(4:0.137140,(9:0.089367,(18:0.046690,17:0.017899):0.056462 [...]
+   tree rep.14660000 = ((((((13:0.099253,(5:0.147867,20:0.070251):0.112829):0.004202,((((29:0.160143,(28:0.035827,(26:0.017322,27:0.038550):0.116168):0.030346):0.015774,(19:0.096493,14:0.099791):0.031080):0.002703,((10:0.094849,((16:0.021292,15:0.030273):0.063274,7:0.077606):0.021801):0.007364,((9:0.115592,(18:0.034656,17:0.021368):0.071147):0.032717,4:0.169187):0.016208):0.042149):0.018879,(8:0.095213,(21:0.050322,(22:0.028522,((24:0.013731,25:0.007633):0.003273,23:0.005657):0.021388):0 [...]
+   tree rep.14680000 = (((((11:0.053577,12:0.058274):0.042506,(((5:0.117190,20:0.089358):0.078681,(13:0.105370,(((28:0.036870,(26:0.025515,27:0.042758):0.076989):0.031071,29:0.076775):0.018281,((10:0.136168,(((16:0.033633,15:0.037289):0.047604,7:0.099900):0.009097,(4:0.119587,(9:0.142491,(18:0.028639,17:0.028050):0.047504):0.019919):0.010461):0.011478):0.033714,(14:0.061988,19:0.075355):0.023506):0.001185):0.007139):0.002865):0.007701,((21:0.049470,(22:0.032892,(25:0.003046,(23:0.008538, [...]
+   tree rep.14700000 = ((((11:0.031295,12:0.074195):0.039913,(((29:0.100991,((26:0.008832,27:0.049438):0.043105,28:0.069344):0.023203):0.011724,(14:0.120070,19:0.062236):0.023134):0.009592,((((7:0.133548,(16:0.027357,15:0.039313):0.029430):0.019990,10:0.101082):0.011435,(4:0.106275,(9:0.104620,(18:0.027530,17:0.030760):0.056380):0.010603):0.009203):0.025037,((((22:0.040495,(25:0.001528,(24:0.010008,23:0.017727):0.008065):0.028358):0.004601,21:0.074044):0.025074,8:0.104886):0.022208,((5:0 [...]
+   tree rep.14720000 = ((((((28:0.062585,(27:0.022656,26:0.022648):0.043518):0.038181,(29:0.101810,((19:0.103047,14:0.122130):0.020373,(((((18:0.036206,17:0.014553):0.057045,9:0.123302):0.028408,4:0.105112):0.004967,((15:0.033898,16:0.024941):0.061644,10:0.107659):0.011650):0.011283,7:0.102818):0.010033):0.007743):0.002487):0.010718,((20:0.112120,5:0.078075):0.110033,((8:0.090073,((22:0.026362,((23:0.006962,24:0.006793):0.005062,25:0.011420):0.020296):0.028604,21:0.060177):0.024008):0.03 [...]
+   tree rep.14740000 = (3:0.114310,((2:0.156348,((11:0.043395,12:0.054648):0.044471,(((((19:0.080416,14:0.090519):0.030017,(4:0.125964,(((7:0.106721,(16:0.037779,15:0.049201):0.040706):0.009224,10:0.088085):0.004853,((18:0.050839,17:0.021516):0.039878,9:0.086826):0.030457):0.002314):0.007831):0.007413,((28:0.046931,(26:0.019990,27:0.016709):0.052604):0.016188,29:0.094713):0.013114):0.010154,(13:0.103776,(5:0.103938,20:0.077120):0.089655):0.005429):0.000534,(8:0.086214,(21:0.041591,((25:0 [...]
+   tree rep.14760000 = ((3:0.115667,(2:0.206120,((11:0.032297,12:0.076028):0.054902,((((29:0.125053,((26:0.033984,27:0.037250):0.070873,28:0.031069):0.030883):0.015770,(((10:0.076788,((16:0.033864,15:0.030666):0.036654,(4:0.143343,((18:0.027087,17:0.020234):0.058904,9:0.132587):0.027833):0.020686):0.000947):0.011197,7:0.116007):0.011793,(19:0.087294,14:0.094852):0.027088):0.008079):0.005453,(8:0.113149,(21:0.041413,((25:0.000385,(23:0.007816,24:0.008171):0.010542):0.024830,22:0.033073):0 [...]
+   tree rep.14780000 = ((6:0.086740,3:0.104422):0.008010,(2:0.162734,((11:0.039787,12:0.076531):0.048434,(((29:0.078831,(28:0.054754,(26:0.020790,27:0.037148):0.062607):0.033308):0.022105,((((16:0.024464,15:0.038310):0.035885,((9:0.090250,(18:0.039332,17:0.021730):0.041296):0.013700,4:0.117507):0.007361):0.016851,(10:0.094994,7:0.073288):0.009662):0.016243,(19:0.097540,14:0.088025):0.031311):0.007602):0.010728,(((8:0.124584,(21:0.044089,(22:0.039812,(25:0.008010,(24:0.008393,23:0.015935) [...]
+   tree rep.14800000 = ((3:0.161002,6:0.114759):0.011448,(2:0.107852,((12:0.080895,11:0.041647):0.045714,(((14:0.071705,19:0.064301):0.048445,(((4:0.149642,(9:0.127481,(18:0.054981,17:0.018276):0.044471):0.033303):0.038685,(10:0.092918,(7:0.101381,(15:0.053300,16:0.021854):0.046857):0.005234):0.004838):0.022172,(((27:0.041737,26:0.019283):0.074053,28:0.046206):0.021290,(29:0.090614,(20:0.086188,5:0.113530):0.093881):0.002524):0.006300):0.000721):0.007283,((8:0.084367,(21:0.035999,(22:0.0 [...]
+   tree rep.14820000 = ((6:0.095071,3:0.133600):0.011955,(2:0.198492,((12:0.067004,11:0.036058):0.048054,(((((4:0.112652,((18:0.025098,17:0.020273):0.039824,9:0.099853):0.022026):0.018877,((16:0.025389,15:0.044840):0.059654,7:0.105934):0.008022):0.014817,10:0.117418):0.021291,((19:0.092955,14:0.081831):0.035823,((28:0.040257,(26:0.019200,27:0.037201):0.061638):0.011509,29:0.083647):0.016041):0.000695):0.014509,((13:0.092903,(8:0.101607,(((25:0.013185,(23:0.008928,24:0.017896):0.004679):0 [...]
+   tree rep.14840000 = ((6:0.071929,3:0.152713):0.018990,(((11:0.021993,12:0.073863):0.047784,((((((27:0.041598,26:0.022841):0.071488,28:0.029489):0.021511,29:0.097754):0.014995,((19:0.086350,14:0.102331):0.032418,(7:0.134879,((4:0.135945,((18:0.036784,17:0.022932):0.036193,9:0.075733):0.028766):0.003628,(10:0.097081,(15:0.027549,16:0.019101):0.056891):0.018084):0.013670):0.015631):0.003731):0.004404,((20:0.100873,5:0.089342):0.090499,13:0.085459):0.020868):0.002102,(8:0.069593,((((24:0. [...]
+   tree rep.14860000 = (3:0.150861,(6:0.081495,(2:0.165103,((12:0.081034,11:0.027585):0.036514,(((((26:0.022593,27:0.019630):0.057310,28:0.038994):0.040053,29:0.085304):0.010416,(((((16:0.029772,15:0.049217):0.046986,7:0.107403):0.005480,10:0.139305):0.004142,(4:0.110704,((18:0.038655,17:0.014948):0.060329,9:0.100171):0.021299):0.008842):0.017390,(19:0.083579,14:0.101296):0.025440):0.026105):0.008855,((((22:0.016814,((23:0.010928,24:0.007625):0.005721,25:0.010553):0.030223):0.021716,21:0 [...]
+   tree rep.14880000 = ((2:0.151074,((11:0.046346,12:0.065799):0.048808,((((((7:0.082735,(16:0.023288,15:0.036195):0.037905):0.012910,10:0.095069):0.007422,(((18:0.037731,17:0.024512):0.041247,9:0.128444):0.027314,4:0.119126):0.016645):0.017997,(((28:0.048007,(26:0.029003,27:0.040684):0.070345):0.033897,29:0.121227):0.013159,(19:0.087538,14:0.066897):0.028675):0.000471):0.005992,(5:0.112733,20:0.093318):0.073807):0.011704,(13:0.082405,((21:0.043167,((25:0.011577,(23:0.013188,24:0.009130) [...]
+   tree rep.14900000 = ((((11:0.046317,12:0.076135):0.046648,((((19:0.088363,14:0.123604):0.072956,(10:0.096027,((7:0.098280,(15:0.041661,16:0.025197):0.062938):0.008851,((9:0.100466,(18:0.046481,17:0.010595):0.037519):0.023420,4:0.134003):0.020243):0.006616):0.008671):0.010537,(29:0.102598,(28:0.045450,(27:0.028623,26:0.025966):0.094518):0.019070):0.012744):0.016775,(((20:0.089888,5:0.112492):0.091858,13:0.110151):0.006801,((((25:0.010502,(23:0.007061,24:0.010174):0.004683):0.035756,22: [...]
+   tree rep.14920000 = ((((12:0.091432,11:0.010412):0.057714,((((((27:0.036780,26:0.030049):0.081279,28:0.038756):0.037310,29:0.115547):0.014270,(14:0.059524,19:0.086619):0.042962):0.004918,(((15:0.048170,16:0.016807):0.068112,7:0.116977):0.011165,((4:0.138171,((18:0.034831,17:0.035709):0.077649,9:0.118322):0.021066):0.018077,10:0.113986):0.010804):0.033757):0.008498,(((22:0.041337,(21:0.063586,(25:0.008148,(24:0.008831,23:0.005557):0.012830):0.024120):0.011827):0.025179,8:0.103264):0.03 [...]
+   tree rep.14940000 = ((6:0.099923,3:0.127922):0.006737,(2:0.218864,((11:0.030871,12:0.057379):0.059863,((7:0.158702,((10:0.075090,(15:0.050347,16:0.011980):0.069021):0.009871,((9:0.102395,(18:0.062053,17:0.018244):0.038518):0.035290,4:0.145665):0.011335):0.005706):0.024530,(((19:0.073947,14:0.071763):0.048762,((28:0.048284,(27:0.031525,26:0.034074):0.104597):0.031328,29:0.113638):0.017560):0.006141,((8:0.075681,((22:0.031260,((23:0.016923,24:0.013643):0.001481,25:0.004892):0.026256):0. [...]
+   tree rep.14960000 = ((2:0.328180,((12:0.090456,11:0.026885):0.068828,(((((14:0.095435,19:0.098737):0.041539,(29:0.149417,((27:0.022312,26:0.044589):0.082333,28:0.060732):0.016786):0.006607):0.008246,(20:0.072819,5:0.167102):0.139613):0.008423,(13:0.127341,(8:0.081155,((((24:0.006705,23:0.005538):0.007818,25:0.008458):0.021738,22:0.044723):0.003494,21:0.061323):0.028579):0.058520):0.021625):0.011421,((4:0.206385,((18:0.036642,17:0.029434):0.031998,9:0.150115):0.021646):0.020466,((7:0.1 [...]
+   tree rep.14980000 = ((6:0.135989,3:0.118557):0.024126,(2:0.223548,((((8:0.105006,((22:0.031863,(25:0.007069,(23:0.012164,24:0.006251):0.004066):0.017767):0.023576,21:0.038428):0.030856):0.041064,13:0.116735):0.013151,(((29:0.095160,((27:0.042719,26:0.013751):0.083821,28:0.057332):0.048614):0.001273,(20:0.072836,5:0.131268):0.094443):0.005390,((19:0.099344,14:0.065255):0.038848,(((9:0.120081,(18:0.052815,17:0.014191):0.059690):0.015359,4:0.157417):0.005809,(((15:0.025845,16:0.026725):0 [...]
+   tree rep.15000000 = (3:0.143153,(6:0.102970,(((11:0.041413,12:0.082407):0.026719,((((5:0.140933,20:0.063810):0.103181,((10:0.105610,((7:0.076401,(16:0.026868,15:0.054989):0.052122):0.000417,(4:0.107921,((18:0.030354,17:0.025480):0.042093,9:0.121915):0.016234):0.004939):0.005669):0.039628,(19:0.081036,14:0.086262):0.017023):0.004548):0.007963,((28:0.048190,(26:0.018506,27:0.041757):0.055777):0.024996,29:0.078906):0.008962):0.010760,(((((25:0.014986,(24:0.003403,23:0.009087):0.002852):0 [...]
+   tree rep.15020000 = ((3:0.143993,(((12:0.090924,11:0.024960):0.068487,((((14:0.090019,19:0.098867):0.033199,((((22:0.056981,21:0.055627):0.002801,(25:0.003038,(23:0.015571,24:0.015569):0.012635):0.018981):0.065525,8:0.147023):0.025071,((20:0.077692,5:0.154200):0.093655,13:0.093607):0.004337):0.002285):0.001792,(((27:0.058921,26:0.012075):0.077884,28:0.042066):0.029480,29:0.099061):0.010761):0.008529,((9:0.106495,(18:0.047632,17:0.021414):0.049951):0.014126,(4:0.140313,(10:0.112050,((1 [...]
+   tree rep.15040000 = ((2:0.189964,((12:0.053927,11:0.051163):0.050380,(((5:0.100000,20:0.118831):0.087423,(13:0.118506,((21:0.051248,((25:0.003371,(23:0.008852,24:0.012292):0.008900):0.016202,22:0.028578):0.010895):0.031035,8:0.096201):0.008180):0.006978):0.018404,((((4:0.137554,((18:0.043734,17:0.023880):0.040580,9:0.140669):0.019225):0.003570,((7:0.093394,(16:0.025653,15:0.041882):0.039220):0.005375,10:0.108154):0.003703):0.017539,(19:0.078993,14:0.096020):0.035155):0.011065,((28:0.0 [...]
+   tree rep.15060000 = ((((12:0.057172,11:0.022565):0.041024,((((((27:0.028420,26:0.020335):0.079021,28:0.037013):0.016989,29:0.085191):0.010442,((19:0.081475,14:0.091515):0.039295,((((18:0.049360,17:0.017449):0.046354,9:0.128850):0.029163,4:0.125176):0.015813,((7:0.077830,(15:0.046760,16:0.025975):0.065554):0.000380,10:0.085768):0.004324):0.016683):0.005666):0.020617,(((21:0.058381,(((24:0.017469,23:0.012597):0.001922,25:0.013479):0.004679,22:0.040754):0.016321):0.040027,(20:0.115902,5: [...]
+   tree rep.15080000 = ((6:0.058948,(((12:0.073431,11:0.032440):0.059282,((((((((18:0.035535,17:0.018202):0.038858,9:0.100777):0.009311,4:0.113118):0.003981,(10:0.113576,(7:0.092311,(16:0.024861,15:0.031701):0.044998):0.006288):0.016730):0.016616,(19:0.063833,14:0.105473):0.018625):0.006014,((5:0.098274,20:0.100669):0.096616,((28:0.047819,(26:0.026469,27:0.031412):0.060291):0.010090,29:0.111229):0.004908):0.006573):0.005740,13:0.071530):0.005961,((((25:0.004270,(23:0.012731,24:0.005230): [...]
+   tree rep.15100000 = (((((12:0.074602,11:0.034072):0.032336,(((((4:0.108819,((18:0.038316,17:0.020568):0.061177,9:0.101591):0.027463):0.013065,((16:0.026932,15:0.030504):0.048180,10:0.117432):0.002874):0.011482,7:0.091232):0.032112,((19:0.086259,14:0.124346):0.032316,((28:0.058844,(26:0.020110,27:0.041998):0.112032):0.028667,29:0.082739):0.021190):0.007719):0.009216,((((22:0.064130,(25:0.007074,(24:0.003515,23:0.005042):0.004907):0.034461):0.019277,21:0.056971):0.024288,8:0.090496):0.0 [...]
+   tree rep.15120000 = ((((((8:0.121732,(21:0.040273,(((23:0.007447,24:0.006848):0.002393,25:0.012552):0.025221,22:0.039611):0.021310):0.023401):0.018484,(((((28:0.041385,(27:0.040336,26:0.026180):0.071324):0.008770,29:0.087073):0.034607,((((15:0.025678,16:0.033846):0.053524,((9:0.091696,(18:0.032997,17:0.032381):0.063960):0.015233,4:0.123165):0.008931):0.002415,10:0.100455):0.011071,7:0.115027):0.022377):0.002152,(19:0.076515,14:0.095622):0.019015):0.012720,13:0.071855):0.004334):0.0030 [...]
+   tree rep.15140000 = ((2:0.176873,((11:0.032093,12:0.068763):0.056832,(((((19:0.091566,14:0.108557):0.034147,((4:0.137205,((18:0.027029,17:0.017193):0.039939,9:0.103269):0.046117):0.005293,(10:0.097894,(7:0.091393,(16:0.030602,15:0.029144):0.059770):0.003246):0.011560):0.012590):0.002842,(((26:0.030048,27:0.049667):0.069380,28:0.026687):0.026368,29:0.079665):0.019084):0.006140,((21:0.042817,(22:0.054024,(23:0.014042,(25:0.014445,24:0.012566):0.002222):0.048441):0.008981):0.065050,8:0.1 [...]
+   tree rep.15160000 = ((3:0.155072,(2:0.188844,(((((((25:0.014733,(23:0.023165,24:0.014928):0.005737):0.035924,22:0.027387):0.008645,21:0.036707):0.051974,8:0.092423):0.025136,(13:0.104753,(5:0.108966,20:0.101109):0.112959):0.000605):0.007078,((((((18:0.051270,17:0.024362):0.064679,9:0.128116):0.012940,4:0.170382):0.016558,(10:0.077024,(7:0.135219,(16:0.012221,15:0.045909):0.050630):0.016845):0.002632):0.023031,(19:0.130809,14:0.099168):0.050138):0.000024,(29:0.100919,(28:0.043221,(26:0 [...]
+   tree rep.15180000 = ((2:0.183673,((11:0.029251,12:0.061687):0.029007,((13:0.095742,(5:0.096907,20:0.088971):0.059380):0.008366,(((19:0.061147,14:0.119350):0.028262,((((16:0.041186,15:0.039200):0.071307,(7:0.114518,(((18:0.038378,17:0.028624):0.036666,9:0.103820):0.026181,4:0.092474):0.007593):0.002578):0.003282,10:0.107132):0.027058,((28:0.036858,(26:0.032991,27:0.044961):0.053642):0.016064,29:0.087416):0.010908):0.004577):0.017966,(((22:0.020778,(24:0.014432,(25:0.022039,23:0.008306) [...]
+   tree rep.15200000 = (6:0.096737,((((12:0.073874,11:0.034574):0.038219,((((((25:0.004109,(24:0.012687,23:0.007108):0.008990):0.016493,22:0.036361):0.011765,21:0.037147):0.025754,8:0.082879):0.036731,((20:0.097103,5:0.091394):0.086512,13:0.091293):0.007754):0.001066,((14:0.112485,19:0.100471):0.032836,((((28:0.061993,(27:0.027466,26:0.021941):0.078147):0.016836,29:0.075476):0.013972,7:0.113163):0.000908,(((4:0.160382,((18:0.028935,17:0.028590):0.048121,9:0.118081):0.030240):0.007166,(15 [...]
+   tree rep.15220000 = (((2:0.178827,((11:0.025813,12:0.074001):0.064601,(((((27:0.025133,26:0.043231):0.067729,28:0.046876):0.016445,29:0.117096):0.016291,((((4:0.135855,(9:0.142333,(18:0.044174,17:0.030891):0.045045):0.019735):0.010867,((15:0.043872,16:0.016654):0.066265,10:0.078816):0.007405):0.014871,7:0.108328):0.013802,(19:0.055510,14:0.099880):0.028314):0.009146):0.008225,(((21:0.062690,(22:0.022711,((23:0.008713,24:0.010819):0.003081,25:0.006360):0.031032):0.013403):0.035062,8:0. [...]
+   tree rep.15240000 = ((2:0.153954,((11:0.029798,12:0.061659):0.031927,((((14:0.088432,19:0.077406):0.045363,((5:0.080438,20:0.093763):0.072154,(13:0.079073,(8:0.126077,((22:0.039790,(25:0.001519,(23:0.013557,24:0.004003):0.008626):0.021402):0.014862,21:0.039680):0.039405):0.008999):0.009957):0.006588):0.000251,(29:0.095091,(28:0.041325,(26:0.029116,27:0.040708):0.041437):0.033053):0.008007):0.004355,((((9:0.099680,(18:0.031926,17:0.014153):0.037515):0.030195,4:0.117840):0.006102,(7:0.0 [...]
+   tree rep.15260000 = (2:0.218197,(((((8:0.100649,(21:0.050643,((25:0.005844,(24:0.005104,23:0.008130):0.008174):0.014420,22:0.030810):0.020660):0.026237):0.025954,(13:0.079877,(20:0.115893,5:0.109149):0.066517):0.007142):0.003459,(((4:0.132323,(9:0.135018,(18:0.033126,17:0.013956):0.072239):0.007331):0.016809,(7:0.097386,(10:0.067452,(15:0.053351,16:0.035913):0.071317):0.019766):0.013714):0.016138,((14:0.076086,19:0.060272):0.048481,(29:0.114395,((27:0.028112,26:0.030208):0.068011,28:0 [...]
+   tree rep.15280000 = (((2:0.189197,(((((8:0.125716,(21:0.039558,(((23:0.014351,24:0.011527):0.004051,25:0.001912):0.037969,22:0.028168):0.008141):0.042740):0.008483,(13:0.079788,(20:0.092980,5:0.081873):0.099492):0.018938):0.006466,((14:0.074369,19:0.100724):0.020664,(29:0.075143,((27:0.050720,26:0.013010):0.051304,28:0.043419):0.038535):0.014242):0.014027):0.002940,((((15:0.040774,16:0.023807):0.044727,7:0.110808):0.001635,10:0.114235):0.020614,(((18:0.031718,17:0.024596):0.028880,9:0 [...]
+   tree rep.15300000 = (2:0.189329,(((12:0.068165,11:0.026894):0.050061,((((5:0.104316,20:0.114787):0.064489,13:0.089224):0.006323,(8:0.080584,(21:0.064124,(22:0.043664,((24:0.004668,23:0.022039):0.021220,25:0.012397):0.035107):0.007254):0.015483):0.026867):0.001659,((29:0.080449,(28:0.048955,(26:0.023361,27:0.032223):0.040818):0.035115):0.013523,((14:0.097297,19:0.076024):0.031532,((((16:0.018987,15:0.036103):0.056724,7:0.075004):0.002579,10:0.100524):0.014550,(4:0.141110,((18:0.040458, [...]
+   tree rep.15320000 = (6:0.060925,((2:0.166192,((12:0.065801,11:0.027062):0.057005,(((13:0.082682,(8:0.066462,(((25:0.011014,(24:0.009483,23:0.004722):0.009538):0.013630,22:0.031598):0.012889,21:0.050969):0.031934):0.031743):0.001314,(5:0.088554,20:0.100464):0.111139):0.008698,(((((4:0.120268,((18:0.043565,17:0.019147):0.055543,9:0.096018):0.030674):0.006092,10:0.082637):0.001496,((16:0.022353,15:0.038413):0.033639,7:0.106219):0.007641):0.033649,(29:0.086508,(28:0.058178,(26:0.032353,27 [...]
+   tree rep.15340000 = ((3:0.119024,6:0.099541):0.010200,(((((((28:0.045314,(27:0.037762,26:0.020659):0.067666):0.015268,29:0.106990):0.016572,(19:0.088033,14:0.077086):0.047479):0.013447,((8:0.097508,(((25:0.012644,(24:0.007224,23:0.007950):0.007428):0.014037,22:0.046737):0.018205,21:0.031331):0.064815):0.025047,(((4:0.132065,((18:0.041282,17:0.013648):0.032830,9:0.141538):0.019631):0.010786,(7:0.126112,(15:0.029302,16:0.024789):0.054646):0.028340):0.004252,10:0.109924):0.033376):0.0001 [...]
+   tree rep.15360000 = ((((12:0.050202,11:0.032439):0.049443,(((8:0.088015,((((23:0.015468,24:0.012899):0.003929,25:0.008489):0.019177,22:0.033804):0.029010,21:0.038497):0.027637):0.036866,((20:0.100668,5:0.096663):0.138408,13:0.065711):0.015152):0.002010,((14:0.105233,19:0.077150):0.028586,((29:0.084647,((27:0.030834,26:0.029309):0.073151,28:0.044454):0.021146):0.022081,((((15:0.037255,16:0.026022):0.057479,10:0.094988):0.001429,(((18:0.032814,17:0.027739):0.047592,9:0.101219):0.035225, [...]
+   tree rep.15380000 = ((2:0.199922,((11:0.020306,12:0.057126):0.042906,((((5:0.111754,20:0.112638):0.137344,13:0.084941):0.014847,(8:0.094572,((((24:0.007890,23:0.009115):0.006194,25:0.008170):0.022091,21:0.064911):0.017655,22:0.026717):0.050581):0.007537):0.009805,(((14:0.085187,19:0.073046):0.042458,(((26:0.030520,27:0.035750):0.094417,28:0.038672):0.036203,29:0.107541):0.014824):0.003220,((10:0.123800,((16:0.027835,15:0.038048):0.043006,7:0.118835):0.010946):0.003963,((9:0.131001,(18 [...]
+   tree rep.15400000 = ((6:0.096116,3:0.154503):0.003149,(((11:0.035368,12:0.078318):0.048214,(((((10:0.107971,(((9:0.118929,(18:0.038618,17:0.026538):0.037240):0.024175,4:0.099397):0.025868,(7:0.097532,(15:0.059437,16:0.015597):0.071633):0.010536):0.008941):0.042292,(19:0.099183,14:0.077827):0.021566):0.004194,(29:0.101721,(28:0.062077,(27:0.038721,26:0.021951):0.053582):0.023584):0.004667):0.010268,(13:0.097800,(20:0.118259,5:0.171162):0.096708):0.013936):0.011709,((21:0.044993,(22:0.0 [...]
+   tree rep.15420000 = ((((11:0.037032,12:0.074646):0.053658,(((((26:0.018575,27:0.039914):0.082634,28:0.053032):0.011278,29:0.093457):0.018472,((14:0.105536,19:0.079030):0.053068,(((7:0.091280,(16:0.024838,15:0.054555):0.044826):0.003210,10:0.110425):0.006007,(4:0.133526,(9:0.149577,(18:0.039569,17:0.029893):0.043286):0.030710):0.009129):0.045904):0.007747):0.004526,((8:0.090297,(21:0.047576,(22:0.043285,(25:0.005680,(23:0.006635,24:0.016580):0.001295):0.018891):0.031977):0.031332):0.01 [...]
+   tree rep.15440000 = ((3:0.180548,6:0.062192):0.005175,(((11:0.024808,12:0.067665):0.068774,(((8:0.091012,((22:0.042407,((24:0.008699,23:0.011122):0.001982,25:0.003336):0.030376):0.021825,21:0.045227):0.038467):0.024489,((5:0.092013,20:0.137864):0.125526,13:0.089757):0.004530):0.002741,((29:0.099184,(14:0.099748,19:0.096336):0.026825):0.001004,((7:0.087656,(10:0.105768,((16:0.048035,15:0.040525):0.046699,(4:0.132601,(9:0.131070,(18:0.028750,17:0.034644):0.023761):0.023500):0.009530):0. [...]
+   tree rep.15460000 = ((((11:0.015192,12:0.059413):0.072161,(((5:0.086023,20:0.097285):0.068487,13:0.093959):0.011031,((((28:0.035027,(26:0.048334,27:0.025488):0.082307):0.026176,29:0.098735):0.008723,(((((4:0.125884,(9:0.104734,(18:0.042348,17:0.005531):0.025619):0.033370):0.014275,(16:0.030922,15:0.062776):0.060265):0.016000,10:0.110461):0.005526,7:0.107104):0.014503,(14:0.098343,19:0.092879):0.018983):0.008962):0.010048,((21:0.032778,(22:0.041662,((24:0.006950,23:0.009785):0.003732,2 [...]
+   tree rep.15480000 = ((2:0.194393,(((((((27:0.018291,26:0.038446):0.072913,28:0.044454):0.012667,29:0.092630):0.016193,((14:0.088472,19:0.056158):0.045346,((15:0.030845,16:0.046625):0.045373,(((9:0.098977,(18:0.036661,17:0.025315):0.034003):0.020569,4:0.115247):0.013800,(7:0.092317,10:0.078183):0.012854):0.000920):0.018767):0.009184):0.017570,((20:0.073056,5:0.126384):0.105681,13:0.088365):0.005502):0.006894,(8:0.127655,(((25:0.003453,(24:0.006423,23:0.011249):0.001775):0.033948,22:0.0 [...]
+   tree rep.15500000 = ((((((((13:0.102596,(8:0.112280,((((23:0.007913,24:0.009887):0.007602,25:0.014233):0.026009,22:0.037165):0.014759,21:0.033073):0.040518):0.021832):0.000958,((14:0.084196,19:0.069251):0.043813,(5:0.148309,20:0.088499):0.094618):0.010490):0.007905,(29:0.083133,((26:0.037810,27:0.029363):0.087123,28:0.061415):0.020649):0.011193):0.011261,((((16:0.020294,15:0.041420):0.047534,7:0.101182):0.009746,10:0.088235):0.027549,(((18:0.050022,17:0.009622):0.049732,9:0.109138):0. [...]
+   tree rep.15520000 = ((2:0.175476,(((((4:0.101197,((18:0.037482,17:0.033532):0.037252,9:0.103542):0.023668):0.018698,(7:0.097607,(15:0.059930,16:0.018868):0.033041):0.015591):0.008355,10:0.086350):0.023451,(((29:0.085417,(28:0.039520,(27:0.028136,26:0.019563):0.081891):0.014056):0.019568,(14:0.085970,19:0.092754):0.026493):0.008503,(8:0.105869,(((21:0.043471,(22:0.039003,((24:0.018575,23:0.007850):0.006932,25:0.010084):0.020159):0.016706):0.025924,(20:0.067813,5:0.097634):0.051821):0.0 [...]
+   tree rep.15540000 = (((((12:0.071832,11:0.024259):0.065101,(((13:0.111951,((21:0.040011,(((23:0.005911,25:0.013821):0.004026,24:0.013468):0.027804,22:0.054546):0.037835):0.036538,8:0.117638):0.022988):0.019970,((29:0.097910,((27:0.040645,26:0.021916):0.082972,28:0.037225):0.015474):0.036568,((((9:0.088895,(18:0.033089,17:0.022907):0.049630):0.014886,4:0.132269):0.015310,(((15:0.042762,16:0.016029):0.046912,10:0.094472):0.009407,7:0.102033):0.007140):0.019575,(19:0.082851,14:0.079575): [...]
+   tree rep.15560000 = ((6:0.078756,(2:0.233503,((12:0.063295,11:0.021547):0.045258,(((((25:0.008067,(24:0.003719,23:0.015420):0.001945):0.020601,22:0.037089):0.004837,21:0.039481):0.022430,8:0.068710):0.029505,(((20:0.089189,5:0.076522):0.103649,13:0.067027):0.021221,((((27:0.032307,26:0.016073):0.059072,28:0.062583):0.019159,29:0.085019):0.009940,(((10:0.079151,(7:0.103704,(15:0.044395,16:0.039416):0.053107):0.024635):0.000747,((9:0.075271,(18:0.036606,17:0.020149):0.043144):0.015686,4 [...]
+   tree rep.15580000 = ((((12:0.080448,11:0.046428):0.038080,((((((15:0.033450,16:0.024692):0.041758,7:0.096016):0.007554,10:0.101571):0.006356,(9:0.149179,(18:0.029790,17:0.024307):0.070760):0.023941):0.008402,4:0.111110):0.025697,((29:0.085163,((27:0.030166,26:0.037857):0.074081,28:0.041516):0.015642):0.051031,(((19:0.078202,14:0.084842):0.038362,(20:0.136774,5:0.114863):0.063250):0.018236,(13:0.082945,(8:0.092461,((22:0.023294,(25:0.010452,(23:0.006704,24:0.009165):0.004195):0.022962) [...]
+   tree rep.15600000 = (3:0.138700,((2:0.145469,((11:0.029717,12:0.057656):0.068249,(((29:0.089737,((26:0.011740,27:0.055799):0.055974,28:0.050380):0.024806):0.010256,((14:0.077429,19:0.076117):0.022858,((((9:0.123716,(18:0.038781,17:0.013446):0.035944):0.022971,4:0.125411):0.008623,(10:0.088749,(16:0.029753,15:0.049521):0.040994):0.020867):0.012768,7:0.100164):0.019159):0.001879):0.009847,(((5:0.076650,20:0.120938):0.074673,13:0.088843):0.009294,((21:0.036203,(((23:0.005333,24:0.004213) [...]
+   tree rep.15620000 = ((6:0.064520,3:0.131344):0.012181,(((11:0.028097,12:0.073741):0.086174,((8:0.094251,(21:0.053777,(22:0.021830,(25:0.003270,(24:0.012469,23:0.009945):0.004586):0.029699):0.019733):0.028749):0.026276,((((29:0.095515,((26:0.033874,27:0.047266):0.063620,28:0.044397):0.022117):0.020308,(((4:0.102779,(9:0.133353,(18:0.050107,17:0.021445):0.034160):0.029132):0.003388,((16:0.011991,15:0.041228):0.044797,7:0.099435):0.015708):0.010895,10:0.076563):0.022077):0.006398,(14:0.1 [...]
+   tree rep.15640000 = ((2:0.224044,((11:0.028743,12:0.061198):0.069979,(((8:0.091975,(21:0.040423,(22:0.009983,(25:0.003410,(23:0.010992,24:0.009918):0.003700):0.031131):0.024932):0.037245):0.033071,13:0.086636):0.007065,((5:0.108860,20:0.109323):0.085694,(((7:0.125682,(((16:0.038793,15:0.061243):0.047564,10:0.090670):0.000323,(4:0.098973,(9:0.071887,(18:0.039193,17:0.014015):0.041485):0.020727):0.019896):0.011866):0.010586,(14:0.096090,19:0.095451):0.016952):0.013041,(29:0.093716,((26: [...]
+   tree rep.15660000 = ((3:0.133276,6:0.093325):0.005146,(((12:0.098238,11:0.027673):0.048630,(((((22:0.030243,(25:0.002414,(24:0.011332,23:0.006011):0.015803):0.021210):0.021636,21:0.051854):0.039067,8:0.063755):0.028638,((19:0.092546,14:0.098423):0.043374,(((7:0.098970,((15:0.027300,16:0.025830):0.049951,(10:0.087522,(4:0.105357,((18:0.029520,17:0.027382):0.046018,9:0.101051):0.029922):0.035023):0.003600):0.013797):0.025848,(29:0.078360,((27:0.025534,26:0.022101):0.086992,28:0.043387): [...]
+   tree rep.15680000 = ((2:0.213969,(((((((22:0.034438,((23:0.009076,24:0.014111):0.003754,25:0.006058):0.025275):0.015302,21:0.041370):0.030962,8:0.111266):0.040645,13:0.120015):0.002708,(5:0.112471,20:0.136226):0.089417):0.003628,(((14:0.090134,19:0.095207):0.018557,(29:0.116063,((26:0.021033,27:0.036540):0.060112,28:0.056413):0.017112):0.011394):0.003707,(((9:0.109452,(18:0.051739,17:0.029098):0.056770):0.014608,4:0.106618):0.005181,(10:0.116324,((16:0.028952,15:0.038892):0.033222,7:0 [...]
+   tree rep.15700000 = ((6:0.084320,(((11:0.018746,12:0.083190):0.035964,((((10:0.101126,((4:0.145990,(9:0.104065,(18:0.031744,17:0.018669):0.039956):0.014248):0.019705,((15:0.049509,16:0.011546):0.044650,7:0.091174):0.013037):0.009331):0.022788,(19:0.085616,14:0.107190):0.026953):0.010506,(29:0.094622,((27:0.023702,26:0.035411):0.067093,28:0.041883):0.030967):0.023168):0.004362,(((20:0.094724,5:0.102355):0.099390,13:0.080523):0.002923,(8:0.081697,((((24:0.006927,23:0.008233):0.008375,25 [...]
+   tree rep.15720000 = ((6:0.098645,3:0.087612):0.008937,(((11:0.021164,12:0.058973):0.032573,(((21:0.079571,(22:0.033206,(25:0.008802,(23:0.005786,24:0.014051):0.008245):0.034484):0.017533):0.035741,8:0.101172):0.029209,(((((7:0.104278,(15:0.027046,16:0.032483):0.047038):0.012940,(10:0.120026,((9:0.072481,(18:0.023137,17:0.029070):0.063204):0.023853,4:0.122453):0.020367):0.000700):0.022886,(((27:0.037329,26:0.031893):0.068442,28:0.030069):0.024876,29:0.069545):0.022853):0.001551,(19:0.0 [...]
+   tree rep.15740000 = (((2:0.227548,(((((((23:0.014605,24:0.010762):0.006057,25:0.001131):0.031196,22:0.023440):0.034082,21:0.027971):0.027801,8:0.106320):0.019682,(((((15:0.029350,16:0.020513):0.049128,(10:0.066749,7:0.104220):0.008555):0.012349,((9:0.153763,(18:0.056171,17:0.020460):0.030167):0.030710,4:0.097820):0.015168):0.038843,((19:0.075482,14:0.078201):0.038592,(29:0.106786,((27:0.022534,26:0.022943):0.062968,28:0.045821):0.023387):0.002938):0.004352):0.006476,(13:0.070726,(20:0 [...]
+   tree rep.15760000 = (((((((((4:0.132005,(9:0.086225,(18:0.041395,17:0.017114):0.051002):0.027594):0.005623,(15:0.035486,16:0.039415):0.072561):0.000862,10:0.083774):0.033991,7:0.126602):0.020449,((29:0.101540,(28:0.039126,(27:0.024626,26:0.036324):0.078813):0.010839):0.014868,(19:0.116519,14:0.094683):0.024166):0.000612):0.007370,((13:0.073911,(20:0.094413,5:0.091196):0.081573):0.017884,((21:0.038048,(22:0.027841,((23:0.013632,24:0.007472):0.002914,25:0.008172):0.025740):0.008583):0.0 [...]
+   tree rep.15780000 = ((((12:0.046948,11:0.031198):0.048893,(((13:0.084401,(5:0.085679,20:0.116231):0.081795):0.003473,(((22:0.046333,(25:0.006392,(24:0.008777,23:0.007484):0.007428):0.021739):0.019956,21:0.069857):0.013072,8:0.089503):0.035129):0.000434,(((19:0.078364,14:0.081599):0.042806,((10:0.086859,((16:0.029524,15:0.043181):0.043111,((9:0.094006,(18:0.033797,17:0.018498):0.049290):0.015902,4:0.125932):0.015683):0.004116):0.022668,7:0.082645):0.024616):0.003825,((28:0.040372,(26:0 [...]
+   tree rep.15800000 = (3:0.136565,(6:0.082321,(2:0.152630,((12:0.066086,11:0.018615):0.045863,((((((26:0.032855,27:0.048711):0.098543,28:0.054056):0.024923,29:0.095765):0.015670,((((((18:0.047562,17:0.021498):0.025542,9:0.077016):0.039123,4:0.109637):0.027181,(10:0.087547,(16:0.017128,15:0.036223):0.053770):0.001287):0.016345,7:0.096882):0.016272,(14:0.084153,19:0.098382):0.020178):0.003623):0.010674,((5:0.170808,20:0.072103):0.086186,13:0.067483):0.004051):0.007133,(8:0.103039,((22:0.0 [...]
+   tree rep.15820000 = (((2:0.147655,((((20:0.120960,5:0.109839):0.120095,13:0.116155):0.003599,((((28:0.059976,(27:0.029406,26:0.016406):0.072364):0.022322,(29:0.096349,(14:0.084904,19:0.094489):0.056708):0.003765):0.015287,(((9:0.103518,(18:0.035583,17:0.016691):0.027421):0.023223,4:0.160162):0.005674,(((15:0.060381,16:0.033468):0.039165,7:0.090965):0.009163,10:0.109473):0.023552):0.025832):0.005584,(((22:0.033363,(25:0.001674,(24:0.008493,23:0.012557):0.004229):0.019397):0.020973,21:0 [...]
+   tree rep.15840000 = ((2:0.237101,((((4:0.117312,(9:0.114002,(18:0.044181,17:0.022143):0.026601):0.035133):0.008608,(10:0.078112,((16:0.033626,15:0.046001):0.048963,7:0.122369):0.018603):0.007433):0.012468,(((((21:0.069113,(((24:0.011087,23:0.011798):0.008625,25:0.007089):0.023672,22:0.054802):0.010962):0.032780,8:0.105199):0.039041,13:0.096124):0.002040,(((14:0.080412,19:0.100177):0.032676,29:0.116977):0.010575,(28:0.027699,(26:0.030566,27:0.036855):0.090590):0.029544):0.010044):0.001 [...]
+   tree rep.15860000 = ((2:0.264180,((((13:0.122134,((14:0.103237,19:0.102859):0.025680,((((27:0.021388,26:0.055567):0.081078,28:0.041362):0.016125,29:0.074957):0.027339,((((15:0.032695,16:0.040009):0.063071,7:0.119716):0.005728,(4:0.117166,(9:0.103608,(18:0.035618,17:0.021310):0.042383):0.020163):0.018243):0.006937,10:0.104590):0.010681):0.000832):0.009714):0.003095,(((((23:0.007129,24:0.011235):0.007238,25:0.006383):0.027084,22:0.044558):0.034170,21:0.029413):0.009728,8:0.104031):0.030 [...]
+   tree rep.15880000 = ((6:0.070648,3:0.139808):0.029326,(2:0.148024,((((((22:0.035497,((23:0.008010,24:0.012455):0.000983,25:0.005858):0.019102):0.024911,21:0.035847):0.042455,8:0.105739):0.010541,13:0.092303):0.000445,(((14:0.094961,19:0.075370):0.033140,((7:0.105211,(5:0.109307,20:0.105008):0.070217):0.011726,((((18:0.038681,17:0.022537):0.044694,9:0.100831):0.020598,4:0.140439):0.006362,((16:0.016361,15:0.050199):0.039725,10:0.072529):0.006107):0.020428):0.011451):0.002179,(29:0.0772 [...]
+   tree rep.15900000 = ((2:0.155358,(((13:0.135720,(8:0.085447,(21:0.050726,(22:0.050235,(25:0.006861,(23:0.012459,24:0.008199):0.015073):0.024280):0.006887):0.038422):0.026549):0.000519,((((20:0.064236,5:0.092960):0.159587,(7:0.071936,(10:0.095700,((4:0.115846,((18:0.037915,17:0.015886):0.048832,9:0.102520):0.023289):0.019317,(15:0.018426,16:0.045878):0.040509):0.000678):0.024047):0.017078):0.006324,((28:0.036043,(27:0.040343,26:0.017753):0.068705):0.013361,29:0.099081):0.029581):0.0057 [...]
+   tree rep.15920000 = (3:0.157396,(6:0.078500,(2:0.162582,(((((14:0.089854,19:0.089241):0.033569,(((10:0.080206,((9:0.108984,(18:0.030286,17:0.019336):0.039996):0.031158,4:0.079070):0.009602):0.000107,(15:0.035207,16:0.015077):0.085777):0.003285,7:0.107825):0.024201):0.014896,((28:0.053210,(27:0.040320,26:0.020303):0.067526):0.015338,29:0.084722):0.012965):0.012030,(((20:0.093253,5:0.088135):0.105099,13:0.081098):0.004735,(8:0.112598,((22:0.053523,21:0.053698):0.003086,(25:0.010750,(24: [...]
+   tree rep.15940000 = ((2:0.206467,((12:0.076185,11:0.027822):0.042175,((5:0.134735,20:0.142018):0.092540,((13:0.113340,(8:0.067739,(21:0.056870,(((23:0.011616,24:0.012391):0.004996,25:0.004781):0.029648,22:0.046830):0.031775):0.024363):0.043460):0.004871,(((28:0.070436,(26:0.022922,27:0.033958):0.059220):0.024099,29:0.115198):0.003621,((14:0.089700,19:0.084711):0.017536,(((16:0.020996,15:0.055809):0.085433,(7:0.097636,((9:0.109176,(18:0.033721,17:0.021097):0.082978):0.028979,4:0.153561 [...]
+   tree rep.15960000 = ((6:0.098034,3:0.105624):0.007634,(((11:0.040636,12:0.071854):0.047076,((((14:0.071707,19:0.069952):0.014550,((28:0.046782,(26:0.023855,27:0.027344):0.061048):0.028274,29:0.098927):0.012141):0.006246,((8:0.098707,(21:0.059648,((23:0.012398,(24:0.015525,25:0.009806):0.000327):0.022274,22:0.028820):0.025295):0.089121):0.013715,((5:0.074817,20:0.086606):0.069767,13:0.061191):0.004648):0.016140):0.008304,(10:0.103479,((((18:0.041173,17:0.027328):0.043893,9:0.082883):0. [...]
+   tree rep.15980000 = ((3:0.153439,6:0.076842):0.001326,(((11:0.032317,12:0.063392):0.058586,((13:0.066552,((((25:0.006678,(23:0.008089,24:0.018572):0.005820):0.021634,22:0.044445):0.014562,21:0.043313):0.048333,8:0.102914):0.017021):0.006006,((5:0.108170,20:0.070669):0.106680,((14:0.106656,19:0.097717):0.014251,((((((18:0.035639,17:0.013724):0.049182,9:0.101683):0.022355,4:0.123360):0.007965,((16:0.040344,15:0.027272):0.058301,10:0.118445):0.003837):0.008018,7:0.097761):0.016126,(29:0. [...]
+   tree rep.16000000 = ((6:0.091731,(((12:0.075430,11:0.039627):0.064094,((((21:0.038415,(22:0.058986,((24:0.012615,23:0.002989):0.018111,25:0.017091):0.019582):0.010275):0.046621,8:0.095756):0.012990,13:0.080259):0.012234,(((((10:0.068209,7:0.083528):0.002882,(16:0.012419,15:0.043837):0.056948):0.014785,(4:0.112773,((18:0.032407,17:0.016929):0.034953,9:0.111626):0.037130):0.009305):0.016720,((5:0.108674,20:0.084403):0.078736,(29:0.129434,((26:0.029111,27:0.039147):0.080489,28:0.065648): [...]
+   tree rep.16020000 = (((2:0.224391,((11:0.040944,12:0.062726):0.037327,((8:0.082397,(21:0.060908,(((24:0.014635,23:0.005421):0.012554,25:0.002398):0.027997,22:0.028639):0.006505):0.013772):0.037676,(13:0.094085,((((14:0.075006,19:0.085545):0.027885,(29:0.108892,((26:0.029821,27:0.053609):0.062877,28:0.055771):0.037978):0.023435):0.001798,((4:0.104972,((18:0.037615,17:0.018595):0.055534,9:0.105344):0.031953):0.011939,(10:0.100106,((16:0.022680,15:0.040727):0.044408,7:0.110423):0.017420) [...]
+   tree rep.16040000 = ((3:0.100104,(((12:0.050743,11:0.035863):0.035396,(((4:0.118242,((18:0.027976,17:0.019208):0.049479,9:0.092786):0.016149):0.041257,(((16:0.026216,15:0.027214):0.044771,7:0.083232):0.009814,10:0.087498):0.011711):0.016942,((((28:0.031644,(26:0.026230,27:0.039483):0.055620):0.015068,29:0.066305):0.011537,(14:0.075135,19:0.061092):0.022976):0.011029,(13:0.085397,((8:0.106192,(((24:0.011666,23:0.008956):0.006880,25:0.013531):0.008897,(21:0.035263,22:0.025607):0.005931) [...]
+   tree rep.16060000 = (6:0.081098,(3:0.135838,(2:0.201756,(((((9:0.110958,(18:0.040806,17:0.021882):0.038362):0.027872,4:0.115922):0.015233,(10:0.105665,(7:0.084072,(15:0.031089,16:0.023664):0.047244):0.017304):0.006160):0.022906,(((((27:0.036154,26:0.019763):0.044049,28:0.062744):0.021441,29:0.096108):0.017454,(13:0.089329,(((21:0.063051,(((23:0.012504,24:0.008075):0.004693,25:0.011577):0.023479,22:0.029916):0.027920):0.021539,8:0.104633):0.013886,(20:0.068358,5:0.102640):0.067309):0.0 [...]
+   tree rep.16080000 = ((6:0.072521,((((((19:0.079132,14:0.089261):0.050772,((4:0.136475,(9:0.085086,(18:0.039692,17:0.013616):0.037539):0.020314):0.012977,((15:0.045798,16:0.020050):0.041732,(10:0.107066,7:0.142031):0.002282):0.016840):0.025193):0.012991,(((27:0.025133,26:0.020018):0.057644,28:0.058838):0.021514,29:0.087172):0.012260):0.002332,((20:0.098368,5:0.072475):0.084229,(((((25:0.007148,(24:0.005834,23:0.005505):0.007260):0.039147,22:0.040270):0.019305,21:0.045360):0.013862,8:0. [...]
+   tree rep.16100000 = (((((12:0.058676,11:0.025977):0.043183,(((((((25:0.007731,(23:0.012565,24:0.012111):0.005035):0.034321,22:0.019954):0.008116,21:0.052405):0.022897,8:0.079367):0.036025,13:0.079789):0.014133,(5:0.080165,20:0.105826):0.075042):0.004615,(((28:0.060025,(26:0.037056,27:0.019962):0.069726):0.026832,(((((10:0.087082,(9:0.155325,(18:0.040695,17:0.015052):0.073735):0.029898):0.000179,(16:0.018659,15:0.046592):0.067586):0.007955,4:0.166893):0.009713,7:0.113197):0.004950,(14: [...]
+   tree rep.16120000 = ((6:0.086104,(((12:0.040734,11:0.043082):0.033946,((((9:0.083470,(18:0.030574,17:0.026088):0.052520):0.022649,4:0.117669):0.036172,((7:0.103819,(16:0.042304,15:0.028338):0.043759):0.011813,10:0.069379):0.009487):0.018254,(((28:0.066003,(26:0.031855,27:0.021718):0.077492):0.018906,29:0.130923):0.011159,((((5:0.132526,20:0.061435):0.118713,13:0.156801):0.006669,(8:0.091856,(21:0.034653,(((23:0.012471,24:0.009333):0.008732,25:0.011718):0.026118,22:0.057219):0.011896): [...]
+   tree rep.16140000 = ((((12:0.067021,11:0.034071):0.038161,((((((((15:0.033396,16:0.029455):0.057557,10:0.084849):0.005539,((9:0.096317,(18:0.037444,17:0.023698):0.046439):0.015902,4:0.131966):0.008791):0.002046,7:0.081907):0.015926,(19:0.055043,14:0.067262):0.026446):0.015304,(((27:0.029313,26:0.025905):0.054963,28:0.038660):0.023494,29:0.091419):0.010696):0.004433,((20:0.085544,5:0.117867):0.096118,13:0.105809):0.009283):0.004218,((21:0.064810,(((24:0.020729,23:0.013292):0.012976,25: [...]
+   tree rep.16160000 = ((((((((22:0.028156,(25:0.009153,(24:0.016150,23:0.016154):0.004238):0.021994):0.012199,21:0.038374):0.030304,8:0.086202):0.014715,((13:0.080798,(5:0.122822,20:0.107718):0.085093):0.009633,((29:0.127027,(28:0.038915,(26:0.019404,27:0.037632):0.051172):0.026066):0.016806,((14:0.078400,19:0.071436):0.027261,((4:0.118329,(9:0.092249,(18:0.030031,17:0.026821):0.040262):0.027516):0.012467,(7:0.105839,(10:0.104818,(16:0.010887,15:0.063373):0.055242):0.000894):0.014607):0 [...]
+   tree rep.16180000 = ((3:0.124510,(((((13:0.103784,(5:0.063331,20:0.095418):0.113295):0.004868,(((((24:0.014020,23:0.012497):0.003023,25:0.010459):0.028921,22:0.046551):0.013922,21:0.034214):0.045709,8:0.071926):0.051903):0.007225,((((10:0.087789,((16:0.017544,15:0.052392):0.041452,7:0.090622):0.008486):0.006262,(4:0.119977,(9:0.101614,(18:0.038469,17:0.034877):0.037389):0.052925):0.004696):0.020447,(14:0.076744,19:0.081178):0.050801):0.004134,((28:0.045445,(26:0.027623,27:0.016928):0. [...]
+   tree rep.16200000 = (3:0.121070,((2:0.229390,(((((((10:0.137589,(16:0.035303,15:0.047221):0.058267):0.008788,((9:0.096169,(18:0.039471,17:0.021081):0.045413):0.033482,4:0.138529):0.007181):0.017755,7:0.099675):0.028033,(14:0.080601,19:0.080583):0.022762):0.002187,(29:0.067832,(28:0.053643,(26:0.018550,27:0.034116):0.100077):0.029120):0.018287):0.003587,(13:0.102583,(((21:0.049482,(22:0.027417,(25:0.009468,(23:0.006398,24:0.011008):0.000656):0.025211):0.012895):0.041842,8:0.074401):0.0 [...]
+   tree rep.16220000 = (((((12:0.097230,11:0.039524):0.058230,(((10:0.112616,(7:0.117943,(15:0.030113,16:0.034076):0.061019):0.006285):0.023834,(4:0.131855,(9:0.082551,(18:0.023739,17:0.031540):0.050988):0.025912):0.004451):0.020624,(((14:0.074784,19:0.079553):0.044171,(((8:0.105100,(21:0.049096,(((24:0.010675,23:0.009350):0.004162,25:0.013199):0.028515,22:0.024292):0.014204):0.034592):0.042144,(20:0.120448,5:0.115530):0.113907):0.008872,13:0.126221):0.013216):0.000767,(29:0.102392,((27: [...]
+   tree rep.16240000 = ((6:0.080516,3:0.142014):0.002425,(2:0.144086,((((((21:0.059933,22:0.031350):0.007130,((23:0.011264,24:0.010612):0.002480,25:0.017649):0.028490):0.027278,8:0.135633):0.031853,((5:0.106477,20:0.104508):0.128763,13:0.083370):0.003398):0.006838,((7:0.116344,((10:0.101016,(16:0.030193,15:0.031608):0.046244):0.004909,(((18:0.041493,17:0.018006):0.048623,9:0.115053):0.041422,4:0.141116):0.012177):0.006100):0.030120,((14:0.095364,19:0.072841):0.040282,((28:0.049566,(26:0. [...]
+   tree rep.16260000 = ((6:0.097048,3:0.147600):0.018542,(((11:0.050487,12:0.061261):0.056956,((((20:0.091410,5:0.084001):0.066938,(29:0.083461,((27:0.033597,26:0.016315):0.052764,28:0.049058):0.032306):0.000303):0.001546,((7:0.104262,(((((18:0.041283,17:0.012179):0.068070,9:0.130659):0.013574,4:0.111559):0.005173,(15:0.056897,16:0.019888):0.051720):0.013888,10:0.108976):0.010220):0.016620,(19:0.091591,14:0.091544):0.030574):0.001612):0.010173,(13:0.100720,(8:0.083641,((22:0.035495,((24: [...]
+   tree rep.16280000 = ((3:0.104964,(2:0.173054,((((((10:0.099290,((((18:0.033930,17:0.014682):0.041563,9:0.089019):0.002503,4:0.113766):0.023601,(7:0.103885,(16:0.010923,15:0.050624):0.056704):0.018409):0.005243):0.029853,(((26:0.026256,27:0.035323):0.061727,(29:0.098133,28:0.036044):0.043849):0.019779,(19:0.072539,14:0.124598):0.032620):0.001066):0.016895,13:0.105033):0.002620,(8:0.087588,(22:0.022045,(((23:0.008446,24:0.013565):0.010251,25:0.007489):0.022108,21:0.080279):0.005332):0.0 [...]
+   tree rep.16300000 = (((2:0.196127,(((8:0.105007,((22:0.025651,(23:0.008277,(25:0.011830,24:0.009808):0.008857):0.039890):0.015226,21:0.043693):0.031828):0.030283,(((5:0.117487,20:0.088282):0.076741,(((((((18:0.044400,17:0.028082):0.058021,9:0.089490):0.008800,4:0.113908):0.010885,10:0.089915):0.001174,((16:0.025531,15:0.042192):0.070214,7:0.095992):0.012872):0.036891,(29:0.078729,(28:0.025027,(26:0.032613,27:0.025923):0.087661):0.019966):0.016800):0.003470,(19:0.060330,14:0.053823):0. [...]
+   tree rep.16320000 = ((6:0.097364,3:0.169311):0.005002,(((((8:0.093090,(21:0.051163,(22:0.034151,(25:0.009412,(23:0.016617,24:0.003582):0.003553):0.017693):0.024151):0.040289):0.037822,13:0.082643):0.000005,((7:0.096557,(((4:0.150391,((18:0.032900,17:0.021977):0.049496,9:0.101820):0.013693):0.006260,(15:0.034084,16:0.024355):0.061841):0.007402,10:0.102149):0.010196):0.009559,(((19:0.086620,14:0.107585):0.017449,29:0.114072):0.008467,(((27:0.035764,26:0.035253):0.068316,28:0.044880):0.0 [...]
+   tree rep.16340000 = ((3:0.135857,6:0.100700):0.006845,(((((((22:0.045461,(23:0.005413,(24:0.006639,25:0.006024):0.003419):0.029004):0.001683,21:0.047062):0.044248,8:0.091967):0.056037,(5:0.081222,20:0.108922):0.119278):0.002768,((((10:0.078290,((16:0.014591,15:0.044861):0.045738,((9:0.125854,(18:0.028096,17:0.014542):0.030088):0.043573,4:0.122448):0.008693):0.016650):0.004940,7:0.102719):0.017244,((((26:0.021044,27:0.024273):0.071848,28:0.038046):0.013304,29:0.096012):0.046234,(19:0.0 [...]
+   tree rep.16360000 = (6:0.088637,(3:0.106083,((((20:0.063747,5:0.117532):0.098672,(((8:0.140939,(21:0.048168,(((23:0.014233,24:0.006780):0.005942,25:0.014954):0.026200,22:0.030728):0.024709):0.019356):0.036550,(((19:0.093176,14:0.086351):0.017398,(((27:0.032094,26:0.029138):0.053171,28:0.043821):0.024688,29:0.129848):0.031889):0.000468,((7:0.094284,(((18:0.026781,17:0.024445):0.053011,9:0.100612):0.027200,(15:0.035274,16:0.034668):0.075806):0.007708):0.003840,(10:0.079594,4:0.142758):0 [...]
+   tree rep.16380000 = ((6:0.100481,3:0.160624):0.026051,(2:0.208027,(((((19:0.078376,14:0.076089):0.028551,((28:0.050168,(27:0.028698,26:0.041383):0.073586):0.028675,29:0.071008):0.014380):0.007456,((13:0.094476,(8:0.088643,((((23:0.011145,24:0.009397):0.013190,25:0.005046):0.035775,22:0.038084):0.015092,21:0.052338):0.053680):0.027612):0.003672,(20:0.079857,5:0.101041):0.139019):0.004864):0.001914,((((15:0.029203,16:0.029225):0.068199,7:0.074841):0.016680,10:0.091116):0.006245,((9:0.13 [...]
+   tree rep.16400000 = ((6:0.104760,3:0.166909):0.015676,(2:0.188348,((((8:0.065827,((((24:0.012411,23:0.009671):0.011564,25:0.004767):0.035298,22:0.057766):0.016139,21:0.052267):0.029630):0.012358,(13:0.092697,(((19:0.067048,14:0.099093):0.050175,(((10:0.091402,(15:0.044241,16:0.014036):0.074067):0.000164,(4:0.133270,(9:0.089063,(18:0.039711,17:0.038959):0.056690):0.025895):0.007192):0.012792,7:0.145090):0.013647):0.020245,(29:0.081816,((27:0.040322,26:0.018536):0.054724,28:0.036927):0. [...]
+   tree rep.16420000 = ((((((((25:0.005610,(24:0.011868,23:0.006887):0.008918):0.048353,22:0.045299):0.028972,21:0.027111):0.040069,8:0.099748):0.024262,((19:0.093857,14:0.083156):0.040997,((20:0.097474,5:0.070798):0.083668,(((((27:0.040701,26:0.024672):0.037987,28:0.049216):0.011475,29:0.111809):0.023680,13:0.084455):0.003021,(((7:0.112250,(15:0.038780,16:0.023771):0.052968):0.011086,10:0.099032):0.005717,(4:0.135785,(9:0.098982,(18:0.043780,17:0.014458):0.044267):0.030662):0.018165):0. [...]
+   tree rep.16440000 = ((6:0.076814,3:0.161697):0.025097,((((((29:0.110713,(28:0.057214,(26:0.041905,27:0.023756):0.068787):0.016128):0.005401,(((4:0.128146,((18:0.038631,17:0.027380):0.021645,9:0.140580):0.031757):0.004482,(10:0.108771,(16:0.019431,15:0.042376):0.046058):0.022677):0.012170,7:0.104993):0.019722):0.000489,(19:0.089672,14:0.082907):0.034955):0.013219,(((8:0.083684,(21:0.022812,(22:0.051452,(25:0.006031,(24:0.016930,23:0.005216):0.010978):0.017634):0.012257):0.026419):0.025 [...]
+   tree rep.16460000 = ((6:0.084916,((((13:0.098097,(5:0.107337,20:0.092638):0.142242):0.014897,((8:0.101336,(21:0.045187,(22:0.029416,((23:0.010894,24:0.019357):0.013800,25:0.005664):0.037729):0.024154):0.025249):0.038900,((19:0.107140,14:0.116728):0.021888,(((28:0.041302,(26:0.019148,27:0.050688):0.068690):0.029456,29:0.103543):0.022303,(10:0.108350,((((18:0.022946,17:0.023065):0.070274,9:0.107305):0.007046,4:0.111203):0.008911,(7:0.100822,(16:0.026499,15:0.023953):0.059702):0.007290): [...]
+   tree rep.16480000 = (6:0.086544,(3:0.124482,((((((19:0.050792,14:0.064885):0.024906,(7:0.108622,((((9:0.092449,(18:0.050356,17:0.019316):0.024583):0.040434,4:0.137074):0.007857,(15:0.042643,16:0.022192):0.036758):0.000441,10:0.105775):0.016753):0.031880):0.010880,(((27:0.035487,26:0.020632):0.068556,28:0.047525):0.020451,29:0.098514):0.006022):0.011766,(((20:0.068805,5:0.085616):0.093103,((21:0.035087,(22:0.038086,((23:0.015935,24:0.014754):0.006596,25:0.004324):0.024731):0.010966):0. [...]
+   tree rep.16500000 = ((6:0.061541,3:0.120793):0.000993,(2:0.202930,(((((8:0.088470,(21:0.072644,(22:0.025372,(25:0.007701,(24:0.014601,23:0.009994):0.003981):0.025394):0.030100):0.020977):0.039587,13:0.060749):0.005352,(20:0.075500,5:0.112020):0.068845):0.007538,(((((15:0.034803,16:0.023700):0.047919,7:0.104166):0.006021,10:0.101176):0.005901,(((18:0.035809,17:0.015320):0.046796,9:0.147426):0.029602,4:0.091892):0.004914):0.020903,((19:0.079607,14:0.095087):0.024108,(29:0.080291,((27:0. [...]
+   tree rep.16520000 = (3:0.138758,(6:0.085737,((((((5:0.112538,20:0.087788):0.070830,13:0.105320):0.011206,((21:0.067196,(22:0.047384,(25:0.002538,(23:0.008241,24:0.009910):0.012334):0.023516):0.008564):0.034086,8:0.105050):0.031823):0.001805,((29:0.106652,((26:0.024777,27:0.044056):0.064548,28:0.051555):0.030315):0.010130,((((9:0.129133,(18:0.041398,17:0.016338):0.037690):0.025292,4:0.147251):0.008906,(10:0.087178,((16:0.021101,15:0.060901):0.067934,7:0.127550):0.014362):0.007692):0.01 [...]
+   tree rep.16540000 = ((3:0.175527,6:0.085670):0.005731,(2:0.220922,(((((21:0.040292,((25:0.008127,(23:0.008953,24:0.006052):0.003966):0.011502,22:0.040696):0.020940):0.046287,8:0.105254):0.038413,13:0.117394):0.001273,((((29:0.079572,((27:0.033829,26:0.043715):0.073227,28:0.061479):0.029784):0.005393,(19:0.077148,14:0.094515):0.024371):0.001629,((20:0.111039,5:0.122861):0.094311,7:0.141470):0.016607):0.007254,(((15:0.023647,16:0.027714):0.077856,10:0.093953):0.009610,(4:0.154254,(9:0.1 [...]
+   tree rep.16560000 = (2:0.153243,(((((29:0.133471,((26:0.029712,27:0.033240):0.074049,28:0.041914):0.014827):0.013228,((7:0.075764,((10:0.083663,(16:0.035019,15:0.050859):0.043215):0.007334,(4:0.101088,(9:0.098784,(18:0.045428,17:0.012294):0.058738):0.017558):0.007862):0.010758):0.006562,(14:0.067609,19:0.061549):0.040517):0.001789):0.012502,(13:0.086001,((8:0.078265,(21:0.036182,(22:0.026253,((23:0.009619,24:0.014232):0.004802,25:0.003121):0.022712):0.026202):0.020299):0.050904,(5:0.0 [...]
+   tree rep.16580000 = ((2:0.158082,((13:0.099806,(((((((15:0.035961,16:0.023824):0.042452,7:0.116778):0.011359,(10:0.078828,(4:0.129699,(9:0.097371,(18:0.019074,17:0.025982):0.034995):0.009754):0.023683):0.005481):0.021373,(19:0.060681,14:0.090312):0.044548):0.014923,(((27:0.027070,26:0.027175):0.064418,28:0.032197):0.027174,29:0.098540):0.012950):0.008591,(20:0.131049,5:0.065838):0.099789):0.003800,(8:0.081481,((22:0.036615,(25:0.010091,(24:0.012867,23:0.013137):0.011488):0.028221):0.0 [...]
+   tree rep.16600000 = ((2:0.130889,((12:0.042430,11:0.035397):0.035946,((13:0.075721,((5:0.082596,20:0.075453):0.079159,((19:0.073336,14:0.105048):0.030529,(((((9:0.088789,(18:0.032125,17:0.025398):0.036405):0.011236,4:0.090207):0.008303,((16:0.017518,15:0.027769):0.052753,10:0.100258):0.004600):0.016830,7:0.102522):0.032880,((28:0.050552,(26:0.015292,27:0.041952):0.066047):0.029707,29:0.093129):0.003628):0.006581):0.004385):0.000438):0.005459,(8:0.078874,(21:0.042760,((25:0.015062,(23: [...]
+   tree rep.16620000 = ((2:0.133157,((((20:0.080034,5:0.104736):0.058342,13:0.122870):0.006746,(((19:0.101897,14:0.059082):0.018488,((7:0.113118,(10:0.073300,(((9:0.102978,(18:0.044348,17:0.012842):0.027568):0.019696,4:0.160212):0.012304,(15:0.031945,16:0.025495):0.045079):0.008941):0.005920):0.017117,(((27:0.030812,26:0.019598):0.062794,28:0.037801):0.015289,29:0.092082):0.009669):0.004715):0.007315,(8:0.057254,(((25:0.014850,(23:0.011561,24:0.013894):0.005314):0.026122,22:0.028805):0.0 [...]
+   tree rep.16640000 = (3:0.116733,((2:0.193720,((((13:0.082032,(8:0.089380,((22:0.026576,((23:0.011245,24:0.011092):0.012988,25:0.010051):0.030771):0.007863,21:0.043287):0.038729):0.020833):0.011650,(20:0.120920,5:0.140954):0.091280):0.009478,(((27:0.032566,26:0.016606):0.076687,28:0.051016):0.037674,((29:0.105042,((7:0.110431,((4:0.104777,(9:0.095450,(18:0.026243,17:0.024954):0.045070):0.018029):0.011323,(15:0.038248,16:0.027433):0.055453):0.014160):0.004184,10:0.113112):0.046043):0.00 [...]
+   tree rep.16660000 = ((2:0.182089,((((((22:0.025008,((24:0.015845,23:0.004203):0.008031,25:0.008140):0.029385):0.000804,21:0.055574):0.060501,8:0.089329):0.036102,13:0.065409):0.009966,((20:0.087833,5:0.090750):0.069714,((((4:0.177597,((18:0.031407,17:0.027572):0.040835,9:0.108565):0.036550):0.009023,(7:0.084344,(15:0.048371,16:0.026595):0.053407):0.009971):0.015382,10:0.107393):0.030532,((19:0.085794,14:0.072924):0.035128,(29:0.081168,((27:0.054100,26:0.021306):0.080879,28:0.065103):0 [...]
+   tree rep.16680000 = (((((12:0.077871,11:0.021663):0.066513,(((29:0.116558,((5:0.109229,20:0.089184):0.092512,(28:0.044248,(26:0.020476,27:0.024783):0.092388):0.024300):0.005325):0.017892,((14:0.095244,19:0.065657):0.052159,(((16:0.019791,15:0.028421):0.064731,(7:0.073669,10:0.089381):0.004523):0.004748,(4:0.137895,(9:0.096380,(18:0.032286,17:0.022579):0.046721):0.019169):0.011343):0.032936):0.002954):0.012889,(13:0.100135,((21:0.038015,(22:0.048393,(25:0.008691,(24:0.006714,23:0.00715 [...]
+   tree rep.16700000 = (3:0.138284,(6:0.078615,(2:0.134923,((12:0.060339,11:0.038945):0.043268,(((((21:0.052913,(((23:0.003484,24:0.012157):0.001808,25:0.007075):0.011676,22:0.028844):0.022503):0.044680,8:0.112831):0.013155,(13:0.069345,((29:0.066953,(28:0.047666,(26:0.046890,27:0.016400):0.073198):0.018851):0.012327,(14:0.090964,19:0.073699):0.043106):0.008186):0.001407):0.002990,(5:0.085665,20:0.124404):0.089254):0.002177,((4:0.133798,(9:0.155758,(18:0.037703,17:0.025091):0.036996):0.0 [...]
+   tree rep.16720000 = (6:0.067588,(3:0.120786,(2:0.215029,(((((4:0.124920,((18:0.053132,17:0.015028):0.048424,9:0.094833):0.028502):0.014310,(10:0.122172,((15:0.030072,16:0.031680):0.065953,7:0.131368):0.018020):0.008813):0.016934,(((20:0.066855,5:0.106577):0.077194,(19:0.099566,14:0.067417):0.044380):0.016943,(29:0.085953,(28:0.036357,(27:0.023864,26:0.032197):0.071440):0.016766):0.019241):0.000314):0.011909,(((21:0.070019,(((24:0.009609,23:0.014852):0.002196,25:0.011173):0.025611,22:0 [...]
+   tree rep.16740000 = ((((12:0.063684,11:0.023381):0.058327,((((29:0.100417,((26:0.020511,27:0.035003):0.085326,28:0.055339):0.021951):0.019440,(14:0.125323,19:0.088526):0.046309):0.001037,(7:0.124493,(((16:0.029168,15:0.037806):0.071818,(((18:0.042887,17:0.031191):0.028784,9:0.136522):0.043095,4:0.116827):0.023469):0.008578,10:0.120072):0.012265):0.017041):0.015363,(((5:0.120068,20:0.086275):0.122974,13:0.092428):0.012934,(((22:0.053943,(25:0.008741,(24:0.011917,23:0.015379):0.011892): [...]
+   tree rep.16760000 = (((2:0.170459,(((((29:0.088105,((26:0.030143,27:0.069606):0.076064,28:0.044364):0.017010):0.012688,((19:0.080399,14:0.090476):0.037527,(((4:0.109773,((18:0.043608,17:0.031235):0.050253,9:0.102036):0.029764):0.016518,(10:0.063945,(7:0.113142,(16:0.029365,15:0.042650):0.043132):0.007460):0.011833):0.015000,(5:0.086970,20:0.101175):0.100056):0.002528):0.006239):0.010619,13:0.099162):0.000446,(8:0.117451,(21:0.037042,(22:0.053002,((23:0.010273,24:0.013747):0.007817,25: [...]
+   tree rep.16780000 = (((6:0.095206,3:0.146459):0.004061,((12:0.075263,11:0.034187):0.045513,(((14:0.085757,19:0.084475):0.020294,(((28:0.036604,(26:0.018440,27:0.027994):0.056780):0.041652,29:0.122883):0.026015,((4:0.106799,((16:0.033804,15:0.036696):0.080753,(7:0.093227,((18:0.031935,17:0.016438):0.022865,9:0.105232):0.057416):0.007154):0.011747):0.001567,10:0.090064):0.043815):0.003364):0.007489,(((5:0.063880,20:0.094117):0.085163,13:0.052002):0.002557,(8:0.104393,((((24:0.007672,23: [...]
+   tree rep.16800000 = (6:0.086109,(3:0.116654,(2:0.228793,((12:0.083474,11:0.017866):0.047014,((((13:0.074636,(8:0.066455,(((25:0.001896,(23:0.006202,24:0.016734):0.010944):0.025631,22:0.026171):0.006908,21:0.044236):0.031314):0.039072):0.016454,(5:0.096381,20:0.085218):0.089678):0.004603,((14:0.075794,19:0.073954):0.055554,((28:0.044504,(26:0.027265,27:0.025667):0.070444):0.023917,29:0.106551):0.007408):0.008939):0.010838,(((4:0.126548,((18:0.026756,17:0.019962):0.032700,9:0.102850):0. [...]
+   tree rep.16820000 = (3:0.124476,(6:0.084486,(((((((19:0.077686,14:0.118269):0.019134,(((4:0.121583,((18:0.046014,17:0.022090):0.036520,9:0.109992):0.030791):0.004828,(7:0.072285,(16:0.021808,15:0.046791):0.061924):0.003035):0.017971,10:0.111092):0.023500):0.017454,((28:0.040915,(26:0.018917,27:0.033686):0.058867):0.042191,29:0.085607):0.004786):0.014063,((5:0.124386,20:0.071627):0.074068,13:0.086999):0.004857):0.003878,(8:0.090702,(21:0.040063,(22:0.041662,((23:0.014431,24:0.008416):0 [...]
+   tree rep.16840000 = (((((13:0.079431,((20:0.089400,5:0.104220):0.079715,(((14:0.106727,19:0.093583):0.023267,(((10:0.077989,(15:0.035812,16:0.020360):0.058758):0.001633,(4:0.133946,((18:0.039859,17:0.015549):0.040545,9:0.111743):0.027712):0.004922):0.004927,7:0.100883):0.049694):0.003351,(29:0.107826,(28:0.048967,(27:0.022999,26:0.015920):0.068784):0.023633):0.001234):0.008519):0.004140):0.008634,(8:0.091225,(21:0.043926,((25:0.010603,(24:0.011796,23:0.014962):0.006948):0.024697,22:0. [...]
+   tree rep.16860000 = ((3:0.171132,6:0.093268):0.004076,((((((14:0.075253,19:0.072200):0.031193,(7:0.083485,(((4:0.095278,((18:0.028708,17:0.019919):0.049237,9:0.082842):0.016416):0.011235,(16:0.023814,15:0.041086):0.044736):0.001934,10:0.118474):0.009884):0.036072):0.007547,(((26:0.027749,27:0.038766):0.076597,28:0.040773):0.011875,29:0.117177):0.015200):0.006892,(((5:0.178214,20:0.085117):0.083499,13:0.088627):0.021377,(8:0.085236,(21:0.054470,(22:0.037799,((23:0.015158,24:0.005915):0 [...]
+   tree rep.16880000 = ((2:0.246938,(((((20:0.148776,5:0.074219):0.086141,13:0.068933):0.010181,((21:0.027455,(((25:0.016703,24:0.007997):0.012731,23:0.015803):0.038175,22:0.028286):0.026920):0.029280,8:0.085784):0.047556):0.009817,(((29:0.094061,(28:0.058469,(27:0.043026,26:0.010483):0.069914):0.010549):0.009732,(14:0.080710,19:0.084874):0.021989):0.007859,(((15:0.043765,16:0.035107):0.059781,(7:0.102650,10:0.093497):0.000561):0.006508,(4:0.106949,(9:0.115341,(18:0.039196,17:0.025317):0 [...]
+   tree rep.16900000 = ((2:0.186549,(((((10:0.099815,((16:0.050451,15:0.027018):0.055801,(4:0.094419,((18:0.045074,17:0.019615):0.042880,9:0.121859):0.048361):0.003812):0.005996):0.015983,7:0.091527):0.009389,((19:0.081406,14:0.067045):0.023431,(29:0.099533,((26:0.031423,27:0.032169):0.059744,28:0.059411):0.022132):0.014935):0.017277):0.009624,(13:0.098957,((8:0.093670,(21:0.033975,(((23:0.007704,24:0.007134):0.013695,25:0.002213):0.026009,22:0.041502):0.016702):0.031361):0.015760,(5:0.0 [...]
+   tree rep.16920000 = ((((11:0.044880,12:0.047998):0.082468,((13:0.122731,((((7:0.121595,((4:0.129002,((18:0.041909,17:0.022850):0.075964,9:0.091405):0.018221):0.008912,(16:0.017438,15:0.038946):0.043671):0.016184):0.005557,10:0.070510):0.026853,((19:0.119281,14:0.116924):0.030171,(29:0.077652,((26:0.018021,27:0.040157):0.063523,28:0.074668):0.011847):0.018266):0.010498):0.002620,(5:0.104607,20:0.095065):0.100379):0.004227):0.002495,((((25:0.005145,(24:0.016474,23:0.007747):0.007472):0. [...]
+   tree rep.16940000 = (((((((29:0.119406,(28:0.035639,(27:0.022650,26:0.019729):0.069347):0.019625):0.011723,((19:0.076034,14:0.089303):0.031224,(((4:0.086793,((18:0.049333,17:0.026873):0.045269,9:0.104489):0.022044):0.022715,((15:0.029108,16:0.033777):0.055131,10:0.092404):0.001909):0.011365,7:0.089703):0.014681):0.001415):0.013135,((((22:0.041358,(25:0.004400,(24:0.003985,23:0.012384):0.006348):0.023741):0.016857,21:0.069100):0.029859,8:0.118147):0.015246,(13:0.077009,(20:0.075362,5:0 [...]
+   tree rep.16960000 = ((3:0.169051,((((((((19:0.077982,14:0.090414):0.020906,29:0.102864):0.004411,((((18:0.036011,17:0.020556):0.023839,9:0.104511):0.041583,4:0.138190):0.012437,((15:0.032539,16:0.027737):0.050652,(10:0.073153,7:0.095728):0.010857):0.002498):0.035075):0.000364,(28:0.046360,(27:0.031561,26:0.027961):0.054992):0.027608):0.008000,((((22:0.041239,((24:0.002909,23:0.015266):0.005477,25:0.006678):0.027496):0.019739,21:0.033763):0.039883,8:0.079966):0.001538,13:0.088682):0.00 [...]
+   tree rep.16980000 = (2:0.200720,((3:0.106868,6:0.111538):0.021314,(((((21:0.048532,((25:0.010887,(23:0.015861,24:0.011461):0.004068):0.033587,22:0.043513):0.006373):0.053444,8:0.093309):0.023973,((20:0.073316,5:0.114834):0.110364,13:0.111320):0.000778):0.008735,(((19:0.088234,14:0.067566):0.052969,((((15:0.036312,16:0.035280):0.036593,7:0.107768):0.005717,(((18:0.048016,17:0.014383):0.043237,9:0.087459):0.031051,4:0.123513):0.014392):0.008650,10:0.083739):0.021272):0.000286,(29:0.1089 [...]
+   tree rep.17000000 = ((6:0.086698,((((13:0.076629,((5:0.122791,20:0.085017):0.091849,(8:0.089171,(((25:0.011300,(23:0.016583,24:0.008719):0.000636):0.021497,22:0.029905):0.037649,21:0.022307):0.027076):0.035872):0.001912):0.016251,((((((9:0.100795,(18:0.034670,17:0.024712):0.038536):0.038065,4:0.118726):0.013528,(7:0.089816,(16:0.031573,15:0.039241):0.033036):0.006116):0.013963,10:0.085880):0.018000,(19:0.069866,14:0.091302):0.022016):0.026168,(29:0.067630,((26:0.029263,27:0.021799):0. [...]
+   tree rep.17020000 = (6:0.064414,(3:0.146352,(((((13:0.074041,(5:0.114777,20:0.097895):0.089509):0.000620,(((19:0.110705,14:0.066152):0.039908,(7:0.116825,((16:0.031075,15:0.035230):0.073310,((4:0.123925,(9:0.149733,(18:0.043975,17:0.019794):0.027647):0.016155):0.026931,10:0.116767):0.003186):0.005333):0.021805):0.023612,(29:0.114046,((26:0.021456,27:0.027254):0.077815,28:0.030589):0.035719):0.028458):0.008731):0.009782,((21:0.040179,(((23:0.021324,24:0.012545):0.006950,25:0.013579):0. [...]
+   tree rep.17040000 = (3:0.127099,((((((14:0.103692,19:0.069942):0.023068,((29:0.116449,(28:0.041317,(27:0.029559,26:0.036789):0.091109):0.031465):0.009540,((10:0.104559,((15:0.046478,16:0.015032):0.061323,((9:0.095984,(17:0.015569,18:0.044855):0.043771):0.012803,4:0.126077):0.016696):0.003343):0.010462,7:0.124909):0.026965):0.000880):0.019628,(((20:0.079607,5:0.100893):0.118112,13:0.092698):0.001688,((21:0.052098,(((24:0.005497,23:0.014828):0.003249,25:0.010560):0.023097,22:0.034444):0 [...]
+   tree rep.17060000 = ((6:0.116595,((((13:0.100199,((21:0.070441,(((25:0.009738,23:0.016136):0.002180,24:0.014275):0.019396,22:0.046713):0.002898):0.066984,8:0.052193):0.045661):0.012260,((((5:0.116882,20:0.087779):0.130351,(26:0.027394,27:0.022841):0.073790):0.011039,(29:0.101362,28:0.048497):0.016310):0.019586,((19:0.067446,14:0.120764):0.048013,(((9:0.082522,(18:0.044198,17:0.019765):0.050245):0.018929,4:0.169190):0.037313,((7:0.103570,(16:0.032306,15:0.024278):0.049063):0.007138,10: [...]
+   tree rep.17080000 = ((((12:0.064675,11:0.038873):0.038580,((((19:0.096918,14:0.090972):0.023998,(7:0.106943,(((16:0.012440,15:0.052566):0.085622,(((18:0.049019,17:0.019342):0.046648,9:0.103285):0.027127,4:0.208897):0.015504):0.000858,10:0.093301):0.013058):0.034938):0.015334,(((26:0.034411,27:0.026212):0.068374,28:0.049946):0.022791,29:0.108082):0.016838):0.018434,((13:0.095535,(8:0.118984,((((24:0.016893,23:0.007309):0.007953,25:0.008398):0.013615,22:0.048259):0.018883,21:0.055723):0 [...]
+   tree rep.17100000 = (3:0.141105,(6:0.158226,(2:0.260829,(((((7:0.120629,(15:0.039004,16:0.023815):0.049521):0.014241,(((18:0.038183,17:0.018405):0.054709,9:0.092778):0.033629,4:0.122519):0.006472):0.018887,10:0.135367):0.017409,((13:0.083982,(((21:0.069984,22:0.037132):0.014826,((23:0.008084,24:0.011623):0.001870,25:0.010624):0.008927):0.076173,8:0.072386):0.028865):0.058673,((19:0.116717,14:0.083451):0.033509,((29:0.090197,(20:0.094669,5:0.127102):0.134664):0.002769,((27:0.041058,26: [...]
+   tree rep.17120000 = ((((12:0.057557,11:0.038534):0.052639,((8:0.109018,(21:0.030041,((25:0.004085,(24:0.008647,23:0.006824):0.005562):0.017396,22:0.026440):0.021200):0.023467):0.022922,((((5:0.076653,20:0.090962):0.097367,(((26:0.017401,27:0.041916):0.060874,28:0.029443):0.025804,29:0.074363):0.017522):0.003916,13:0.073826):0.004073,((19:0.094506,14:0.079236):0.018004,(10:0.088614,((7:0.089377,(16:0.020550,15:0.028230):0.048043):0.008634,((9:0.075677,(18:0.035130,17:0.029884):0.044061 [...]
+   tree rep.17140000 = (((2:0.136864,((((13:0.102831,(5:0.089911,20:0.122646):0.062757):0.004422,((((26:0.027416,27:0.027164):0.070367,28:0.045478):0.018309,29:0.094813):0.010057,((10:0.093849,(((9:0.075738,(18:0.042520,17:0.012669):0.042437):0.017769,4:0.131435):0.018388,(7:0.109927,(16:0.030709,15:0.042078):0.035988):0.006069):0.005095):0.022141,(19:0.080858,14:0.083068):0.034088):0.001922):0.009999):0.011252,(8:0.086899,(21:0.036652,((23:0.001980,(25:0.019619,24:0.006359):0.015126):0. [...]
+   tree rep.17160000 = ((6:0.115362,3:0.133197):0.001281,(2:0.141895,(((((((26:0.025042,27:0.044820):0.051987,28:0.038909):0.032595,29:0.093207):0.003279,(19:0.069045,14:0.095817):0.047439):0.001896,((10:0.122852,((16:0.020430,15:0.027840):0.063321,(4:0.138896,(9:0.098588,(18:0.042498,17:0.026202):0.050422):0.019212):0.024992):0.013274):0.013222,7:0.101113):0.027788):0.005836,(((5:0.073872,20:0.093695):0.081151,13:0.087808):0.003725,(8:0.076884,(21:0.035304,(((23:0.007879,24:0.016279):0. [...]
+   tree rep.17180000 = (6:0.062751,((2:0.146548,(((((5:0.085174,20:0.065094):0.069511,(13:0.105642,(8:0.085906,(21:0.034102,(22:0.026099,(25:0.003825,(23:0.008262,24:0.012652):0.007615):0.012931):0.021828):0.016872):0.042397):0.002595):0.021044,((19:0.066699,14:0.080873):0.024355,(((16:0.020366,15:0.035013):0.037644,7:0.112943):0.008005,(10:0.081339,((9:0.099456,(18:0.027408,17:0.016884):0.033983):0.031280,4:0.108074):0.003904):0.000800):0.026909):0.002336):0.004507,(29:0.078603,((26:0.0 [...]
+   tree rep.17200000 = ((3:0.150840,6:0.099002):0.000614,(((((((19:0.092658,14:0.112871):0.022294,((10:0.085812,((16:0.032749,15:0.031720):0.049100,7:0.108313):0.012884):0.001529,(4:0.157458,(9:0.110125,(18:0.032500,17:0.020237):0.037350):0.026433):0.008442):0.020214):0.007532,(29:0.110318,((26:0.019446,27:0.033055):0.058505,28:0.054760):0.032667):0.012578):0.007318,13:0.090508):0.004665,((5:0.144396,20:0.113944):0.101901,(((((24:0.012742,23:0.006879):0.007298,25:0.015890):0.011040,22:0. [...]
+   tree rep.17220000 = ((6:0.067093,3:0.184083):0.003429,(2:0.171035,((11:0.029208,12:0.083858):0.056581,((((20:0.094246,5:0.077362):0.070977,13:0.110222):0.001691,(8:0.097680,(21:0.052075,((25:0.003320,(23:0.013218,24:0.011916):0.007147):0.030478,22:0.025741):0.017888):0.023152):0.024744):0.005348,((((27:0.027444,26:0.025912):0.095356,28:0.047128):0.010918,29:0.097838):0.023567,((14:0.094217,19:0.064233):0.055734,(7:0.091426,((10:0.099700,(15:0.029092,16:0.028011):0.047621):0.005225,(4: [...]
+   tree rep.17240000 = ((3:0.132463,6:0.094646):0.003469,(((((29:0.102309,((26:0.029000,27:0.027505):0.077483,28:0.072344):0.025909):0.011281,(((4:0.157374,(9:0.105795,(18:0.035584,17:0.027033):0.043843):0.033886):0.010224,(10:0.094931,((16:0.018758,15:0.060314):0.040569,7:0.118779):0.012969):0.007348):0.024594,(19:0.090854,14:0.107169):0.032993):0.006108):0.008768,((8:0.092382,(21:0.073947,(((24:0.003417,23:0.010805):0.012152,25:0.010452):0.031417,22:0.038306):0.015784):0.030399):0.0265 [...]
+   tree rep.17260000 = (2:0.155622,(((11:0.038627,12:0.077528):0.054190,((20:0.103405,5:0.108319):0.080680,(((29:0.095514,(28:0.032482,(27:0.031342,26:0.020423):0.067830):0.029407):0.013546,(((4:0.128802,(9:0.089019,(17:0.015128,18:0.037694):0.047723):0.030287):0.008002,(10:0.102232,((15:0.053878,16:0.019002):0.038485,7:0.106250):0.011559):0.000484):0.053523,(14:0.083551,19:0.079604):0.042230):0.018309):0.010717,(13:0.095188,(8:0.097743,(21:0.035779,(22:0.032093,((23:0.014798,24:0.016831 [...]
+   tree rep.17280000 = (3:0.147486,(6:0.093160,((((((19:0.091647,14:0.079915):0.025974,((((26:0.032053,27:0.044123):0.061233,28:0.033130):0.028470,29:0.111441):0.027577,((10:0.076029,(7:0.098167,(16:0.021213,15:0.036193):0.048344):0.011592):0.004487,((9:0.098695,(18:0.047414,17:0.019287):0.058148):0.016753,4:0.186667):0.022265):0.038540):0.009132):0.007660,(13:0.067611,(5:0.123926,20:0.059679):0.100492):0.027921):0.002003,((21:0.053654,(((23:0.007758,24:0.009625):0.010506,25:0.008579):0. [...]
+   tree rep.17300000 = ((((((((21:0.063336,((25:0.001455,(23:0.004233,24:0.010712):0.013617):0.024407,22:0.037366):0.034830):0.023260,8:0.095762):0.026701,13:0.109237):0.002474,((20:0.115990,5:0.085091):0.155227,((29:0.099523,((27:0.032747,26:0.027484):0.103450,28:0.034809):0.027837):0.024651,((((9:0.105946,(18:0.036916,17:0.012982):0.058878):0.032871,4:0.112113):0.005630,(((15:0.035030,16:0.030878):0.045224,7:0.126307):0.011352,10:0.112472):0.005603):0.027675,(19:0.082422,14:0.069192):0 [...]
+   tree rep.17320000 = (((((((((19:0.093272,14:0.108926):0.027325,(20:0.077287,5:0.101456):0.080237):0.002579,(((7:0.113918,(15:0.023259,16:0.029221):0.044980):0.007161,10:0.105013):0.002927,(((17:0.030449,18:0.028084):0.039400,9:0.115426):0.015008,4:0.105966):0.015386):0.046314):0.004368,((28:0.057075,(27:0.051209,26:0.018132):0.052915):0.017861,29:0.084867):0.014470):0.007841,13:0.106912):0.001394,(8:0.078328,((22:0.026829,(25:0.008228,(24:0.005711,23:0.015334):0.001143):0.046424):0.01 [...]
+   tree rep.17340000 = ((6:0.106390,3:0.146787):0.002592,(((12:0.049560,11:0.020602):0.045909,(((((16:0.027370,15:0.034879):0.046124,7:0.102901):0.012014,10:0.117924):0.003372,((9:0.111700,(18:0.025029,17:0.020109):0.038866):0.026870,4:0.122757):0.012077):0.033543,((((21:0.070931,(22:0.030299,(25:0.015061,(23:0.012888,24:0.011493):0.007875):0.027683):0.024093):0.028766,8:0.096644):0.017211,(13:0.094693,(5:0.101595,20:0.147436):0.082089):0.002946):0.003819,((19:0.074736,14:0.094975):0.025 [...]
+   tree rep.17360000 = ((((12:0.088190,11:0.016279):0.069358,(((29:0.104849,((26:0.038133,27:0.027209):0.070718,28:0.059566):0.026208):0.011178,((10:0.107645,(((9:0.104610,(18:0.036730,17:0.024393):0.059081):0.013944,4:0.119123):0.022744,((16:0.028853,15:0.045506):0.049678,7:0.094421):0.014607):0.001729):0.025869,(19:0.091053,14:0.107564):0.024130):0.024114):0.010719,((5:0.131791,20:0.126751):0.115171,(13:0.081674,((21:0.055891,(((24:0.017026,23:0.008878):0.002805,25:0.006211):0.064324,2 [...]
+   tree rep.17380000 = (6:0.097998,(3:0.148730,(((11:0.026213,12:0.076128):0.057299,((13:0.092934,(8:0.103971,((22:0.028979,((24:0.011593,23:0.003081):0.004167,25:0.005477):0.022553):0.013051,21:0.027807):0.038804):0.009071):0.002927,((((28:0.043393,(26:0.022928,27:0.024174):0.065369):0.040663,29:0.101815):0.017385,((((7:0.092513,(16:0.015115,15:0.037427):0.047590):0.018571,(4:0.120713,(9:0.125349,(18:0.032905,17:0.028382):0.044275):0.018662):0.013605):0.001049,10:0.090606):0.025820,(19: [...]
+   tree rep.17400000 = ((((11:0.024484,12:0.069252):0.054579,(((8:0.090707,((22:0.044292,((25:0.010207,23:0.018761):0.000053,24:0.017655):0.032463):0.022304,21:0.029176):0.047661):0.017119,(13:0.077011,(5:0.116606,20:0.118002):0.080115):0.008037):0.007522,((((10:0.113983,((16:0.021650,15:0.032306):0.037754,((9:0.094301,(18:0.044811,17:0.019711):0.063961):0.018359,4:0.127549):0.018022):0.006559):0.015590,7:0.113950):0.024933,(19:0.109232,14:0.103984):0.030321):0.004343,(29:0.101424,(28:0. [...]
+   tree rep.17420000 = ((2:0.174343,((12:0.072070,11:0.023503):0.052080,(((((22:0.034532,((24:0.006903,23:0.011077):0.015457,25:0.006552):0.014696):0.015954,21:0.040535):0.034863,8:0.096221):0.036105,((((4:0.106559,(9:0.096827,(18:0.038293,17:0.016678):0.053590):0.010023):0.003402,(((16:0.026574,15:0.041063):0.026886,7:0.075106):0.010005,10:0.084945):0.007860):0.011108,(14:0.084128,19:0.072436):0.035623):0.008997,(((26:0.024140,27:0.034254):0.058973,28:0.047442):0.025535,29:0.068446):0.0 [...]
+   tree rep.17440000 = ((3:0.146363,6:0.066180):0.019774,(2:0.155263,((12:0.061397,11:0.053527):0.042532,((13:0.099160,(((14:0.108389,19:0.055700):0.034597,((29:0.124542,((26:0.027029,27:0.029152):0.065725,28:0.052008):0.028792):0.001288,((10:0.115011,((16:0.037337,15:0.019696):0.045561,7:0.082959):0.004493):0.012107,(4:0.113894,(9:0.104425,(18:0.025781,17:0.013563):0.055799):0.010752):0.018675):0.032492):0.002980):0.000727,(5:0.085009,20:0.100488):0.074149):0.010980):0.001478,(((22:0.03 [...]
+   tree rep.17460000 = (((2:0.095603,((12:0.077437,11:0.041484):0.026866,(((8:0.098037,(21:0.067684,((23:0.010153,(25:0.009120,24:0.011052):0.002278):0.019010,22:0.015512):0.020963):0.021532):0.009698,((((((26:0.038951,27:0.032171):0.064841,28:0.060013):0.025276,29:0.109048):0.004466,(5:0.105699,20:0.111338):0.059195):0.011225,(((((18:0.032171,17:0.017808):0.035733,9:0.098885):0.011438,4:0.146075):0.028521,(10:0.098495,(16:0.037544,15:0.023207):0.037144):0.004043):0.015354,7:0.124137):0. [...]
+   tree rep.17480000 = ((2:0.147711,(((13:0.073842,(8:0.083120,(21:0.036039,(22:0.041466,(25:0.005857,(23:0.016043,24:0.009590):0.007620):0.029669):0.017677):0.019444):0.030370):0.006269,((((10:0.073603,(7:0.104231,(15:0.033594,16:0.030782):0.047755):0.008236):0.010106,(4:0.099175,((17:0.035766,18:0.042212):0.029956,9:0.115096):0.015655):0.008425):0.027483,(19:0.072752,14:0.093320):0.017244):0.013218,(((28:0.049949,(27:0.024705,26:0.025512):0.081047):0.019968,29:0.082652):0.009561,(20:0. [...]
+   tree rep.17500000 = ((6:0.076668,(2:0.143373,((12:0.064068,11:0.042606):0.045235,(((13:0.108015,(5:0.117164,20:0.090291):0.118958):0.001806,(8:0.085190,((22:0.049892,((23:0.010754,24:0.009470):0.007946,25:0.003523):0.022825):0.010384,21:0.049254):0.020715):0.016666):0.009315,(29:0.108739,(((14:0.073027,19:0.098527):0.029233,((((16:0.022925,15:0.043939):0.048691,7:0.078618):0.001561,10:0.094310):0.003986,(4:0.126734,((18:0.040525,17:0.016276):0.051556,9:0.092495):0.019786):0.007672):0. [...]
+   tree rep.17520000 = ((2:0.169035,(((((20:0.097978,5:0.114914):0.076319,13:0.099977):0.001828,(8:0.088427,((((23:0.014832,24:0.014077):0.001552,25:0.001736):0.023491,22:0.022774):0.045177,21:0.058992):0.037228):0.024148):0.008696,((19:0.047845,14:0.076029):0.059122,(((4:0.129039,((17:0.014995,18:0.047799):0.044652,9:0.142357):0.015728):0.020935,(((15:0.033817,16:0.047359):0.060878,7:0.106909):0.002039,10:0.129524):0.000677):0.015803,((28:0.062231,(27:0.052772,26:0.027079):0.104996):0.0 [...]
+   tree rep.17540000 = ((2:0.157822,((((((29:0.096052,((26:0.042488,27:0.023602):0.093195,28:0.040227):0.040764):0.010196,(14:0.092724,19:0.074107):0.012721):0.005533,(8:0.078042,((21:0.042945,22:0.040621):0.005418,((24:0.011202,23:0.009646):0.006827,25:0.011410):0.030480):0.034036):0.028397):0.009078,(13:0.092181,(5:0.114784,20:0.122368):0.090101):0.009917):0.001118,((4:0.110639,(9:0.103983,(18:0.035688,17:0.020517):0.046956):0.018755):0.010048,(10:0.095942,(7:0.101687,(16:0.028754,15:0 [...]
+   tree rep.17560000 = ((6:0.133063,(((11:0.050715,12:0.067527):0.059756,(((((7:0.121122,(16:0.026226,15:0.027494):0.045353):0.000493,10:0.115489):0.003650,(((18:0.055848,17:0.014200):0.056174,9:0.109488):0.028631,4:0.139709):0.004665):0.030308,((((26:0.011951,27:0.055655):0.085599,28:0.040434):0.025067,29:0.078309):0.020717,(19:0.057178,14:0.102110):0.035387):0.014897):0.001477,(13:0.113841,((5:0.106603,20:0.098148):0.059847,((((25:0.012735,(24:0.014850,23:0.011120):0.005862):0.015589,2 [...]
+   tree rep.17580000 = (((((20:0.091503,5:0.133035):0.109111,((((7:0.089126,((10:0.085625,(((17:0.017413,18:0.034273):0.056213,9:0.113179):0.031631,4:0.141848):0.005664):0.000706,(15:0.036391,16:0.021957):0.050593):0.020664):0.015891,(19:0.075882,14:0.091880):0.041675):0.014717,(29:0.115128,((27:0.034988,26:0.035534):0.072009,28:0.059081):0.029852):0.043042):0.002603,(((21:0.056325,(22:0.027739,((24:0.027135,23:0.021032):0.003891,25:0.012050):0.025605):0.005786):0.020586,8:0.126628):0.03 [...]
+   tree rep.17600000 = (((2:0.162017,((12:0.067607,11:0.038191):0.025615,(((8:0.084938,((22:0.033418,(25:0.001234,(24:0.020627,23:0.007053):0.006529):0.026336):0.023205,21:0.045183):0.026675):0.016011,13:0.085177):0.000206,(((19:0.065278,14:0.095061):0.031862,(((((9:0.102382,(18:0.033176,17:0.012669):0.058796):0.016460,4:0.099768):0.016619,((15:0.037147,16:0.018672):0.061519,10:0.093494):0.003842):0.009968,7:0.114656):0.013149,(((27:0.043957,26:0.018833):0.063911,28:0.047193):0.019545,29 [...]
+   tree rep.17620000 = ((3:0.123815,(2:0.177825,((12:0.059912,11:0.028306):0.046383,(((((22:0.028899,(25:0.012607,(24:0.019113,23:0.006932):0.005671):0.035867):0.010241,21:0.037812):0.031090,8:0.097144):0.043373,13:0.080678):0.009109,((29:0.109233,(((19:0.061904,14:0.079316):0.032980,(((10:0.086666,((9:0.109384,(18:0.054523,17:0.033764):0.073503):0.031065,4:0.153196):0.007405):0.000819,(15:0.031531,16:0.027581):0.057115):0.018907,7:0.111025):0.026795):0.014817,((27:0.035537,26:0.012575): [...]
+   tree rep.17640000 = ((6:0.108703,3:0.204232):0.023066,(2:0.202231,((12:0.067814,11:0.050503):0.036227,(((((22:0.059576,(25:0.008275,(24:0.020698,23:0.009165):0.016420):0.020196):0.018893,21:0.031599):0.007262,8:0.101965):0.036780,13:0.079914):0.007497,(((((27:0.027216,26:0.042835):0.099970,28:0.055331):0.020854,29:0.108712):0.005663,((((15:0.048006,16:0.024626):0.067165,((4:0.097661,(9:0.105721,(18:0.036502,17:0.032016):0.048907):0.016781):0.015333,10:0.119070):0.001650):0.009604,7:0. [...]
+   tree rep.17660000 = (3:0.123566,(6:0.106627,(((12:0.076911,11:0.013701):0.050628,((13:0.096946,((8:0.091435,(21:0.029389,((25:0.007462,(23:0.010370,24:0.006863):0.009812):0.028627,22:0.028333):0.017864):0.027807):0.038019,(20:0.130399,5:0.112196):0.076729):0.017762):0.004480,(((29:0.069951,((27:0.026223,26:0.029401):0.081212,28:0.039639):0.009636):0.038621,(10:0.132661,((7:0.101618,(15:0.025803,16:0.027529):0.052424):0.032697,(4:0.147267,(9:0.145978,(18:0.044005,17:0.028718):0.025743) [...]
+   tree rep.17680000 = ((3:0.124473,(((12:0.078271,11:0.032041):0.048961,((((22:0.052369,(25:0.002291,(23:0.019737,24:0.009193):0.006115):0.024528):0.011863,21:0.054650):0.015259,8:0.099339):0.064894,((((19:0.065267,14:0.132444):0.046605,(((27:0.029387,26:0.014927):0.049293,28:0.050024):0.009135,29:0.132524):0.029586):0.006593,(((9:0.105226,(18:0.035230,17:0.018745):0.043107):0.021264,4:0.110709):0.002874,(((15:0.027255,16:0.033297):0.051773,7:0.113892):0.016552,10:0.094273):0.005737):0. [...]
+   tree rep.17700000 = ((3:0.144083,(((11:0.031792,12:0.059930):0.042891,((((((((17:0.015731,18:0.030662):0.033320,9:0.095345):0.045826,4:0.132333):0.006820,(((15:0.033848,16:0.020337):0.045511,7:0.094700):0.005891,10:0.088007):0.006498):0.024189,(19:0.069151,14:0.089963):0.021212):0.015918,((28:0.047356,(27:0.036648,26:0.032132):0.076037):0.018488,29:0.079914):0.005727):0.006939,13:0.079662):0.010192,((20:0.081366,5:0.120014):0.084113,(((22:0.027297,(25:0.009879,(24:0.010689,23:0.015644 [...]
+   tree rep.17720000 = ((6:0.139871,3:0.143349):0.006563,(2:0.175587,((12:0.063919,11:0.038025):0.054223,(((((7:0.073273,(10:0.101240,((15:0.031294,16:0.030555):0.073640,(((18:0.049736,17:0.010988):0.049485,9:0.110791):0.022051,4:0.121245):0.015952):0.003145):0.008026):0.019632,(14:0.077513,19:0.068754):0.047107):0.000238,((28:0.054288,(27:0.043447,26:0.015138):0.068727):0.021791,29:0.079864):0.032178):0.001142,8:0.113893):0.006562,(13:0.071593,((21:0.041267,(22:0.022617,(25:0.004792,(24 [...]
+   tree rep.17740000 = ((3:0.116972,6:0.109059):0.006628,(2:0.143245,((12:0.077177,11:0.019284):0.069920,((14:0.053087,19:0.075449):0.030612,((((8:0.058810,((((24:0.015167,23:0.011601):0.003315,25:0.006440):0.014367,22:0.021299):0.023528,21:0.048581):0.034256):0.009572,(13:0.083173,(20:0.102515,5:0.103359):0.067539):0.001703):0.005079,(29:0.101199,((27:0.029798,26:0.024876):0.085861,28:0.033277):0.016480):0.006626):0.006547,((4:0.116930,((18:0.067054,17:0.032578):0.041759,9:0.125223):0.0 [...]
+   tree rep.17760000 = (3:0.128200,(6:0.109327,(2:0.150435,((((((28:0.062715,(26:0.028792,27:0.016553):0.058615):0.018560,29:0.084656):0.011479,(14:0.104047,19:0.072431):0.028781):0.000762,((5:0.104037,20:0.099387):0.128646,(7:0.127964,(((16:0.027747,15:0.041631):0.026928,10:0.129824):0.004546,(4:0.104848,(9:0.100182,(18:0.030651,17:0.019019):0.042775):0.037435):0.007660):0.017733):0.008177):0.005500):0.009573,((8:0.068935,((((24:0.013708,23:0.005111):0.007567,25:0.004856):0.017924,22:0. [...]
+   tree rep.17780000 = (3:0.157477,(6:0.105468,(2:0.179769,((11:0.028626,12:0.058227):0.032696,(((8:0.102379,((22:0.060633,((25:0.006922,24:0.007492):0.008441,23:0.002826):0.043374):0.018539,21:0.058721):0.047920):0.007792,((20:0.101697,5:0.102506):0.089781,13:0.115426):0.005716):0.015372,(((10:0.076424,(((9:0.093920,(17:0.023186,18:0.035004):0.057639):0.015805,4:0.105173):0.010792,((15:0.041496,16:0.026027):0.034258,7:0.084717):0.008990):0.006423):0.039182,(19:0.069490,14:0.088393):0.02 [...]
+   tree rep.17800000 = ((6:0.084754,3:0.100393):0.014852,(((((13:0.079553,(20:0.073577,5:0.117335):0.089765):0.000055,((21:0.027310,(22:0.036789,((24:0.007721,23:0.007391):0.012577,25:0.005008):0.017884):0.013054):0.021987,8:0.067972):0.017859):0.006772,((10:0.073306,(((9:0.077921,(17:0.021516,18:0.029585):0.067556):0.021748,4:0.126478):0.007618,(7:0.121164,(15:0.042841,16:0.025882):0.042317):0.008405):0.005889):0.029742,((19:0.054754,14:0.080936):0.023045,(29:0.099724,((27:0.034578,26:0 [...]
+   tree rep.17820000 = ((6:0.074301,(((((20:0.104352,5:0.073944):0.076298,13:0.088209):0.011538,((((((7:0.094608,(15:0.031818,16:0.032521):0.045149):0.010621,((9:0.110539,(17:0.015325,18:0.040270):0.023661):0.029850,4:0.159244):0.022668):0.012153,10:0.115091):0.030070,(19:0.063857,14:0.104132):0.030145):0.004993,((28:0.043502,(27:0.059553,26:0.013052):0.080178):0.009360,29:0.081838):0.033599):0.017510,(8:0.074553,(21:0.044955,((25:0.010863,(24:0.011037,23:0.010831):0.014407):0.050013,22: [...]
+   tree rep.17840000 = ((((((29:0.106229,((27:0.049084,26:0.038818):0.057869,28:0.049225):0.013117):0.020090,(((7:0.079570,(15:0.056928,16:0.019047):0.055684):0.033133,((((17:0.035406,18:0.032830):0.068161,9:0.088857):0.020802,4:0.135644):0.016383,10:0.120774):0.001644):0.032076,(19:0.083686,14:0.093921):0.023835):0.018160):0.005936,((8:0.104084,(21:0.032792,(22:0.053492,((23:0.016072,24:0.010626):0.006198,25:0.004588):0.010839):0.016796):0.028400):0.015180,((20:0.076801,5:0.102252):0.10 [...]
+   tree rep.17860000 = (((((13:0.075596,(5:0.102405,20:0.096732):0.078838):0.005080,((((((24:0.024921,23:0.005412):0.010443,25:0.012512):0.025875,22:0.034408):0.012350,21:0.055625):0.024217,8:0.111836):0.034157,((14:0.092698,19:0.088159):0.029591,(((28:0.056010,(26:0.023908,27:0.044260):0.073695):0.022931,29:0.113823):0.010264,(7:0.101564,((((9:0.119499,(18:0.042730,17:0.013119):0.054856):0.026731,4:0.119176):0.011659,(16:0.018557,15:0.036743):0.061419):0.001163,10:0.098241):0.008658):0. [...]
+   tree rep.17880000 = (((((11:0.030252,12:0.061180):0.061421,(((29:0.103934,(28:0.050546,(27:0.037609,26:0.018952):0.081794):0.014185):0.020360,((19:0.095113,14:0.091608):0.039838,((((15:0.028171,16:0.031718):0.045137,(((17:0.017662,18:0.039108):0.056194,9:0.138334):0.034470,4:0.137900):0.008051):0.004078,10:0.155016):0.009119,7:0.119256):0.031413):0.006907):0.024978,(((8:0.106489,((((23:0.014406,24:0.016017):0.011083,25:0.004377):0.020633,22:0.039975):0.021379,21:0.043997):0.037153):0. [...]
+   tree rep.17900000 = (((((((20:0.096969,5:0.087831):0.094702,(12:0.069608,11:0.028939):0.073081):0.003332,(((((((18:0.034751,17:0.020219):0.052736,9:0.115818):0.025647,4:0.103086):0.013638,10:0.114008):0.001373,(7:0.113312,(15:0.033199,16:0.020868):0.045372):0.001478):0.032190,(14:0.133424,19:0.101731):0.014361):0.006260,((28:0.061026,(27:0.039357,26:0.030880):0.064570):0.028778,29:0.108768):0.007321):0.005439):0.017063,((21:0.039577,(((24:0.015628,23:0.012229):0.008538,25:0.007781):0. [...]
+   tree rep.17920000 = (6:0.053102,(3:0.148588,(((11:0.026278,12:0.100509):0.025626,(((((29:0.095105,((26:0.013593,27:0.058958):0.073756,28:0.066247):0.027375):0.040081,(((9:0.099086,(18:0.056519,17:0.018868):0.047223):0.018528,4:0.155119):0.007502,(10:0.098440,((16:0.030156,15:0.041516):0.054233,7:0.094303):0.008413):0.001835):0.026187):0.005943,(14:0.075240,19:0.094443):0.037213):0.011038,((5:0.109327,20:0.073860):0.116078,13:0.083601):0.003906):0.004218,(8:0.121200,((((23:0.014736,24: [...]
+   tree rep.17940000 = (((((13:0.091165,((21:0.043124,((25:0.018181,(23:0.019181,24:0.011036):0.004196):0.021513,22:0.037576):0.027680):0.019674,8:0.070483):0.024754):0.006410,((20:0.133366,5:0.098784):0.098391,(((((9:0.119360,(18:0.044144,17:0.020342):0.036299):0.025619,4:0.143351):0.006501,(10:0.118069,(7:0.105118,(15:0.036740,16:0.019708):0.054716):0.010847):0.005914):0.031782,(14:0.106925,19:0.108319):0.028401):0.005821,(29:0.072102,((27:0.038420,26:0.030212):0.101673,28:0.056963):0. [...]
+   tree rep.17960000 = ((((((((27:0.030944,26:0.034788):0.061649,28:0.045101):0.019203,29:0.093981):0.003765,((((9:0.118510,(18:0.032140,17:0.021803):0.029845):0.032322,4:0.154035):0.001793,((7:0.088862,(15:0.032894,16:0.032712):0.038614):0.022741,10:0.112911):0.002851):0.021676,(14:0.081118,19:0.111268):0.039075):0.014796):0.006972,(((20:0.095635,5:0.095966):0.064550,13:0.071815):0.005144,(8:0.100646,((22:0.055671,(24:0.010598,(23:0.017448,25:0.010973):0.004649):0.021218):0.019813,21:0. [...]
+   tree rep.17980000 = ((6:0.110124,3:0.134543):0.013669,(2:0.254246,(((((21:0.075053,(22:0.031612,((24:0.011801,23:0.008948):0.003197,25:0.013270):0.011444):0.023673):0.078695,8:0.082666):0.042754,(((10:0.137875,(((16:0.024043,15:0.052319):0.055060,7:0.098627):0.009435,((9:0.121391,(18:0.029245,17:0.027827):0.038328):0.031550,4:0.145517):0.017055):0.002137):0.048202,13:0.133975):0.010757,((29:0.125701,(14:0.099301,19:0.090180):0.039873):0.008883,(28:0.054487,(26:0.056421,27:0.017181):0. [...]
+   tree rep.18000000 = ((2:0.155663,((((((((27:0.035489,26:0.036383):0.071701,28:0.045066):0.037767,29:0.059309):0.021578,(14:0.119030,19:0.058346):0.072381):0.017758,((((18:0.041627,17:0.025645):0.076297,9:0.099559):0.020947,4:0.162123):0.010920,(10:0.099168,((15:0.040339,16:0.032847):0.037281,7:0.108944):0.013163):0.010932):0.035985):0.002233,(8:0.070033,(12:0.056003,11:0.041944):0.071764):0.015256):0.007473,(((22:0.030392,((23:0.011379,24:0.007890):0.009981,25:0.004053):0.028774):0.00 [...]
+   tree rep.18020000 = ((3:0.108522,(2:0.156731,(((13:0.083528,(5:0.075645,20:0.087167):0.063260):0.008502,((8:0.061395,(((25:0.002343,(24:0.016570,23:0.015412):0.003106):0.019900,22:0.028082):0.014698,21:0.026437):0.023581):0.046438,((14:0.075554,19:0.082725):0.033954,((7:0.065154,((16:0.027363,15:0.026151):0.053363,(10:0.100292,((9:0.087482,(18:0.030703,17:0.016851):0.033225):0.018971,4:0.166580):0.007125):0.004726):0.008326):0.018999,(29:0.127198,(28:0.047238,(26:0.027426,27:0.019929) [...]
+   tree rep.18040000 = ((3:0.126952,6:0.076292):0.011279,(2:0.188908,((((8:0.097309,(21:0.035325,(22:0.041826,(25:0.006686,(23:0.017914,24:0.012855):0.007054):0.034294):0.009830):0.032426):0.010836,(((28:0.054161,(27:0.033256,26:0.021887):0.073542):0.035025,29:0.078537):0.015623,(((((15:0.068428,16:0.017169):0.047510,10:0.075619):0.007626,(4:0.106451,(9:0.161781,(17:0.020324,18:0.039361):0.026770):0.024055):0.010762):0.009282,7:0.079603):0.016489,(19:0.113048,14:0.069558):0.041600):0.001 [...]
+   tree rep.18060000 = ((6:0.093491,3:0.111545):0.004044,(2:0.172596,(((((19:0.072330,14:0.063354):0.030774,(10:0.096475,(((9:0.084482,(17:0.017130,18:0.040153):0.035434):0.025298,4:0.131093):0.013916,(7:0.085548,(15:0.040088,16:0.013900):0.047228):0.006715):0.011584):0.017524):0.015128,((28:0.069172,(27:0.031144,26:0.021861):0.076482):0.028188,29:0.075633):0.001459):0.005390,((20:0.084876,5:0.083057):0.089161,(((22:0.059524,(21:0.025435,(25:0.005844,(24:0.012095,23:0.012502):0.007248):0 [...]
+   tree rep.18080000 = (3:0.171576,((((((10:0.098421,((15:0.038985,16:0.020216):0.036621,7:0.091248):0.011203):0.008015,((9:0.117398,(17:0.013689,18:0.048621):0.078664):0.015896,4:0.111043):0.021518):0.034317,(((19:0.092633,14:0.116204):0.018325,(((27:0.048639,26:0.014490):0.073649,28:0.046216):0.044527,29:0.086933):0.002021):0.007261,((8:0.061558,(21:0.029284,(22:0.038765,(25:0.012501,(24:0.017858,23:0.027239):0.005629):0.024966):0.014352):0.025400):0.042370,(13:0.106553,(20:0.070782,5: [...]
+   tree rep.18100000 = ((3:0.103935,6:0.082115):0.019543,(2:0.161621,((12:0.056494,11:0.066651):0.030825,(((20:0.081605,5:0.086038):0.089817,((8:0.078221,(21:0.020067,(((23:0.010115,24:0.004873):0.010292,25:0.010950):0.027952,22:0.033582):0.015403):0.012909):0.032520,13:0.090987):0.000374):0.008069,(((14:0.070452,19:0.081686):0.029373,((((18:0.032987,17:0.029234):0.027796,9:0.114765):0.029718,4:0.106189):0.010372,(10:0.067369,((15:0.027753,16:0.024404):0.041177,7:0.129340):0.006753):0.00 [...]
+   tree rep.18120000 = (6:0.072138,((2:0.161624,(((8:0.097127,(21:0.042215,(22:0.024564,(25:0.005018,(23:0.013291,24:0.003892):0.004060):0.028948):0.008169):0.023272):0.014977,((5:0.088235,20:0.075132):0.086133,(((11:0.031407,12:0.084928):0.054072,((4:0.113262,((18:0.039545,17:0.013980):0.050707,9:0.117717):0.011546):0.048219,(10:0.085069,(7:0.124866,(16:0.025481,15:0.021804):0.051988):0.001110):0.002657):0.016197):0.007719,((14:0.100352,19:0.099674):0.030486,(29:0.092573,(28:0.026055,(2 [...]
+   tree rep.18140000 = ((2:0.137712,((((8:0.090628,((((23:0.010879,24:0.013128):0.006588,25:0.003234):0.021330,22:0.040855):0.018359,21:0.044727):0.041204):0.021000,((20:0.079793,5:0.074812):0.087714,13:0.085952):0.016132):0.003245,(((19:0.080176,14:0.079158):0.035643,((28:0.037249,(27:0.033278,26:0.029149):0.058249):0.027506,29:0.104121):0.016426):0.001838,((4:0.143449,(((15:0.034671,16:0.035263):0.044415,7:0.117395):0.028002,((17:0.018917,18:0.052157):0.042293,9:0.092808):0.030373):0.0 [...]
+   tree rep.18160000 = (((2:0.176728,((12:0.063757,11:0.020104):0.040448,((10:0.097232,(((9:0.106074,(18:0.037241,17:0.025064):0.051101):0.016301,4:0.114152):0.008400,(7:0.098947,(16:0.023540,15:0.045306):0.035942):0.024076):0.006533):0.014671,(((8:0.094109,(((25:0.002101,(24:0.011186,23:0.011444):0.006808):0.015256,22:0.033536):0.002839,21:0.042443):0.049926):0.053131,(13:0.096946,((29:0.088761,(19:0.088636,14:0.094782):0.019698):0.004164,((26:0.031612,27:0.036192):0.075570,28:0.069176) [...]
+   tree rep.18180000 = (6:0.124816,((((11:0.032119,12:0.048725):0.054812,((8:0.077953,((((24:0.009571,25:0.009121):0.003234,23:0.004941):0.029940,22:0.027864):0.016486,21:0.021042):0.069893):0.029866,((((28:0.069733,(26:0.031173,27:0.042804):0.070153):0.024846,29:0.102530):0.011368,((14:0.082802,19:0.083435):0.028534,((((18:0.045287,17:0.028749):0.042759,9:0.141244):0.032859,4:0.135262):0.012448,((7:0.103114,(16:0.025511,15:0.041061):0.039044):0.019520,10:0.096863):0.005052):0.026785):0. [...]
+   tree rep.18200000 = (3:0.087588,((2:0.167969,(13:0.091069,((((20:0.105084,5:0.095347):0.121118,(11:0.023780,12:0.063179):0.064316):0.002908,((((27:0.036839,26:0.029541):0.097370,28:0.037866):0.025320,29:0.074797):0.012076,((7:0.102990,((((17:0.024964,18:0.022284):0.038768,9:0.111566):0.023065,4:0.108897):0.016619,((15:0.031382,16:0.033980):0.039156,10:0.085767):0.002428):0.008962):0.013949,(19:0.067645,14:0.130379):0.025673):0.002413):0.010963):0.002410,(8:0.066354,(21:0.026404,(22:0. [...]
+   tree rep.18220000 = (6:0.049289,(3:0.126983,(2:0.188686,(((((28:0.050437,(26:0.021318,27:0.038757):0.053367):0.010500,29:0.111313):0.009332,((19:0.068804,14:0.136960):0.034377,(((4:0.147799,((18:0.036782,17:0.024332):0.053500,9:0.078093):0.025029):0.007374,((16:0.033659,15:0.030936):0.041667,10:0.125192):0.003959):0.008946,7:0.094496):0.020565):0.001759):0.011647,((13:0.096149,(8:0.056967,((22:0.043314,((24:0.011442,23:0.014991):0.011600,25:0.005573):0.031879):0.021759,21:0.024907):0. [...]
+   tree rep.18240000 = (3:0.115882,(6:0.122718,(((11:0.031887,12:0.069869):0.026662,((((29:0.080009,((27:0.036873,26:0.022517):0.070679,28:0.039231):0.023963):0.023693,(19:0.077522,14:0.092930):0.015263):0.003091,(((15:0.036143,16:0.036159):0.051305,(10:0.073069,((9:0.141513,(17:0.029801,18:0.026841):0.027472):0.021973,4:0.136962):0.013317):0.006331):0.020732,7:0.123982):0.022860):0.013549,((8:0.091293,((22:0.029798,(25:0.003355,(23:0.018426,24:0.021055):0.003889):0.023493):0.015197,21:0 [...]
+   tree rep.18260000 = ((3:0.162387,6:0.088528):0.010847,(((11:0.029450,12:0.067621):0.031251,(((8:0.095995,(21:0.047656,(((24:0.007589,23:0.013832):0.009378,25:0.001127):0.015366,22:0.052159):0.016199):0.029879):0.016907,(13:0.097591,(20:0.100970,5:0.086402):0.064641):0.020098):0.001741,((((27:0.036861,26:0.020368):0.083570,28:0.032049):0.031471,29:0.082782):0.009655,((7:0.121672,((15:0.041371,16:0.021784):0.067242,((10:0.091992,4:0.149539):0.018573,(9:0.095948,(17:0.017871,18:0.034927) [...]
+   tree rep.18280000 = ((6:0.111239,(2:0.153480,((11:0.039818,12:0.076491):0.054834,(((29:0.076607,(28:0.046139,(27:0.051114,26:0.008911):0.066021):0.033979):0.009530,((((7:0.116398,(15:0.042125,16:0.011785):0.048552):0.012283,10:0.106617):0.002183,((9:0.100306,(17:0.038825,18:0.030935):0.049594):0.044046,4:0.146776):0.001603):0.025891,(19:0.099183,14:0.075618):0.032694):0.011315):0.011926,(((20:0.092443,5:0.085559):0.096173,(8:0.141287,((((25:0.013170,23:0.027695):0.000829,24:0.009532): [...]
+   tree rep.18300000 = ((6:0.078354,3:0.129510):0.005191,(2:0.136312,((11:0.041223,12:0.056889):0.037064,((((20:0.058806,5:0.132675):0.102105,13:0.125593):0.004793,(8:0.095491,(21:0.078363,(((23:0.008492,24:0.007402):0.007878,25:0.009373):0.021985,22:0.036624):0.024204):0.027395):0.030732):0.004330,(((19:0.093053,14:0.102354):0.030205,((4:0.127271,(9:0.098871,(17:0.021939,18:0.044366):0.049126):0.019497):0.022216,((7:0.097998,(15:0.043020,16:0.013155):0.055536):0.004938,10:0.067497):0.01 [...]
+   tree rep.18320000 = (6:0.092242,((((((((25:0.008124,(24:0.009882,23:0.016498):0.014553):0.016065,22:0.027545):0.020206,21:0.034644):0.037775,8:0.074682):0.018813,((((10:0.096853,((((18:0.041221,17:0.022049):0.023838,9:0.098382):0.042139,4:0.113375):0.009419,((16:0.029144,15:0.041215):0.063041,7:0.089979):0.009036):0.000608):0.028072,(((26:0.025515,27:0.036296):0.064138,28:0.035558):0.032781,29:0.114038):0.007539):0.007647,(14:0.101501,19:0.097117):0.020139):0.009898,(13:0.089012,(5:0. [...]
+   tree rep.18340000 = (6:0.120480,(((((((((23:0.010677,(25:0.010515,24:0.008113):0.001188):0.021358,22:0.028534):0.015049,21:0.026789):0.036956,8:0.087524):0.021555,(13:0.118366,(5:0.096012,20:0.113422):0.064029):0.005294):0.005872,(((14:0.106989,19:0.077199):0.027810,(((7:0.078737,(16:0.042554,15:0.020052):0.036258):0.016067,10:0.109918):0.009024,(((18:0.043856,17:0.020447):0.043555,9:0.080403):0.049108,4:0.108800):0.007351):0.024674):0.002354,(((26:0.021876,27:0.029961):0.049098,28:0. [...]
+   tree rep.18360000 = (((2:0.174238,(((((29:0.090424,((27:0.046970,26:0.018602):0.072814,28:0.042849):0.015069):0.007791,((19:0.066532,14:0.070149):0.026013,(7:0.110053,(((4:0.129561,(9:0.100585,(18:0.047654,17:0.021419):0.035509):0.028613):0.006631,(15:0.032524,16:0.030273):0.044601):0.001579,10:0.062955):0.007501):0.016444):0.009104):0.008979,(8:0.092865,((22:0.027376,((24:0.005204,23:0.006413):0.012951,25:0.005144):0.022824):0.012275,21:0.059744):0.022277):0.025363):0.001170,(13:0.08 [...]
+   tree rep.18380000 = (((2:0.162722,((((13:0.096650,((21:0.026790,(22:0.029832,((23:0.011496,24:0.010850):0.010707,25:0.009772):0.026697):0.031673):0.024942,8:0.068264):0.045703):0.007605,(((28:0.063906,(27:0.041799,26:0.023676):0.086297):0.033453,29:0.089992):0.013117,((19:0.100030,14:0.087097):0.009748,(10:0.077155,(((15:0.050268,16:0.020425):0.035458,7:0.110777):0.006861,(4:0.111763,(9:0.113667,(18:0.041479,17:0.021171):0.037371):0.029709):0.006063):0.005845):0.027832):0.012935):0.00 [...]
+   tree rep.18400000 = ((6:0.088491,3:0.122933):0.018840,(2:0.162159,(((13:0.112283,(((22:0.040560,(23:0.006663,(24:0.011515,25:0.011133):0.003623):0.017381):0.018768,21:0.057445):0.059678,8:0.076820):0.041337):0.006852,((((19:0.071934,14:0.078836):0.026377,((7:0.109123,10:0.101087):0.014658,((4:0.138018,(9:0.102845,(18:0.045376,17:0.015666):0.041380):0.024342):0.014013,(15:0.055268,16:0.027876):0.042608):0.016276):0.013748):0.015985,((28:0.038696,(27:0.042560,26:0.019560):0.074486):0.02 [...]
+   tree rep.18420000 = ((3:0.118718,6:0.092103):0.017363,(2:0.169292,((11:0.021959,12:0.066734):0.029940,(((4:0.114564,((18:0.037125,17:0.016377):0.041250,9:0.134766):0.027163):0.012001,(((16:0.029717,15:0.021322):0.040302,7:0.103159):0.010867,10:0.100004):0.002551):0.021500,((((21:0.027841,((25:0.007424,(23:0.007833,24:0.008424):0.007806):0.038961,22:0.025471):0.007887):0.059346,8:0.045755):0.035581,(((26:0.022133,27:0.043713):0.070832,28:0.036079):0.031056,29:0.098616):0.008112):0.0152 [...]
+   tree rep.18440000 = ((((12:0.046028,11:0.034960):0.067953,((((5:0.120792,20:0.105863):0.085126,13:0.137826):0.010906,(8:0.134723,(21:0.066496,(((23:0.010539,24:0.009385):0.004287,25:0.008954):0.039346,22:0.045524):0.009936):0.017648):0.061850):0.015913,(((28:0.055415,(26:0.031355,27:0.024696):0.090370):0.018587,29:0.094384):0.008586,(((4:0.154921,((18:0.034656,17:0.035142):0.060153,9:0.108269):0.025598):0.021674,(10:0.121347,((16:0.008543,15:0.044057):0.042404,7:0.115287):0.010305):0. [...]
+   tree rep.18460000 = ((6:0.077581,3:0.170930):0.006810,(((12:0.052665,11:0.041544):0.031681,((((22:0.021750,((24:0.019145,23:0.010424):0.003509,25:0.022483):0.029187):0.023695,21:0.039800):0.031644,8:0.099946):0.022155,(((5:0.080524,20:0.076557):0.100225,(13:0.070074,((((18:0.043581,17:0.013681):0.041598,9:0.119403):0.015717,4:0.146974):0.015087,(10:0.087125,((16:0.025744,15:0.028704):0.047815,7:0.097786):0.021264):0.008350):0.019984):0.001384):0.017519,((29:0.082926,((26:0.019608,27:0 [...]
+   tree rep.18480000 = ((3:0.140570,(((11:0.041847,12:0.056192):0.035886,(((((7:0.104884,(5:0.075420,20:0.087134):0.060679):0.028473,(29:0.103173,(28:0.047712,(26:0.026945,27:0.036899):0.048454):0.030792):0.020742):0.000022,((((9:0.144443,(18:0.040159,17:0.019372):0.045414):0.022849,(16:0.028253,15:0.037861):0.057382):0.007536,4:0.139325):0.015766,10:0.076210):0.010155):0.012853,(14:0.086949,19:0.083395):0.031358):0.017374,((8:0.098613,((((24:0.007204,23:0.005113):0.000474,25:0.009702):0 [...]
+   tree rep.18500000 = (6:0.096901,(3:0.139534,(((12:0.074230,11:0.014462):0.045517,((29:0.115175,(((27:0.036760,26:0.016349):0.054516,28:0.043921):0.036730,((19:0.069122,14:0.079682):0.033935,(7:0.067484,(((9:0.088671,(18:0.031728,17:0.025213):0.034180):0.025304,4:0.130256):0.013152,((15:0.040582,16:0.023563):0.037703,10:0.088256):0.003936):0.009060):0.025150):0.004207):0.000179):0.005818,((13:0.075290,(((22:0.029968,(25:0.014376,(23:0.015917,24:0.009137):0.003143):0.015187):0.011794,21 [...]
+   tree rep.18520000 = ((6:0.082870,3:0.105088):0.014980,(2:0.131789,((11:0.047489,12:0.067468):0.043269,(((13:0.099315,8:0.102800):0.008649,((5:0.129512,20:0.105115):0.036529,(21:0.026719,((25:0.004277,(24:0.012624,23:0.009937):0.009602):0.020153,22:0.053163):0.021356):0.029028):0.040261):0.005111,(((14:0.107717,19:0.045760):0.051601,(7:0.111612,((16:0.023193,15:0.032136):0.051141,(10:0.096546,((9:0.131977,(18:0.044247,17:0.023470):0.049209):0.031203,4:0.148046):0.006231):0.003067):0.00 [...]
+   tree rep.18540000 = (((2:0.144999,(((((7:0.097982,(15:0.062861,16:0.036745):0.038735):0.010779,10:0.111809):0.004955,(4:0.145780,((18:0.033749,17:0.017453):0.034678,9:0.105550):0.026831):0.004975):0.021756,((((19:0.092305,14:0.086926):0.049141,(29:0.069733,((27:0.037822,26:0.032687):0.088648,28:0.047045):0.024707):0.008963):0.006871,((21:0.051013,(22:0.047289,((24:0.010396,23:0.008164):0.006537,25:0.004887):0.026561):0.017130):0.051041,8:0.105111):0.027796):0.008357,(13:0.101998,(20:0 [...]
+   tree rep.18560000 = ((2:0.182913,((11:0.023477,12:0.086986):0.066108,((((((25:0.011268,(24:0.004465,23:0.021167):0.002966):0.020369,21:0.062116):0.002251,22:0.032985):0.055931,8:0.133527):0.026063,(13:0.086282,(20:0.132462,5:0.093209):0.056863):0.009612):0.000406,((((27:0.044818,26:0.023457):0.088215,28:0.063272):0.016818,29:0.115699):0.034424,((((((17:0.018639,18:0.032318):0.030990,9:0.112637):0.036232,4:0.134230):0.027420,10:0.086617):0.008004,((15:0.059327,16:0.024666):0.073606,7:0 [...]
+   tree rep.18580000 = ((3:0.122156,6:0.069544):0.002093,(2:0.165920,(((8:0.072759,((22:0.026382,(25:0.006459,(23:0.013872,24:0.008217):0.003068):0.027030):0.021852,21:0.050815):0.039335):0.030176,(((((19:0.095365,14:0.093128):0.022087,(29:0.103948,(28:0.062380,(27:0.033923,26:0.038584):0.075314):0.016873):0.010192):0.004639,(10:0.088329,((7:0.081590,(15:0.035785,16:0.040095):0.050248):0.005630,(4:0.144201,(9:0.125842,(18:0.033842,17:0.029362):0.060362):0.010769):0.004366):0.007500):0.01 [...]
+   tree rep.18600000 = ((6:0.089563,3:0.132206):0.007444,(2:0.214648,((11:0.051559,12:0.070844):0.044459,((((21:0.038689,((25:0.014817,(23:0.015608,24:0.012761):0.006828):0.020899,22:0.041073):0.034425):0.051175,8:0.108054):0.012894,(((7:0.099667,((10:0.087858,(15:0.042422,16:0.026256):0.037368):0.006404,(4:0.150785,((17:0.017636,18:0.028632):0.038749,9:0.096485):0.017040):0.005754):0.011532):0.025974,(14:0.068671,19:0.074647):0.037383):0.004806,(29:0.076913,((27:0.022206,26:0.039893):0. [...]
+   tree rep.18620000 = (6:0.074132,((2:0.182355,((12:0.060569,11:0.020442):0.031966,(((19:0.111576,14:0.062877):0.030504,((29:0.083108,(28:0.043870,(26:0.042665,27:0.023338):0.081896):0.025080):0.013633,((((16:0.025581,15:0.028332):0.060371,(((18:0.059096,17:0.008931):0.057034,9:0.093806):0.031578,4:0.127536):0.002196):0.036435,10:0.088671):0.009462,7:0.077052):0.018853):0.002696):0.004680,((13:0.091939,(5:0.117648,20:0.086531):0.085299):0.007134,((21:0.038484,(((23:0.017416,24:0.009532) [...]
+   tree rep.18640000 = ((6:0.077763,(2:0.157468,((11:0.023256,12:0.081112):0.067385,(((14:0.081443,19:0.093764):0.018074,((7:0.123543,((4:0.119317,((17:0.017693,18:0.033129):0.033368,9:0.106064):0.017995):0.002995,(10:0.072825,(15:0.046783,16:0.029328):0.046572):0.004184):0.011414):0.020945,(29:0.077773,((27:0.033289,26:0.028395):0.075918,28:0.037281):0.029117):0.006024):0.000604):0.002539,((13:0.070807,(20:0.113368,5:0.085415):0.114565):0.007831,(((((24:0.009578,23:0.007178):0.009735,25 [...]
+   tree rep.18660000 = ((6:0.073730,3:0.091521):0.008950,(((12:0.065925,11:0.049118):0.035707,(((13:0.083867,(5:0.082029,20:0.100748):0.065106):0.008391,(8:0.087259,((((24:0.012361,23:0.006963):0.006945,25:0.004194):0.037699,22:0.035725):0.015524,21:0.049298):0.030920):0.041578):0.011651,((19:0.078135,14:0.082543):0.033313,((((26:0.020278,27:0.033264):0.063422,28:0.031335):0.032752,29:0.069126):0.013848,(7:0.104180,((4:0.184737,(9:0.118558,(18:0.038151,17:0.013524):0.049445):0.025643):0. [...]
+   tree rep.18680000 = (6:0.090733,(3:0.166891,(((11:0.064812,12:0.054160):0.019769,((13:0.111824,(20:0.107931,5:0.100641):0.076874):0.001042,(((21:0.050093,(22:0.029927,(25:0.003389,(24:0.013695,23:0.010246):0.006725):0.041391):0.008984):0.045618,8:0.101388):0.020870,((((((15:0.032587,16:0.017910):0.046945,7:0.092716):0.008009,10:0.084557):0.005456,(((17:0.030949,18:0.027182):0.048132,9:0.126794):0.034910,4:0.112449):0.007745):0.030825,(14:0.096286,19:0.092010):0.023880):0.008296,(((27: [...]
+   tree rep.18700000 = (6:0.101474,(3:0.137565,((((13:0.103625,(((14:0.105624,19:0.073837):0.021400,((10:0.081035,(7:0.111414,(15:0.029342,16:0.020020):0.040993):0.004841):0.008834,(4:0.125660,((17:0.015428,18:0.048968):0.055269,9:0.101441):0.043081):0.021565):0.023172):0.002977,((20:0.123789,5:0.081448):0.104285,(((27:0.028200,26:0.033868):0.055176,28:0.050898):0.019815,29:0.097786):0.010580):0.008741):0.009938):0.005837,(((22:0.025281,(25:0.012290,(23:0.012454,24:0.014583):0.007287):0. [...]
+   tree rep.18720000 = ((6:0.075356,3:0.134264):0.000175,(2:0.137794,((13:0.078459,(((29:0.097961,(28:0.052657,(27:0.037514,26:0.012025):0.068837):0.018035):0.016529,(((10:0.082408,((15:0.049455,16:0.012307):0.053069,(4:0.117774,(9:0.081203,(18:0.040027,17:0.031149):0.047846):0.035820):0.006981):0.000314):0.005179,7:0.086934):0.027402,(19:0.068385,14:0.111432):0.029095):0.011825):0.007992,((20:0.081814,5:0.109807):0.110457,(8:0.102315,((22:0.033022,(25:0.017304,(24:0.004649,23:0.019037): [...]
+   tree rep.18740000 = ((2:0.230925,(((((8:0.123301,13:0.086241):0.004323,(((29:0.089550,(28:0.033324,(27:0.051910,26:0.022495):0.069978):0.019866):0.006534,((((15:0.022436,16:0.031320):0.035240,7:0.088572):0.007488,(4:0.108877,(9:0.093314,(18:0.043432,17:0.037466):0.074039):0.025470):0.005213):0.004146,10:0.065792):0.019897):0.005707,(19:0.085227,14:0.094795):0.023741):0.012720):0.011134,(20:0.124130,5:0.111570):0.091473):0.018125,(21:0.017509,(((23:0.014949,24:0.005652):0.003482,25:0.0 [...]
+   tree rep.18760000 = ((2:0.197530,(((((28:0.058829,(27:0.014300,26:0.037538):0.045120):0.027243,(29:0.103334,(19:0.100446,14:0.108191):0.052014):0.011790):0.004425,((20:0.130216,5:0.068120):0.104425,((((15:0.036371,16:0.022504):0.053550,(4:0.154305,(9:0.112939,(18:0.045912,17:0.023843):0.037556):0.021988):0.018617):0.002257,10:0.115863):0.006813,7:0.124530):0.010263):0.018001):0.008832,(13:0.106446,(8:0.102358,((((23:0.012144,24:0.014394):0.005406,25:0.005448):0.035270,22:0.039661):0.0 [...]
+   tree rep.18780000 = ((((12:0.067009,11:0.041593):0.033017,(((21:0.063445,(22:0.036666,((23:0.013604,24:0.011120):0.003221,25:0.017209):0.032590):0.006769):0.037802,8:0.086325):0.026007,(13:0.127336,((((14:0.129942,19:0.080878):0.021442,((((16:0.028674,15:0.056185):0.056752,7:0.111687):0.029162,(4:0.122937,(9:0.128059,(18:0.036135,17:0.035209):0.040277):0.018555):0.039964):0.011322,10:0.108815):0.037229):0.001133,((28:0.028826,(26:0.019954,27:0.037745):0.078305):0.039093,29:0.081952):0 [...]
+   tree rep.18800000 = ((3:0.086910,6:0.078548):0.008269,(2:0.152513,(((((19:0.048236,14:0.073059):0.051955,(((28:0.042663,(27:0.042270,26:0.027751):0.072666):0.047343,((4:0.124955,((18:0.040446,17:0.012939):0.035407,9:0.100693):0.027704):0.018963,(10:0.069160,(7:0.099859,(15:0.041980,16:0.016908):0.036207):0.003279):0.004429):0.021166):0.002984,29:0.092106):0.002208):0.004727,(13:0.093907,(20:0.122638,5:0.087352):0.083008):0.002408):0.001561,(8:0.105616,((((24:0.010126,23:0.009461):0.00 [...]
+   tree rep.18820000 = ((6:0.084442,(2:0.167321,(((((12:0.055886,11:0.023056):0.073714,((29:0.131571,(28:0.067167,(27:0.030013,26:0.046301):0.041354):0.015184):0.005803,((19:0.111998,14:0.078100):0.020913,((4:0.116839,((18:0.038315,17:0.021648):0.031127,9:0.142975):0.025255):0.005821,(10:0.078407,((15:0.038725,16:0.027304):0.033990,7:0.081611):0.020830):0.008545):0.021551):0.001232):0.003922):0.005096,(20:0.081282,5:0.102380):0.100437):0.001421,(8:0.064518,((25:0.007598,(24:0.011784,23:0 [...]
+   tree rep.18840000 = (((((12:0.057483,11:0.014603):0.072339,(((20:0.082443,5:0.104768):0.105405,(((((4:0.098893,((18:0.027415,17:0.027625):0.032943,9:0.114786):0.037794):0.007685,(7:0.117074,(15:0.025648,16:0.027646):0.035496):0.010181):0.000290,10:0.105847):0.031592,(19:0.054756,14:0.125823):0.053430):0.004439,((28:0.036115,(27:0.034447,26:0.029952):0.064034):0.015262,29:0.100835):0.023652):0.010302):0.019623,((8:0.078838,(21:0.044995,(22:0.032474,((23:0.008829,24:0.013784):0.015615,2 [...]
+   tree rep.18860000 = ((((12:0.071210,11:0.022270):0.057581,((((21:0.032891,(22:0.037410,(25:0.005464,(24:0.012027,23:0.010838):0.002968):0.026842):0.011448):0.014332,8:0.119355):0.036224,(((14:0.071707,19:0.057321):0.031228,(29:0.078219,((26:0.022919,27:0.035592):0.086421,28:0.031864):0.025832):0.031531):0.009424,((((16:0.029833,15:0.050062):0.035157,7:0.102142):0.012654,(4:0.115427,((18:0.027344,17:0.016367):0.051169,9:0.114062):0.011558):0.007208):0.004595,10:0.095405):0.015210):0.00 [...]
+   tree rep.18880000 = ((3:0.092687,6:0.100354):0.013954,((((10:0.078033,(((9:0.116931,(17:0.022030,18:0.027051):0.036109):0.025369,4:0.137807):0.005741,(7:0.072265,(15:0.025459,16:0.016635):0.052829):0.013753):0.000692):0.021824,((((28:0.054087,(27:0.032889,26:0.025049):0.076494):0.029500,29:0.066594):0.019083,(14:0.084173,19:0.059479):0.009142):0.017977,((13:0.077688,(20:0.095655,5:0.084913):0.084268):0.003693,(8:0.076860,(21:0.059717,((25:0.019412,(23:0.006405,24:0.013324):0.002460):0 [...]
+   tree rep.18900000 = ((6:0.113030,3:0.127107):0.017392,(2:0.197277,((((13:0.097220,(20:0.073900,5:0.102856):0.108328):0.011363,(8:0.087255,(((25:0.012621,(23:0.016920,24:0.007822):0.010148):0.019473,22:0.060042):0.019196,21:0.027310):0.040866):0.018305):0.006319,((29:0.126863,((27:0.030540,26:0.016428):0.074972,28:0.064915):0.026387):0.006772,((14:0.076044,19:0.064367):0.047233,((10:0.094253,(7:0.093166,(15:0.037355,16:0.032895):0.039014):0.002732):0.004863,(4:0.093979,(9:0.070596,(17: [...]
+   tree rep.18920000 = (6:0.095894,(3:0.105672,(2:0.170534,((((((14:0.069210,19:0.080340):0.036549,(((27:0.034837,26:0.028795):0.049128,28:0.076752):0.022267,29:0.102429):0.001946):0.005585,((10:0.085128,((15:0.040215,16:0.025255):0.066265,7:0.082863):0.006067):0.013214,(4:0.133178,(9:0.099655,(17:0.016598,18:0.038361):0.034949):0.015987):0.008733):0.023012):0.009131,(13:0.114015,(8:0.088607,((((24:0.012024,23:0.014860):0.007357,25:0.011046):0.023600,22:0.029599):0.018067,21:0.035294):0. [...]
+   tree rep.18940000 = ((((11:0.019415,12:0.080944):0.030565,(((13:0.088775,(5:0.077276,20:0.126003):0.130773):0.014840,(((((24:0.011600,23:0.012086):0.004050,25:0.005915):0.020034,22:0.042842):0.012184,21:0.049366):0.039980,8:0.096675):0.016495):0.008529,((((26:0.029457,27:0.034074):0.053575,28:0.053643):0.033085,29:0.109984):0.016998,((14:0.098369,19:0.059896):0.043033,((10:0.109281,7:0.101577):0.000480,(4:0.164337,(((18:0.025883,17:0.028864):0.040979,9:0.101698):0.037957,(16:0.032427, [...]
+   tree rep.18960000 = ((2:0.183164,((((20:0.091916,5:0.107102):0.078846,13:0.101827):0.001476,((8:0.084156,(((25:0.006346,(23:0.008951,24:0.010589):0.002900):0.035294,22:0.023961):0.016076,21:0.040513):0.029296):0.020361,(29:0.103920,(((27:0.043318,26:0.021569):0.058361,28:0.048520):0.043462,(((((15:0.029730,16:0.025515):0.048141,10:0.074104):0.009464,((9:0.107213,(17:0.045746,18:0.039873):0.047093):0.032371,4:0.126245):0.010086):0.005743,7:0.075948):0.020141,(19:0.074097,14:0.100495):0 [...]
+   tree rep.18980000 = ((2:0.142711,((((((21:0.051350,((24:0.013951,23:0.010363):0.001253,25:0.001153):0.043012):0.002832,22:0.039897):0.037108,8:0.092718):0.009556,13:0.106634):0.003405,(((14:0.067876,19:0.057485):0.020702,(7:0.069934,((10:0.079998,(((18:0.028093,17:0.020267):0.033635,9:0.103603):0.033326,4:0.107210):0.016049):0.003899,(16:0.045326,15:0.050386):0.057917):0.008013):0.015715):0.012877,((5:0.097356,20:0.096976):0.053418,(((26:0.021322,27:0.042130):0.047016,28:0.048904):0.0 [...]
+   tree rep.19000000 = (3:0.141449,(6:0.123270,((((((5:0.124357,20:0.069042):0.092150,((((26:0.028384,27:0.022779):0.069319,28:0.044057):0.030403,29:0.114292):0.003599,((14:0.083260,19:0.075079):0.041527,(((4:0.104762,((18:0.036135,17:0.028015):0.029202,9:0.104631):0.024802):0.009234,((16:0.029970,15:0.051711):0.051667,7:0.073035):0.011042):0.007263,10:0.094341):0.025174):0.011611):0.011802):0.002375,((21:0.023742,(((23:0.018532,24:0.011343):0.010466,25:0.002023):0.010768,22:0.043857):0. [...]
+   tree rep.19020000 = ((6:0.083407,3:0.156433):0.001853,(2:0.181074,((((((((23:0.001061,(24:0.009832,25:0.006428):0.008884):0.031269,22:0.020637):0.021514,21:0.031150):0.068164,8:0.076413):0.028473,(20:0.068041,5:0.087297):0.102043):0.011057,(((10:0.089951,((7:0.113590,(15:0.019810,16:0.058863):0.025670):0.018519,(4:0.132816,(9:0.109945,(17:0.009042,18:0.041213):0.036941):0.042124):0.005525):0.004712):0.018021,(19:0.082759,14:0.094005):0.044619):0.007814,((28:0.051419,(27:0.032274,26:0. [...]
+   tree rep.19040000 = ((3:0.138383,(2:0.170401,(((10:0.116917,(((16:0.017217,15:0.034966):0.057863,7:0.101679):0.021261,(4:0.110905,(9:0.139664,(18:0.032870,17:0.020539):0.036820):0.025806):0.003732):0.014651):0.019506,((((14:0.080813,19:0.092590):0.030212,((28:0.031792,(26:0.018182,27:0.039443):0.088930):0.025247,29:0.089453):0.017602):0.004799,((5:0.087907,20:0.085929):0.099866,13:0.098891):0.010124):0.005538,(8:0.045370,((22:0.033974,(25:0.005507,(24:0.018384,23:0.003544):0.004487):0 [...]
+   tree rep.19060000 = ((2:0.174978,((12:0.063886,11:0.030176):0.040455,(((19:0.054217,14:0.123236):0.045782,((29:0.064937,((27:0.028152,26:0.023836):0.074602,28:0.054764):0.014798):0.016286,(((20:0.099435,5:0.112776):0.090472,13:0.078781):0.000535,(8:0.098596,(21:0.059627,((25:0.000962,(24:0.013726,23:0.010155):0.017407):0.027169,22:0.048932):0.017628):0.031567):0.024686):0.018173):0.002214):0.004255,((10:0.110254,(((9:0.118132,(18:0.034694,17:0.012603):0.042862):0.041457,4:0.121017):0. [...]
+   tree rep.19080000 = (((6:0.099449,3:0.152721):0.012603,(((((14:0.087768,19:0.088616):0.045438,(29:0.078175,(28:0.046153,(26:0.028716,27:0.038472):0.064020):0.029476):0.032165):0.006523,(((5:0.124197,20:0.084508):0.114553,13:0.111876):0.002498,(((22:0.032982,((24:0.006086,23:0.009311):0.005958,25:0.002733):0.024391):0.024603,21:0.045997):0.032688,8:0.108682):0.025790):0.011651):0.005947,(((4:0.131238,((16:0.038818,15:0.026548):0.069371,7:0.118249):0.010151):0.001848,((18:0.039059,17:0. [...]
+   tree rep.19100000 = (((2:0.226554,((12:0.059503,11:0.026737):0.040639,((((29:0.097704,(28:0.043231,(26:0.032403,27:0.022191):0.086615):0.036846):0.010100,(10:0.092400,((7:0.083377,(16:0.015662,15:0.040979):0.043031):0.016053,(((18:0.038175,17:0.026095):0.027892,9:0.083898):0.020837,4:0.121686):0.022610):0.002061):0.026234):0.001037,(14:0.088626,19:0.068118):0.045870):0.007464,((8:0.107738,(((25:0.001010,(23:0.024108,24:0.009731):0.006780):0.011552,22:0.054592):0.018052,21:0.058859):0. [...]
+   tree rep.19120000 = ((((((5:0.145296,20:0.076875):0.071410,(13:0.095922,((21:0.057135,(22:0.030314,(25:0.005698,(23:0.014447,24:0.009176):0.009710):0.025203):0.019741):0.040089,8:0.106007):0.006627):0.012339):0.014328,((((26:0.018392,27:0.042694):0.067058,28:0.028137):0.036721,29:0.091225):0.008237,((14:0.095675,19:0.071261):0.025736,(10:0.121621,(((16:0.020331,15:0.033224):0.054926,7:0.077248):0.028763,(4:0.127401,((18:0.050501,17:0.013347):0.044890,9:0.099108):0.024411):0.003359):0. [...]
+   tree rep.19140000 = ((6:0.080748,(((((((7:0.081507,(16:0.031441,15:0.037880):0.047023):0.010751,(((18:0.047587,17:0.016335):0.038576,9:0.130861):0.006506,4:0.126837):0.009977):0.010025,10:0.096091):0.033967,((19:0.088160,14:0.114408):0.022805,(29:0.078228,(28:0.038578,(26:0.031770,27:0.033076):0.055975):0.024948):0.004590):0.004081):0.008306,(((21:0.040917,((25:0.004698,(24:0.007729,23:0.011040):0.005901):0.012475,22:0.035948):0.035606):0.012351,8:0.093977):0.013913,((5:0.064454,20:0. [...]
+   tree rep.19160000 = ((6:0.078759,((((((29:0.086278,(28:0.049578,(27:0.037178,26:0.038481):0.061169):0.015809):0.021461,(19:0.059481,14:0.096608):0.029256):0.003259,((10:0.094511,((15:0.029251,16:0.022748):0.043131,7:0.120262):0.010352):0.019548,((9:0.119660,(18:0.027952,17:0.026740):0.064987):0.027299,4:0.115627):0.014459):0.024096):0.011359,((20:0.075837,5:0.106623):0.091256,(((21:0.077621,(22:0.037930,((24:0.012916,25:0.010476):0.003981,23:0.027683):0.045425):0.018043):0.031725,8:0. [...]
+   tree rep.19180000 = ((6:0.087233,3:0.118067):0.024453,((((5:0.096501,20:0.091223):0.077410,(((((((16:0.017739,15:0.035583):0.035311,(((18:0.035327,17:0.018159):0.044048,9:0.138674):0.025861,4:0.107757):0.013249):0.005353,10:0.087078):0.026002,7:0.105968):0.031854,(19:0.108583,14:0.057543):0.026593):0.009383,(29:0.105712,(28:0.038460,(26:0.025850,27:0.045637):0.073085):0.018867):0.023008):0.022166,((((22:0.027517,((24:0.004634,23:0.010381):0.007461,25:0.007249):0.036586):0.016302,21:0. [...]
+   tree rep.19200000 = ((3:0.125800,((((((16:0.020621,15:0.037614):0.055992,((4:0.090823,((18:0.028117,17:0.026879):0.035835,9:0.078483):0.015646):0.014338,7:0.116308):0.000859):0.004837,10:0.114606):0.021243,(((((((23:0.009197,24:0.011938):0.005109,25:0.003326):0.016669,22:0.055318):0.016539,21:0.056803):0.032448,8:0.105433):0.010931,((5:0.106921,20:0.081902):0.056245,13:0.082745):0.016021):0.013673,((28:0.051998,(26:0.024125,27:0.048312):0.061898):0.022249,(29:0.109495,(19:0.095455,14: [...]
+   tree rep.19220000 = ((3:0.123268,(2:0.169786,(((((19:0.095170,14:0.101780):0.036610,((10:0.082540,(7:0.123836,(16:0.025865,15:0.036573):0.035204):0.011167):0.011075,(((18:0.041053,17:0.029909):0.031224,9:0.106782):0.024518,4:0.081190):0.008433):0.016913):0.004345,(29:0.085613,((26:0.035524,27:0.026430):0.066240,28:0.060352):0.010126):0.033936):0.010363,(13:0.115962,((5:0.093524,20:0.082862):0.085100,(((22:0.037355,((23:0.009343,24:0.008114):0.008216,25:0.005349):0.034322):0.009311,21: [...]
+   tree rep.19240000 = (((((((29:0.119413,((27:0.043553,26:0.047671):0.077592,28:0.057069):0.020727):0.022879,((19:0.059241,14:0.081942):0.033879,(7:0.118113,((4:0.147213,((18:0.031715,17:0.040433):0.028990,9:0.101488):0.039799):0.000693,(10:0.099752,(15:0.038098,16:0.022705):0.046716):0.010875):0.022496):0.011309):0.009983):0.007895,((13:0.089606,(8:0.100781,(((25:0.003794,(24:0.013352,23:0.006613):0.014527):0.037654,22:0.022694):0.014239,21:0.039385):0.025865):0.025360):0.002910,(20:0. [...]
+   tree rep.19260000 = ((((((10:0.116494,(7:0.110994,((15:0.038668,16:0.022195):0.054312,((9:0.091794,(17:0.013546,18:0.031081):0.054902):0.012041,4:0.137188):0.013639):0.002480):0.030935):0.019883,(((20:0.114951,5:0.109508):0.093027,(13:0.093731,(8:0.089034,((22:0.040608,((24:0.007941,23:0.016096):0.004202,25:0.005117):0.009265):0.015720,21:0.076668):0.032690):0.023465):0.011860):0.011608,((((27:0.032857,26:0.014504):0.044742,28:0.049921):0.014337,29:0.102646):0.006579,(19:0.084693,14:0 [...]
+   tree rep.19280000 = (((((((21:0.053948,(22:0.018830,(25:0.004087,(24:0.018287,23:0.014504):0.001427):0.025968):0.018584):0.040451,8:0.097154):0.049091,(((((((18:0.021769,17:0.021383):0.050890,9:0.134287):0.026931,4:0.146820):0.009433,(10:0.132646,((15:0.035835,16:0.027603):0.044910,7:0.116102):0.005263):0.020495):0.023547,(19:0.065778,14:0.091867):0.028988):0.004344,(29:0.107492,(28:0.051280,(27:0.025312,26:0.041023):0.065750):0.025222):0.024962):0.010709,13:0.110044):0.002581):0.0063 [...]
+   tree rep.19300000 = ((3:0.129381,6:0.067648):0.013748,(2:0.153591,(((20:0.065416,5:0.096901):0.100573,((13:0.072487,((21:0.052714,((23:0.003735,(25:0.007679,24:0.010566):0.003150):0.032415,22:0.024574):0.030021):0.027313,8:0.098956):0.017328):0.005721,((7:0.086637,((((15:0.045974,16:0.012752):0.063308,((18:0.028795,17:0.016881):0.045918,9:0.067726):0.024262):0.005769,4:0.120359):0.020122,10:0.062792):0.013467):0.009908,((29:0.097065,(19:0.052171,14:0.103320):0.033170):0.005358,(28:0.0 [...]
+   tree rep.19320000 = (((((11:0.042578,12:0.081535):0.065018,(((20:0.097513,5:0.085496):0.092196,((29:0.073572,(28:0.034087,(27:0.030994,26:0.020610):0.085911):0.029210):0.023744,((7:0.093919,(((9:0.113936,(17:0.024909,18:0.033310):0.032512):0.022430,4:0.094133):0.018829,(10:0.098120,(15:0.040018,16:0.024076):0.057217):0.002722):0.015308):0.034681,(19:0.062132,14:0.080032):0.039222):0.004175):0.009049):0.000602,((((22:0.042205,((23:0.015889,24:0.023702):0.003811,25:0.012515):0.019742):0 [...]
+   tree rep.19340000 = (((((((22:0.023602,((23:0.014234,24:0.019138):0.000094,25:0.008465):0.030397):0.028191,21:0.046600):0.028534,8:0.114143):0.018574,((13:0.099867,((5:0.076437,20:0.081968):0.085330,((10:0.093396,((16:0.039980,15:0.030014):0.043904,7:0.108719):0.011196):0.013011,(4:0.120401,((18:0.045524,17:0.015030):0.036398,9:0.111651):0.021530):0.020902):0.011614):0.009566):0.005540,(29:0.088937,((19:0.083231,14:0.098338):0.032373,(28:0.055859,(26:0.024908,27:0.032601):0.082603):0. [...]
+   tree rep.19360000 = (6:0.093948,(3:0.140894,(2:0.178835,((((((28:0.052299,(26:0.053953,27:0.027327):0.065255):0.022544,29:0.116667):0.016307,((((4:0.117333,((18:0.039999,17:0.012367):0.056817,9:0.080663):0.026765):0.008413,(10:0.096159,(16:0.028956,15:0.040359):0.053672):0.006041):0.016653,((5:0.084456,20:0.115397):0.073856,7:0.059593):0.036604):0.029549,(19:0.074148,14:0.129227):0.021615):0.001069):0.005720,(8:0.068724,(21:0.073667,((25:0.005677,(24:0.017922,23:0.004308):0.004530):0. [...]
+   tree rep.19380000 = ((3:0.137522,(2:0.246175,((11:0.031771,12:0.074215):0.049825,(((13:0.117983,((22:0.033731,(21:0.041792,(25:0.002629,(24:0.006612,23:0.016999):0.016139):0.025012):0.005807):0.044602,8:0.146478):0.006839):0.008739,(5:0.158455,20:0.051523):0.141435):0.004670,(((28:0.046954,(26:0.023543,27:0.041341):0.067557):0.029705,29:0.133925):0.020404,(((16:0.024995,15:0.034190):0.084624,(10:0.121241,((((18:0.045247,17:0.014724):0.022556,9:0.130473):0.049079,4:0.172455):0.005321,7 [...]
+   tree rep.19400000 = (((11:0.042657,12:0.067291):0.038950,(((((22:0.017909,(25:0.008280,(23:0.015020,24:0.006834):0.011397):0.035968):0.023805,21:0.039621):0.026139,8:0.087581):0.041338,((5:0.117008,20:0.096594):0.090568,13:0.076712):0.019296):0.003145,(((29:0.083248,((26:0.046766,27:0.018995):0.071785,28:0.057329):0.012503):0.010277,(14:0.072556,19:0.076803):0.034257):0.001243,((10:0.085352,((16:0.030017,15:0.040511):0.053562,7:0.138890):0.020511):0.007664,(4:0.132802,((18:0.036031,17 [...]
+   tree rep.19420000 = (6:0.079135,((((11:0.024673,12:0.059049):0.034523,(((((26:0.029328,27:0.043410):0.056937,28:0.060322):0.023084,29:0.113700):0.005151,((((((18:0.038057,17:0.034341):0.059322,9:0.083700):0.029146,4:0.107639):0.005083,((16:0.018247,15:0.044299):0.036366,10:0.099393):0.009531):0.010392,7:0.094425):0.009309,(14:0.103073,19:0.062104):0.036549):0.005698):0.009472,(((((25:0.011879,(24:0.014292,23:0.013540):0.004017):0.022592,22:0.037344):0.015636,21:0.036333):0.013814,8:0. [...]
+   tree rep.19440000 = ((((((((19:0.080272,14:0.081232):0.041045,(7:0.083545,(10:0.119634,((16:0.029159,15:0.021732):0.035797,((9:0.094613,(18:0.045691,17:0.033210):0.029946):0.038852,4:0.138022):0.024873):0.000730):0.006976):0.028234):0.005919,((5:0.101633,20:0.107323):0.137376,(29:0.096394,((26:0.024490,27:0.039898):0.072401,28:0.036239):0.032750):0.008610):0.013963):0.022931,((8:0.106163,(21:0.055943,((25:0.007384,(24:0.006931,23:0.004878):0.004143):0.032501,22:0.029069):0.026707):0.0 [...]
+   tree rep.19460000 = ((((((13:0.080926,(((22:0.053641,((24:0.008450,23:0.005932):0.007762,25:0.002271):0.016604):0.022922,21:0.056048):0.044676,8:0.084780):0.043910):0.004547,(((((27:0.038264,26:0.014903):0.068578,28:0.039406):0.024956,29:0.078204):0.011090,(20:0.077406,5:0.084327):0.147984):0.009248,(((10:0.107363,((15:0.031937,16:0.016913):0.043344,7:0.095196):0.003461):0.006237,(((17:0.012950,18:0.046969):0.051608,9:0.124028):0.019065,4:0.136581):0.014109):0.020977,(19:0.076963,14:0 [...]
+   tree rep.19480000 = ((3:0.195088,6:0.090247):0.011442,(((((((7:0.127838,(10:0.127796,((9:0.102386,(18:0.059582,17:0.018075):0.041853):0.026552,((16:0.019399,15:0.029830):0.038932,4:0.124498):0.017140):0.001632):0.008881):0.013672,(19:0.078865,14:0.086439):0.019857):0.005078,(29:0.084308,(28:0.050291,(26:0.022625,27:0.033614):0.079402):0.026103):0.010689):0.016656,(((21:0.048784,(((23:0.011700,24:0.017035):0.006380,25:0.014157):0.029338,22:0.039052):0.014508):0.037181,8:0.086608):0.022 [...]
+   tree rep.19500000 = (6:0.100516,(3:0.166214,((((4:0.157782,((9:0.125347,(18:0.036610,17:0.016833):0.046662):0.061942,(10:0.122677,((16:0.043705,15:0.037665):0.025994,7:0.099391):0.024572):0.009123):0.003837):0.020340,(((19:0.105955,14:0.042801):0.048078,((28:0.047416,(26:0.020931,27:0.028232):0.103599):0.022921,29:0.077374):0.019374):0.004074,(((5:0.116425,20:0.158932):0.184663,13:0.139001):0.000026,(((22:0.068005,(25:0.014634,(23:0.007011,24:0.009384):0.007889):0.023572):0.015041,21: [...]
+   tree rep.19520000 = (6:0.088630,(3:0.108392,((((((7:0.085060,((4:0.122872,((17:0.020119,18:0.027819):0.038681,9:0.100519):0.019423):0.016387,((15:0.041311,16:0.022860):0.081415,10:0.095788):0.016059):0.007577):0.038762,(19:0.115522,14:0.064482):0.039154):0.006906,(((27:0.052862,26:0.016239):0.077031,28:0.047768):0.029873,29:0.093468):0.011342):0.011266,(((20:0.126117,5:0.110051):0.080729,13:0.085682):0.016490,((21:0.032673,(((24:0.014389,23:0.011249):0.003412,25:0.004509):0.027858,22: [...]
+   tree rep.19540000 = (((((((7:0.097951,(10:0.079055,((15:0.040684,16:0.022178):0.049487,(4:0.083359,(9:0.092472,(17:0.021775,18:0.027072):0.045218):0.030138):0.021999):0.000740):0.008176):0.013869,(19:0.079682,14:0.081405):0.006967):0.010213,(((27:0.030976,26:0.021985):0.068181,28:0.036983):0.028128,29:0.077062):0.007425):0.016184,((((22:0.022535,((24:0.014332,25:0.010675):0.002670,23:0.001460):0.025924):0.014274,21:0.045481):0.039109,8:0.083632):0.019967,((20:0.131392,5:0.075218):0.09 [...]
+   tree rep.19560000 = (3:0.115643,((((12:0.062809,11:0.028115):0.035026,(((19:0.085452,14:0.095485):0.017887,((10:0.081041,(((15:0.031562,16:0.029087):0.035619,7:0.122091):0.017640,(4:0.118119,((18:0.024412,17:0.023088):0.040017,9:0.098142):0.012540):0.015729):0.005290):0.016321,((28:0.041404,(27:0.030842,26:0.024454):0.070189):0.012902,29:0.097045):0.028454):0.001559):0.014794,((13:0.087608,(20:0.101729,5:0.105740):0.132129):0.004753,(8:0.085394,((22:0.027394,(25:0.002524,(23:0.014506, [...]
+   tree rep.19580000 = (((2:0.227890,((11:0.037864,12:0.068924):0.039505,((((28:0.039266,(26:0.033650,27:0.028976):0.063779):0.028795,29:0.079625):0.018550,((((10:0.095372,(16:0.028943,15:0.029727):0.064730):0.012070,((9:0.136714,(18:0.038647,17:0.020744):0.022376):0.018835,4:0.102197):0.018379):0.009088,7:0.092531):0.011924,(14:0.099319,19:0.067490):0.025646):0.010548):0.006494,((8:0.147757,(21:0.036499,(22:0.032687,((24:0.005283,23:0.015571):0.001552,25:0.002936):0.024153):0.013400):0. [...]
+   tree rep.19600000 = ((3:0.130712,6:0.072771):0.018858,(2:0.191002,((11:0.032169,12:0.051806):0.051572,((((29:0.090185,(28:0.053634,(26:0.037162,27:0.025876):0.048578):0.026218):0.000561,(((((16:0.034864,15:0.028035):0.064931,(4:0.125277,(9:0.109245,(18:0.028502,17:0.018463):0.033420):0.020062):0.006106):0.004824,10:0.103122):0.006228,7:0.093355):0.024348,(14:0.075374,19:0.090240):0.017766):0.003505):0.007584,(5:0.092358,20:0.134866):0.083042):0.001527,(13:0.072528,(8:0.118649,((22:0.0 [...]
+   tree rep.19620000 = (6:0.102109,((((12:0.054208,11:0.027398):0.035347,((((8:0.115101,(21:0.027839,(22:0.021167,((23:0.013207,24:0.012715):0.007609,25:0.008595):0.025608):0.018337):0.016480):0.007507,13:0.076275):0.002011,(20:0.079695,5:0.091311):0.105420):0.004298,((29:0.071625,((27:0.044396,26:0.022807):0.060208,28:0.027995):0.020839):0.016862,(((7:0.067761,(15:0.019610,16:0.026102):0.039361):0.016352,(10:0.093860,(4:0.113739,(9:0.091994,(18:0.042255,17:0.013533):0.046017):0.011936): [...]
+   tree rep.19640000 = (6:0.113406,((((12:0.055592,11:0.019016):0.040865,((((((27:0.024912,26:0.027427):0.060219,28:0.053429):0.024421,29:0.102748):0.005037,((19:0.090043,14:0.109634):0.029122,((((9:0.115475,(18:0.054287,17:0.016373):0.026995):0.049807,4:0.111202):0.014719,(7:0.089206,(15:0.033672,16:0.030876):0.044110):0.009164):0.005716,10:0.088259):0.013234):0.005698):0.010812,((20:0.083782,5:0.079140):0.066136,13:0.111407):0.002486):0.001518,(8:0.095607,((22:0.023722,(25:0.003456,(23 [...]
+   tree rep.19660000 = ((3:0.164244,6:0.074894):0.004168,(((((((14:0.079407,19:0.154521):0.032970,((28:0.057707,(26:0.028807,27:0.048709):0.066939):0.027252,29:0.105162):0.029443):0.000254,((5:0.092386,20:0.093892):0.117729,((((16:0.035254,15:0.026164):0.054993,(4:0.130113,(9:0.116899,(18:0.041073,17:0.031297):0.053728):0.024750):0.021558):0.014554,10:0.125699):0.010147,7:0.166653):0.024490):0.007837):0.007381,13:0.124747):0.003654,(((21:0.070267,22:0.026684):0.005842,((23:0.011392,24:0. [...]
+   tree rep.19680000 = ((3:0.170232,6:0.067003):0.001340,(((((((14:0.121789,19:0.070658):0.041439,(5:0.074704,20:0.110574):0.100968):0.002807,((28:0.047110,(26:0.028926,27:0.035019):0.063669):0.032789,29:0.111211):0.010212):0.003912,((10:0.092488,(7:0.081851,(16:0.025147,15:0.036126):0.050709):0.008671):0.003949,((9:0.107019,(18:0.024184,17:0.020398):0.046129):0.011067,4:0.117806):0.010818):0.034234):0.001872,(13:0.116431,((21:0.058245,(22:0.023287,(25:0.019068,(24:0.011182,23:0.009566): [...]
+   tree rep.19700000 = ((3:0.132076,6:0.098843):0.000603,(2:0.178457,((((5:0.118799,20:0.085556):0.088181,((19:0.093408,14:0.108984):0.028188,((29:0.150322,(28:0.070189,(26:0.033898,27:0.031254):0.055206):0.020799):0.015564,(7:0.108204,(10:0.078252,((16:0.015358,15:0.048753):0.074291,(4:0.115752,((18:0.031464,17:0.011866):0.047637,9:0.136362):0.017413):0.013580):0.001229):0.012593):0.029092):0.003972):0.013605):0.010320,(13:0.098499,((21:0.054494,(22:0.026534,((24:0.011131,23:0.006619):0 [...]
+   tree rep.19720000 = (6:0.065968,(3:0.137485,(2:0.225200,(((((5:0.099717,20:0.113906):0.126202,13:0.117179):0.000065,(8:0.142378,(21:0.072854,(((23:0.010933,24:0.015554):0.001199,25:0.009847):0.034015,22:0.049364):0.007093):0.028421):0.011464):0.012680,(((((16:0.014975,15:0.038027):0.042001,7:0.123481):0.015087,(4:0.167660,((18:0.050253,17:0.015898):0.060892,9:0.109310):0.010344):0.019874):0.002920,10:0.097079):0.026817,((28:0.053398,(26:0.024932,27:0.028267):0.063917):0.024343,(29:0.0 [...]
+   tree rep.19740000 = (3:0.154972,(6:0.072623,(2:0.238110,((11:0.007035,12:0.072514):0.082829,((((28:0.039121,(27:0.036218,26:0.027544):0.073669):0.036019,29:0.079945):0.007863,((14:0.087189,19:0.066570):0.028774,((4:0.117651,((17:0.021378,18:0.047036):0.056185,9:0.088277):0.035015):0.007461,(10:0.092022,((15:0.037991,16:0.031200):0.054371,7:0.128496):0.009166):0.000196):0.013324):0.004884):0.013502,(((20:0.086572,5:0.085691):0.108644,13:0.093505):0.002487,((((25:0.001841,(24:0.008945,2 [...]
+   tree rep.19760000 = ((((((21:0.063578,(22:0.015478,((24:0.013214,23:0.012965):0.008677,25:0.008726):0.044504):0.019285):0.066722,8:0.065529):0.021895,((13:0.084648,(20:0.082862,5:0.097265):0.131155):0.001820,((29:0.112358,((27:0.035871,26:0.038787):0.049476,28:0.049102):0.034162):0.001780,((19:0.081259,14:0.101918):0.013084,(7:0.122598,((10:0.083534,(15:0.042718,16:0.019012):0.056250):0.000141,(4:0.132858,(9:0.103992,(18:0.047083,17:0.016006):0.034589):0.023480):0.011632):0.008620):0. [...]
+   tree rep.19780000 = ((6:0.098733,3:0.158336):0.005766,(((11:0.029982,12:0.072652):0.051839,(((10:0.120878,(((15:0.041011,16:0.034542):0.075400,7:0.115875):0.013307,(4:0.175451,((17:0.027414,18:0.030564):0.031457,9:0.084385):0.019963):0.002483):0.011651):0.022240,(((14:0.073112,19:0.084422):0.032452,29:0.102859):0.004062,((27:0.032439,26:0.034889):0.066190,28:0.035375):0.037143):0.011651):0.011167,(((21:0.047792,(((23:0.011105,24:0.013813):0.004021,25:0.008626):0.021132,22:0.027764):0. [...]
+   tree rep.19800000 = ((6:0.107774,3:0.126741):0.000741,(2:0.155895,((11:0.043249,12:0.051927):0.046271,((((20:0.082461,5:0.131113):0.068899,13:0.100867):0.002352,(((14:0.098401,19:0.064597):0.022330,(((10:0.080004,(15:0.054086,16:0.027833):0.062861):0.007452,(((17:0.020065,18:0.024206):0.059190,9:0.104900):0.031374,4:0.125147):0.012514):0.010173,7:0.105102):0.014857):0.015316,(((27:0.034584,26:0.015569):0.059681,28:0.070947):0.007654,29:0.086641):0.011042):0.007338):0.009386,(8:0.07685 [...]
+   tree rep.19820000 = ((2:0.184174,((11:0.039982,12:0.071834):0.063156,((13:0.096876,((5:0.077847,20:0.069966):0.113127,(((((23:0.017797,24:0.015329):0.011704,25:0.013091):0.025729,22:0.032639):0.014262,21:0.040812):0.062971,8:0.099488):0.023522):0.001185):0.009149,((19:0.090088,14:0.077536):0.041042,(((((16:0.020446,15:0.040918):0.060875,(4:0.109632,((18:0.040513,17:0.031185):0.052378,9:0.103529):0.029252):0.019125):0.013238,7:0.093806):0.025934,10:0.083653):0.014278,(29:0.079743,(28:0 [...]
+   tree rep.19840000 = ((2:0.156238,((11:0.035947,12:0.054271):0.038099,((13:0.085111,(5:0.086248,20:0.089848):0.099658):0.005874,(((((25:0.007124,(24:0.009923,23:0.005507):0.004432):0.035131,22:0.041504):0.019533,21:0.032226):0.023493,8:0.079305):0.023674,((29:0.078360,(19:0.064486,14:0.087347):0.052150):0.001129,(((4:0.140880,((18:0.033730,17:0.018793):0.049077,9:0.084477):0.022793):0.016181,(10:0.107058,((16:0.032346,15:0.041181):0.047890,7:0.116088):0.009595):0.005257):0.026645,(28:0 [...]
+   tree rep.19860000 = (((((11:0.021065,12:0.069172):0.048918,(((21:0.046047,(22:0.018788,(25:0.014605,(24:0.014500,23:0.009461):0.013016):0.017693):0.014546):0.047656,8:0.108635):0.018706,(((((19:0.067307,14:0.104001):0.022314,((4:0.138820,((18:0.032320,17:0.026741):0.044149,9:0.120210):0.029164):0.004884,(((16:0.022803,15:0.046672):0.039275,7:0.103517):0.007160,10:0.103575):0.002356):0.025634):0.004153,(29:0.118052,((26:0.026055,27:0.037914):0.067533,28:0.042935):0.013823):0.013749):0. [...]
+   tree rep.19880000 = (((((12:0.080885,11:0.013898):0.075646,(((20:0.117436,5:0.098906):0.080537,(((((27:0.029175,26:0.034605):0.068369,28:0.049482):0.046915,29:0.120597):0.003442,(19:0.105957,14:0.121231):0.049369):0.011185,(((10:0.117165,(4:0.181018,(9:0.130678,(18:0.028228,17:0.039252):0.035735):0.034338):0.010262):0.000672,(15:0.038004,16:0.036116):0.062220):0.022399,7:0.086083):0.023864):0.007014):0.007213,(13:0.092433,((21:0.051568,((23:0.010067,(24:0.007569,25:0.012795):0.002427) [...]
+   tree rep.19900000 = ((2:0.149163,(((((19:0.063758,14:0.080982):0.039506,((28:0.049483,(27:0.025819,26:0.026432):0.086003):0.022793,29:0.078973):0.016985):0.014992,((8:0.085721,(21:0.040836,(22:0.026660,(25:0.002427,(23:0.009556,24:0.007697):0.009474):0.016124):0.025466):0.041044):0.030045,((20:0.103862,5:0.111923):0.086406,13:0.083957):0.003330):0.003673):0.002271,(10:0.069669,((7:0.085474,(15:0.039397,16:0.013436):0.053955):0.007129,(((17:0.014281,18:0.045933):0.051703,9:0.128454):0. [...]
+   tree rep.19920000 = ((2:0.179436,((12:0.070377,11:0.027321):0.047338,((8:0.059816,(((25:0.010098,(23:0.004214,24:0.012710):0.008582):0.020429,22:0.027993):0.010838,21:0.058277):0.024841):0.049917,((((19:0.090803,14:0.091839):0.023423,(7:0.085044,(10:0.071106,((((18:0.040107,17:0.019611):0.056858,9:0.087613):0.017433,4:0.089782):0.019272,(15:0.040529,16:0.023402):0.069883):0.006377):0.019070):0.022833):0.012662,(29:0.115165,((27:0.033027,26:0.029419):0.055265,28:0.036292):0.014321):0.0 [...]
+   tree rep.19940000 = (6:0.079201,(3:0.120628,(2:0.214353,((12:0.069711,11:0.037292):0.045117,(((13:0.149856,(20:0.103151,5:0.110272):0.089795):0.007314,(8:0.125497,(21:0.061594,((25:0.011028,(24:0.007214,23:0.008326):0.012906):0.025799,22:0.048953):0.023291):0.017009):0.022904):0.007304,(((27:0.050113,26:0.035024):0.073182,28:0.056720):0.022636,(((19:0.064069,14:0.085149):0.019665,29:0.099249):0.019315,((((15:0.034876,16:0.047711):0.071988,10:0.096750):0.002412,(((18:0.027059,17:0.0250 [...]
+   tree rep.19960000 = ((((12:0.060794,11:0.033659):0.027547,((5:0.138697,20:0.084665):0.112616,(((29:0.087806,(28:0.027607,(26:0.020261,27:0.036656):0.059056):0.036056):0.020689,((10:0.092977,((((18:0.051240,17:0.042850):0.058051,9:0.119432):0.025584,4:0.146860):0.003069,(7:0.094007,(16:0.037552,15:0.024831):0.058028):0.008260):0.001903):0.033482,(19:0.091035,14:0.104037):0.008081):0.008708):0.011234,(13:0.072220,((21:0.027669,((25:0.006294,(24:0.006126,23:0.009778):0.013359):0.031632,2 [...]
+   tree rep.19980000 = ((6:0.077440,3:0.129200):0.024683,(((((((19:0.076669,14:0.094268):0.042734,((((15:0.033461,16:0.022818):0.046899,(((17:0.014933,18:0.036161):0.039299,9:0.120072):0.035065,4:0.116107):0.008422):0.001115,10:0.099222):0.011995,7:0.067431):0.012274):0.007834,(29:0.086585,((27:0.040461,26:0.025743):0.086865,28:0.046135):0.029269):0.002877):0.016571,(20:0.087191,5:0.085727):0.074135):0.008042,(13:0.091054,(8:0.122022,((22:0.035040,((23:0.005229,24:0.012051):0.002720,25:0 [...]
+   tree rep.20000000 = ((((12:0.051965,11:0.032413):0.016147,((13:0.093697,(8:0.066742,(21:0.037929,((25:0.008367,(24:0.006328,23:0.003276):0.005185):0.010758,22:0.042690):0.016283):0.028262):0.011734):0.005941,((20:0.077149,5:0.125902):0.096643,((((27:0.036265,26:0.019580):0.076233,28:0.039385):0.025821,29:0.067186):0.014647,((7:0.135274,(((4:0.125846,((18:0.039436,17:0.020257):0.022057,9:0.128686):0.029392):0.033122,(15:0.035315,16:0.017169):0.050937):0.000439,10:0.094141):0.002143):0. [...]
+end;
diff --git a/doc/source/examples/pythonidae.mle.nex b/doc/source/examples/pythonidae.mle.nex
new file mode 100644
index 0000000..ae41727
--- /dev/null
+++ b/doc/source/examples/pythonidae.mle.nex
@@ -0,0 +1,46 @@
+#NEXUS
+
+
+BEGIN TAXA;
+    DIMENSIONS NTAX=33;
+    TAXLABELS
+        Python_regius
+        Python_sebae
+        Python_molurus
+        Python_curtus
+        Morelia_bredli
+        Morelia_spilota
+        Morelia_tracyae
+        Morelia_clastolepis
+        Morelia_kinghorni
+        Morelia_nauta
+        Morelia_amethistina
+        Morelia_oenpelliensis
+        Antaresia_maculosa
+        Antaresia_perthensis
+        Antaresia_stimsoni
+        Antaresia_childreni
+        Morelia_carinata
+        Morelia_viridisN
+        Morelia_viridisS
+        Apodora_papuana
+        Liasis_olivaceus
+        Liasis_fuscus
+        Liasis_mackloti
+        Antaresia_melanocephalus
+        Antaresia_ramsayi
+        Liasis_albertisii
+        Bothrochilus_boa
+        Morelia_boeleni
+        Python_timoriensis
+        Python_reticulatus
+        Xenopeltis_unicolor
+        Candoia_aspera
+        Loxocemus_bicolor
+  ;
+END;
+
+BEGIN TREES;
+    TREE 0 = [&U] (((Python_regius:0.1058922755,((Python_sebae:0.0629755585,Python_molurus:0.0335903967):0.02165,Python_curtus:0.1067094932):0.016163):0.032743,(((((Morelia_bredli:0.0274921037,Morelia_spilota:0.0241663426):0.026356,((Morelia_tracyae:0.0377936102,((Morelia_clastolepis:0.0045446653,(Morelia_kinghorni:0.0075825724,Morelia_nauta:0.0086155842):0.004182):0.018597,Morelia_amethistina:0.0227641045):0.007181):0.024796,Morelia_oenpelliensis:0.0579745143):0.004283):0.031732,((Antar [...]
+END;
+
diff --git a/doc/source/examples/pythonidae.nex b/doc/source/examples/pythonidae.nex
new file mode 100644
index 0000000..11860d2
--- /dev/null
+++ b/doc/source/examples/pythonidae.nex
@@ -0,0 +1,97 @@
+#NEXUS
+
+[
+    Python Cytochrome-B data GenBank, downloaded 2009-11-25.
+
+    References:
+
+    AUTHORS   Rawlings,L.H., Rabosky,D.L., Donnellan,S.C. and Hutchinson,M.N.
+    TITLE     Python phylogenetics: inference from morphology and mitochondrial
+              DNA
+    JOURNAL   Biol. J. Linn. Soc. Lond. 93 (3), 603-619 (2008)
+              Submitted (10-APR-2007) Evolutionary Biology Unit, South Australian
+              Museum, North Terrace, Adelaide, South Australia 5000, Australia
+
+    AUTHORS   Campbell,B.N.
+    TITLE     Hic Sunt Serpentes - Molecular phylogenetics and the Boidae
+              (Serpentes: Booidea)
+    JOURNAL   Thesis (1997) Queen's University, Kingston, Ontario, Canada
+
+    AUTHORS   Harvey,M.B., Ammerman,L.K., Barker,D.G. and Chippindale,P.T.
+    TITLE     Systematics of pythons of the Morelia amethistina complex with the
+              description of three new species
+    JOURNAL   Herpetological Monographs (2000)
+
+]
+
+begin taxa;
+    dimensions ntax=29;
+    taxlabels
+        'Python regius'
+        'Python sebae'
+        'Python brongersmai'
+        'Antaresia maculosa'
+        'Python timoriensis'
+        'Python molurus'
+        'Morelia carinata'
+        'Morelia boeleni'
+        'Antaresia perthensis'
+        'Morelia viridis'
+        'Aspidites ramsayi'
+        'Aspidites melanocephalus'
+        'Morelia oenpelliensis'
+        'Bothrochilus boa'
+        'Morelia bredli'
+        'Morelia spilota'
+        'Antaresia stimsoni'
+        'Antaresia childreni'
+        'Leiopython albertisii'
+        'Python reticulatus'
+        'Morelia tracyae'
+        'Morelia amethistina'
+        'Morelia nauta'
+        'Morelia kinghorni'
+        'Morelia clastolepis'
+        'Liasis fuscus'
+        'Liasis mackloti'
+        'Liasis olivaceus'
+        'Apodora papuana'
+  ;
+end;
+
+begin characters;
+    dimensions  nchar=1114;
+    format datatype=dna gap=- missing=? matchchar=.;
+    matrix
+'Python regius'                ATGCCCCACCACTATATCCTAACCCTCTTCGGCCTTCTACCAGTAGCAACCAACATCTCAACATGATGAAACTTCGGCTCAATACTACTAACATGTCTAATGTTACAAGTACTTACCGGCTTCTTCCTAGCTGTCCACTATACAGCAAACATCAACCTAGCATTTTCATCCATTATCCATATTACCCGTGACGTCCCCTACGGCTGACTAATACAAAACCTACACGCCATCGGCGCATCGATATTCTTTATCTGCATCTACATTCACATCGCACGAGGACTATACTACGGCTCCCACCTCAATAAAGAAACCTGGATATCAGGTATTACACTTCTCATCACACTGATGGCAACCGCCTTCTTCGGGTATGTACTCCCATGAGGACAAATATCCTTCTGAGCCGCAACAGTAATTACCAACCTACTCACTGCTGTACCGTACCTAGGCGCAACCATAACCAC [...]
+'Python sebae'                 ATGCCACACCATTATATCTTAACCCTATTCGGACTCCTACCAGTAGCAACCAACATTTCAACATGATGAAATTTCGGCTCAATACTACTAACATGTTTAGCCTTACAAACGCTCACAGGCTTCTTCCTAGCTGTCCACTACACAGCAAACATTAACCTAGCATTCTCATCTATCATTCACATCATCCGTGACGTCCCACATGGCTGAATAATACAAAACCTGCACGCCATCGGCGCATCTATATTCTTTATTTGCATTTACATCCACATCGCACGAGGCCTATACTATGGATCCTATCTTAACAAAGAAACCTGAATATCAGGTATCACACTCCTCATCATCCTAATAGCAACCGCGTTCTTCGGCTACGTCCTCCCATGAGGACAAATATCATTCTGAGCCGCAACAGTAATCACCAACCTACTCACTGCTGTACCCTACCTAGGAACAACTCTAACAAC [...]
+'Python brongersmai'           ------------------------------------------------------------------------TTCGGTTCAATATTACTCACTTGCCTAGTCCTACAAGTACTAACCGGCTTCTTCCTAGCCGTCCACTACACAGCAAACATCAACCTAGCATTTTCCTCTATTATACACATCACCCGCGACGTCCCATACGGCTGAATAATACAAAACTTACACGCYATCGGCGCATCTATATTTTTCATCTGCATCTATATCCACATCGCACGAGGACTATATTACGGCTCCTATCTCAATAAAGAAACCTGAATGTCTGGCATTACACTCCTCATCACACTAATAGCAACCGCTTTTTTCGGATATGTCCTCCCATGAGGACAGATGTCATTCTGAGCCGCAACCGTAATCACCAATCTACTAACTGCTGTACCATACCTAGGCACAACCCTAACAAC [...]
+'Antaresia maculosa'           ATGCCCCACCACTACATTCTAACCCTATTTGGTCTTCTACCTGTAGCAACAAATATCTCAACATGATGAAACTTCGGCTCAATATTACTAACATGTCTGGCCCTACAAGTATTGACCGGATTCTTCTTAGCCATCCACTACACAGCAAACATCAACTTAGCATTCTCATCTATTATTCACATCACCCGAGATGTCCCATATGGCTGAATAATACAAAACCTACACGCCATCGGAGCCTCCATATTCTTCATTTGCATTTACATTCACATTGCACGAGGACTATACTACGGATCCTACCTCAATAAAGAAACCTGAATGTCTGGCATCACCCTTCTTATCACACTAATAGCAACAGCCTTCTTCGGTTACGTTCTCCCATGGGGACAGATATCATTCTGAGCCGCAACCGTAATCACAAACTTACTTACCGCCGTCCCATACCTAGGCRTATCACTAACAAC [...]
+'Python timoriensis'           ------------------------------------------------------------------------TTCGGCTCACTACTATTAACATGTCTAGCCCTACAAGTATTAACTGGTTTTTTCCTAGCCGTTCACTACACAGCAAACATTAACCTGGCATTTTCATCCATCATTCACATCACCCGAGACGTCCCATACGGTTGAATGATACAAAACCTCCACGCCATCGGAGCATCCATATTTTTCATTTGTATTTACATCCACATCGCACGAGGCCTATACTACGGATCATATYTTAACAAAGAAACTTGAATATCAGGCATCACCCTACTCATCACATTAATAGCTACTGCTTTCTTCGGATATGTTCTTCCATGAGGACAAATATCATTCTGRGCCGCAACTGTAATTACAAACCTACTTACAGCCGTACCATACCTGGGCACATCATTAACAAC [...]
+'Python molurus'               ATGCCCCACCACTATATCCTAACCTTATTTGGCCTCCTACCAGTAGCAACCAACATCTCAACATGATGAAACTTCGGCTCAATACTATTAGCATGCTTAGCCCTACAAGTATTAACCGGATTCTTCCTAGCCGTCCACTACACAGCAAACATCAACCTAGCATTCTCATCTATCATTCACATCACCCGCGATGTTCCATACGGCTGAATAATACAAAACCTACACGCTATCGGCGCATCCATATTCTTCATCTGCATCTACATTCACATCGCACGAGGACTATACTACGGCTCCTATCTAAATAAAGAAACCTGAATATCCGGAATTACACTACTCATCACACTTATGGCAACCGCCTTCTTCGGATATGTCCTCCCATGAGGGCAAATATCATTCTGAGCTGCAACCGTAATTACCAACCTATTAACCGCCGTACCATACTTAGGCACAACCCTAACAAC [...]
+'Morelia carinata'             -----------------------------------------------------------------------CTTCGGCTCGATACTATTAACATGTTTAGCCCTACAAGTATTAACCGGCTTCTTCTTAGCTGTTCACTACACAGCAAACATTAACCTAGCATTCTCATCCATCATTCACATCACCCGAGACGTCCCATACGGCTGAATAATACAAAATCTGCACGCCATCGGAGCATCCATATTCTTCATCTGCATTTACATTCATATTGCACGAGGACTATACTATGGGTCTTACCTCAACAAAGAAACCTGAATATCTGGTATCACCCTGCTAATTATCCTAATAGCAACCGCCTTCTTCGGCTATGTCCTCCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATCACAAACCTACTCACCGCCGTACCCTACTTAGGCACATCACTAACAAC [...]
+'Morelia boeleni'              ------------------------------------------------------------------------TTCGGCTCTATACTATTAACATGCTTAGGCTTACAAGTAATAACCGGCTTCTTCCTAGCCGTACACTACACAGCAAACATCAACTTAGCATTCTCATCCATCATCCACATCACCCGAGACGTCCCATACGGCTGAATAATACAAAACTTGCACGCTATCGGAGCATCTATATTCTTCATCTGCATTTACATCCACATCGCACGAGGGTTGTACTACGGATCATACCTTAACAAAGAAACCTGAATATCTGGCATTACCCTACTTATCACATTAATAGCAACTGCCTTCTTTGGATACGTTCTCCCATGAGGACAAATATCATTCTGAGSSGCWRCMGTWATCACAAACCTACTCACTGCCATCCCTTATCTAGGCACATCACTAACAAC [...]
+'Antaresia perthensis'         ------------------------------------------------------------------------TTCGGMTCAATACTACTAACATGTTTAGCCTTACAAGTACTAACCGGCTTTTTTTTGGCCGTCCACTACACAGCAAACATCAACCTGGCATTTTCATCCATCATTCACATTACCCGAGACGTCCCATATGGCTGAATAATACAAAACCTGCACGCCATCGGAGCATCCATATTCTTCATTTGCATCTACATTCACATTGCACGCGGACTCTACTACGGATCCTACCTCAACAAAGAAACCTGGATATCGGGAATTACCCTCCTCATCACACTGATAGCTACCGCCTTCTTCGGCTACGTCCTCCCATGAGGACAGATATCATTCTGAGCCGCAACAGTAATCACCAACCTACTCACCGCTGTACCCTACCTAGGCACATCACTAACAAC [...]
+'Morelia viridis'              ------------------------------------------------------------------------TTCGGYTCAATACTATTAACATGCCTAGCCCTACAAGTATTAACCGGCTTCTTCCTAGCCGTTCACTACACAGSAAACATTAATCTAGCATTCTCATCCATCATCCACATCTCCCGAGATGTTCCATACGGTTGAATAATACAAAACCTACACGCCATCGGAGCATCCATATTCTTCATTTGCATCTACATCCATATTGCACGAGGATTATACTATGGATCCTACCTCAACAAAGAAACCTGAATATCCGGTATTACCCTACTCATCACACTAATAGCAACCGCCTTCTTCGGCTATGTCCTCCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATCACAAACCTACTTACCGCTGTACCCTACCTGGGTACATCACTAACAAC [...]
+'Aspidites ramsayi'            ------------------------------------------------------------------------TTCGGCTCAATACTACTAACATGCTTAGGTYTACAAGTACTAACCGGCTTYTTCCTAGCCGTCCACTACACCGCAAACATTAACCTGGCATTCTCATCTATCGTTCACATCACCCGAGATGTCCCATACGGCTGAATAATACAAAACCTACACGCCATCGGAGCATCCATATTCTTCATTTGTATCTACATTCACATTGCACGAGGATTATACTACGGATCCTACCTTAACAAAGAAACCTGAATATCGGGTATTACATTACTCATTACACTAATAGCAACCGCCTTCTTCGGATATGTCCTTCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATTACAAACCTACTCACCGCCGTACCATACCTAGGTACATCTCTAACAAC [...]
+'Aspidites melanocephalus'     ATGCCCCACCACTACATCCTAACCCTATTTGGCCTTCTGCCTGTAGCAACTAACATCTCAACATGATGAAACTTCGGCTCAATACTACTAACATGTTTAGGCCTACAAGTACTAACCGGCTTCTTCCTAGCCGTCCACTACACCGCAAACATTAACCTGGCATTCTCATCTATCGTTCACATCTCCCGAGATGTCCCATACGGCTGAATAATACAAAACCTACATGCAATCGGAGCATCCATATTCTTCATCTGTATCTACATTCACATTGCACGAGGATTATACTACGGATCCTACCTTAACAAAGAAACCTGAATATCAGGCATCACACTACTCATCACACTAATAGCGACCGCTTTCTTCGGATATGTGCTTACATGAGGACAAATATCATTATGAGCCGCAACCGTAATCACAAACCTACTCACCGCCGTGCCCTACCTAGGCACATCTCTAACAAC [...]
+'Morelia oenpelliensis'        ------------------------------------------------------------------------TTCGGCTCAATACTATTAACATGCCTAGCCCTACAAGTACTAACCGGCTTCTTCCTAGCCGTCCACTACACAGCAAATATCAACCTAGCATTTTCATCCATTATCCACATCACCCGTGACGTCCCATACGGTTGAATAATACAAAACCTACACGCCATCGGAGCATCCATATTCTTCATTTGCATTTATATTCACATCGCTCGAGGACTATACTATGGGTCATACCTTAACAAAGAAACCTGAATATCCGGTATCACCCTACTCATTACACTAATAGCAACCGCCTTCTTCGGATATGTTCTTCCATGAGGACAGATATCATTCTGAGCCGCAACCGTAATTACAAACCTACTTACCGCCGTACCATACCTAGGCACATCACTAACAAC [...]
+'Bothrochilus boa'             ------------------------------------------------------------------------TTTGGCTCAATATTATTAACATGCCTGGCCCTACAAGTACTAACCGGCTTCTTCCTGGCCGTCCACTACACAGCAAACATCAACCTGGCATTCTCATCCATTATTCACATCACCCGAGATGTCCCATATGGCTGAATAATACAAAACCTGCACGCCATCGGAGCATCCATATTCTTCATTTGCGTATACATTCACATCGCACGAGGACTATACTACGGGTCATACCTAAACAAAGAAACCTGAATATCTGGCATTACCCTGCTCATCACACTAATAGCGACCGCCTTCTTTGGATATGTCCTCCCGTGAGGACAGATATCATTCTGAGCCGCAACCGTAATTACAAACCTGTTAACAGCAGTACCCTACCTGGGCACATCACTAACAAC [...]
+'Morelia bredli'               ------------------------------------------------------------------------TTCGGCTCAATACTATTAACATGCCTAGCCCTGCAAATCCTAACCGGCTTCTTTTTAGCGGTCCACTACACAGCAAACATCAACCTAGCATTCTCATCCATCATCCACATTACCCGAGACGTCCCATACGGCTGAATAATACAAAASCTACACGCCATCGGAGCATCCCTATTCTTCATCTGCATCTACATCCATATCGCACGTGGGTTATACTACGGATCCTATCTCAACAAAGAAACCTGAATATCCGGTATTACCCTACTCATCACACTAATAGCAACTGCCTTCTTCGGTTATGTCCTTCCATGAGGACAAATATCATTCTRRGCCGCAACTGTAATTACAAATCTACTCACCGCCGTACCATACCTGGGCACATCACTAACAAC [...]
+'Morelia spilota'              ATGCCCCACCACTACATCCTAACCTTATTTGGCCTTCTCCCCGTAGCAACCAATATCTCAACATGATGAAACTTCGGCTCAATACTATTAACATGCCTAGCCCTACAAGTTCTAACCGGCTTCTTCTTAGCTGTCCACTACACAGCAAACATTAACCTGGCATTCTCATCCATCATTCACATTACCCGAGACGTCCCATACGGCTGGATAATACAAAACCTACACGCCATCGGAGCATCTATATTCTTCATTTGCATCTACATCCATATTGCACGTGGATTATACTACGGATCCTATCTCAACAAAGAAACCTGAATATCCGGCATTACCCTACTCATCACACTAATAGCAACCGCCTTCTTCGGTTACGTCCTCCCATGAGGACAAATGTCATTCTAAGCCGCAACTGTAATTACAAACCTACTCACCGCCGTACCCTACCTAGGCACATCTCTAACAAC [...]
+'Antaresia stimsoni'           ------------------------------------------------------------------------TTCGGCTCAATACTATTAACATGTCTAGCCCTACAAGTATTAACCGGCTTTTTCCTAGCCGTTCATTATACAGCAAACATTAACCTAGCATTTTCATCCATCGTTCACATTACCCGAGACGTCCCATACGGCTGAATAATACAAAACCTACACGCCATCGGAGCATCCATATTCTTTATTTGTATTTATATTCACATCGCACGCGGACTATACTATGGATCCTACCTCAACAAAGAAACCTGAATATCCGGTATCACCCTGCTCATCACACTAATAGCAACCGCCTTCTTCGGCTATGTCCTCCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATCACAAACCTACTCACCGCCGTACCATACCTAGGCACATCGCTAACAAC [...]
+'Antaresia childreni'          ATGCCCCACCACTACATTCTAACCCTATTCGGCCTTCTGCCTGTAGCAACCAACATCTCAACATGATGAAACTTCGGCTCAATACTATTAACATGTCTAGCCCTACAAGTATTAACCGGTTTTTTCTTAGCTGTTCACTATACAGCAAACATTAACCTAGCATTTTCATCCATCGTTCACATTACCCGAGACGTCCCATATGGCTGAATAATACAAAACCTACACGCCATCGGAGCATCCATATTCTTTATTTGTATTTATATTCACATCGCACGCGGACTATACTATGGATCCTACCTCAACAAAGAAACCTGAATATCCGGTATCACCCTGCTCATCACACTAATAGCAACCGCCTTCTTCGGCTATGTTCTCCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATCACAAACCTACTCACCGCCGTACCATACCTGGGCACATCACTAACAAC [...]
+'Leiopython albertisii'        ATGCCCCACCACTACATTTTAACCCTATTTGGCCTCCTACCCGTAGCAACCAACATCTCAACATGATGAAACTTTGGTTCAATACTATTAACATGCTTAGCTCTACAGGTACTAACCGGCTTCTTCCTAGCCGTCCACTACACAGCAAACATCAACCTAGCATTTTCATCCATCATCCACATTACCCGAGATGTCCCATTCGGCTGAATAATACAAAACCTACACGCCATCGGAGCATCCATATTCTTCATTTGCATCTACATTCACATCGCACGGGGGCTCTACTACGGATCATACCTAAACAAAGAAACCTGAATATCCGGCATTACCCTACTCATCACACTGATAGCAACCGCCTTCTTCGGATACGTCCTCCCATGAGGACAAATATCATTCTGAGCTGCAACCGTAATCACAAACCTACTAACCGCCGTACCCTACCTAGGCACATCACTAACAAC [...]
+'Python reticulatus'           ATGCCCCACCATTATATCCTAACCTTATTTGGCCTTCTACCAGTAGCAACCAACATCTCAACCTGATGAAACTTCGGCTCAATATTACTAACATGTCTAGCCTTACAAGTACTAACCGGCTTTTTCCTAGCCGTCCATTACACAGCAAACATTAACCTAGCATTTTCATCCATCATCCACATCACCCGAGACGTCCCATACGGCTGAATAATACAAAACCTTCACGCTATCGGAGCATCCATATTCTTCATCTGCATCTACATCCACATCGCACGAGGCCTATACTACGGATCATACCTCAACAAAGAAACCTGAATATCAGGCATCACCCTACTCATCACACTAATAGCCACCGCTTTTTTTGGTTACGTCCTTCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATTACAAACCTACTCACTGCCGTACCATACCTAGGTACATCACTAACAAC [...]
+'Morelia tracyae'              --------CCACTACATCCTAACCCTATTTGGCCTCCTACCAGTAGCAACCAACATTTCAACATGATGAAACTTCGGCTCAATACTACTAACATGTCTAGCTCTACAAGTACTAACCGGCTTCTTTCTAGCCGTACACTACACAGCAAACATTAACCTAGCATTTTCATCCATCATTCACATTACCCGAGACGTCCCATACGGCTGAATAATACAAAATCTACACGCTATCGGAGCATCCATATTCTTCATTTGCATTTACATCCACATCGCACGAGGACTATACTACGGATCTTACCTAAACAAAGAAACTTGAATATCAGGCATTACCCTACTCATCACACTAATAGCAACTGCCTTCTTTGGATACGTCCTCCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATTACAAACCTACTTACCGCCATCCCATACCTAGGCACATCTCTAACAAC [...]
+'Morelia amethistina'          --------CCACTACATCCTAACCTTATTTGGCCTCCTACCGGTAGCAACCAACATTTCAACATGATGAAACTTCGGCTCAATACTACTAACATGCCTGGCACTACAAGTACTAACCGGCTTCTTCCTAGCCGTACACTACACAGCAAACATTAACCTAGCATTCTCATCCATCATCCACATCACCCGAGACGTCCCATATGGCTGAATAATACAAAACCTGCACGCTATTGGAGCATCCATATTCTTCATCTGCATCTACATTCATATCGCACGAGGACTATACTACGGATCATACCTCAACAAAGAAACCTGAATATCCGGCATTACCCTGCTCATCACACTAATAGCAACCGCTTTCTTCGGATACGTCCTCCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATTACAAACCTACTTACCGCCATCCCATACCTAGGCACATCTCTAACAAC [...]
+'Morelia nauta'                --------CCACTACATCTTAACCTTATTTGGCCTCCTACCGGTAGCAACCAACATTTCAACATGATGAAACTTCGGCTCAATACTACTAACATGCCTAGCGCTACAAGTACTAACCGGCTTCTTCCTAGCCGTACACTACACAGCGAACATTAACCTAGCATTTTCATCCATCATCCACATTACCCGAGACGTCCCATATGGCTGAATAATACAGAACCTACACGCTATCGGAGCATCCATATTCTTCATCTGCATTTACATCCACATCGCACGAGGACTATACTACGGATCATACCTAAACAAAGAAACCTGAATATCCGGCATCACCCTGCTCATCACACTAATAGCAACTACCTTCTTCGGATACGTCCTCCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATTACAAACCTACTTACCGCTATTCCTTACCTAGGCACATCACTGACAAC [...]
+'Morelia kinghorni'            --------CCACTACATCTTAACCTTATTTGGCCTCCTACCAGTAGCAACCAACATTTCAACATGATGAAACTCCGGCTCAATACTACTAACATGTCTAGCACTACAAGTACTAACCGGCTTCTCCCTAGCTGTACACTACACAGCGAACATTAACCTAGCATTTTCATCCATCATCCACATTACCCGAGACGTCCCATATGGCTGAATAATACAGAACCTACACGCTATCGGAGCATCCATATTCTTCATCTGCATTTACATCCACATCGCACGAGGACTATACTACGGATCATACCTAAACAAAGAAACCTGAATATCCGGCATCACCCTGCTCATCACACTAATAGCAACTGCCTTCTTCGGATACGTTCTCCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATTACAAACCTACTTACCGCTATTCCTTACCTAGGCACATCACTGACAAC [...]
+'Morelia clastolepis'          --------CCACTACATCTTAACCCTATTTGGCCTCCTACCAGTAGCAACCAACATTTCAACATGATGAAACTTCGGCTCAATACTACTAACATGCCTAGCACTACAAGTACTAACCGGCTTCTTCCTAGCCGTACACTACACAGCGAACATTAACCTAGCATTCTCATCCATCATCCACATTACCCGAGACGTCCCATATGGCTGAATAATACAAAACCTACACGCTATCGGAGCATCCATATTCTTCATTTGCATTTACATTCACATCGCACGAGGACTATACTACGGATCTTACCTAAACAAAGAAACCTGAATATCCGGCATCACCCTGCTCATCACACTAATAGCAACTGCCTTCTTCGGATACGTCCTCCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATTACAAACCTACTTACCGCTATTCCTTACCTAGGCACATCACTGACAAC [...]
+'Liasis fuscus'                ------------------------------------------------ACCAATATTTCAACATGATGAAACTTCGGCTCAATACTACTAACATGTTTAGCCCTACAAGTATTAACCGGATTCTTCCTGGCTGTCCACTATACAGCAAATATTGACCTGGCATTCTCATCCATCATCCACATCACTCGAGACGTCCCATACGGCTGAATAATACAAAACCTACACGCCATCGGAGCATCAATATTCTTCATTTGTATCTACATCCACATCGCCCGAGGCCTATACTACGGATCATACCTCAACAAAGAAACCTGAATATCCGGCATCACCCTACTTATCACACTAATAGCAACCGCCTTCTTCGGGTACGTCCTTCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATCACAAACCTTCTTACCGCCGTACCCTACCTAGGCACATCCTTGACAAC [...]
+'Liasis mackloti'    ATGCCCCACCACTACGTTCTAACCCTATTTGGTCTCTTACCAGTAGCAACCAATATTTCAACATGATGAAACTTCGGCTCAATACTACTAACATGTTTAGCCCTACAAGTACTAACCGGATTCTTCCTGGCTGTCCACTACACAGCAAATATTAACCTGGCATTCTCATCCATCGTTCACATCACTCGAGATGTCCCATACGGCTGAATGATACAAAACCTACACGCCATCGGAGCATCTATATTCTTTATTTGTATCTACATCCACATCGCCCGAGGCCTATACTACGGATCATACCTTAACAAAGAAACCTGAATATCCGGTATTACCCTGCTTATCACACTAATAGCAACCGCCTTCTTCGGATACGTCCTTCCATGAGGACAAATATCATTCTGAGCCGCAACCGTAATTACAAACCTTCTCACCGCCGTACCCTACCTAGGCACATCCTTGACAACCTGGCTATGA [...]
+'Liasis olivaceus'             ATGCCCCACCACTACATTCTAACCCTGTTCGGCCTCTTACCAGTAGCAACCAACATTTCAACATGATGAAACTTCGGTTCAATACTACTAACATGCCTAGTCCTACAAGTATTAACCGGTTTCTTCCTAGCTGTCCACTACACAGCAAACATCAATCTAGCATTCTCATCCATCGTTCACATTACCCGAGACGTCCCATACGGCTGAATAATACAAAACCTACACGCTATCGGAGCATCTATATTCTTCATTTGCATCTACATCCATATCGCACGAGGTCTATACTACGGATCATACCTTAACAAAGAAACCTGAATATCTGGTATCACCCTACTCATCACACTAATAGCAACCGCTTTCTTCGGATATGTCCTTCCATGGGGACAAATATCATTCTGGGCCGCAACCGTAATCACAAACCTACTCACTGCCGTACCCTATCTAGGCACATCACTAACAAC [...]
+'Apodora papuana'               ATGCCCCACCATTACATCCTAACCCTGTTCGGCCTCCTACCAGTAGCAACCAACATTTCAACATGATGAAACTTCGGCTCAATACTACTAACATGCCTAGCCCTACAAGTATTAACTGGCTTCTTCCTGGCCGTACACTACACAGCAAACATCAACCTAGCATTCTCATCCATCATTCACATCACCCGAGATGTCCCATACGGCTGAATAATACAAAACTTACACGCCATCGGAGCATCCATATTCTTCATCTGTATCTACATCCATATTGCACGGGGCCTATACTACGGATCGTACCTAAATAAAGAAACCTGAATATCTGGCATCACCCTACTCATCACACTAATAGCAACCGCCTTCTTCGGATATGTCCTTCCATGAGGACAAATGTCATTCTGAGCCGCAACTGTAATCACAAATCTGCTCACTGCAGTACCCTACCTGGGTACATCACTAACAA [...]
+    ;
+end;
+
diff --git a/doc/source/examples/pythonidae.random.bd0301.tre b/doc/source/examples/pythonidae.random.bd0301.tre
new file mode 100644
index 0000000..2ed6573
--- /dev/null
+++ b/doc/source/examples/pythonidae.random.bd0301.tre
@@ -0,0 +1,143 @@
+#NEXUS
+[written Sun Jan 31 17:15:31 CST 2010 by Mesquite  version 2.72 (build 528) at glyphoglossus/10.0.1.2]
+BEGIN TREES;
+	Title Simulated_Trees;
+	TRANSLATE
+		1 Xenopeltis_unicolor,
+		2 Loxocemus_bicolor,
+		3 Morelia_spilota,
+		4 Morelia_bredli,
+		5 Morelia_carinata,
+		6 Morelia_amethistina,
+		7 Morelia_oenpelliensis,
+		8 Morelia_boeleni,
+		9 Morelia_viridisS,
+		10 Morelia_viridisN,
+		11 Liasis_olivaceus,
+		12 Liasis_mackloti,
+		13 Liasis_fuscus,
+		14 Liasis_albertisii,
+		15 Apodora_papuana,
+		16 Bothrochilus_boa,
+		17 Antaresia_maculosa,
+		18 Antaresia_stimsoni,
+		19 Antaresia_childreni,
+		20 Antaresia_perthensis,
+		21 Antaresia_melanocephalus,
+		22 Antaresia_ramsayi,
+		23 Python_reticulatus,
+		24 Python_timoriensis,
+		25 Python_sebae,
+		26 Python_molurus,
+		27 Python_curtus,
+		28 Python_regius,
+		29 Candoia_aspera,
+		30 Morelia_nauta,
+		31 Morelia_clastolepis,
+		32 Morelia_tracyae,
+		33 Morelia_kinghorni;
+	TREE  [!(#extinctions 14; generations: 39709)] 'Tree # 1 simulated by Birth/Death Process Trees' = ((((21:2.3350593241655426,((30:0.038794665275816355,17:0.038794665275816355):0.8278411765843889,4:0.8666358418601952):1.4684234823052804):3.2516188313688894,23:5.586678155534523):0.864785359254465,((18:3.3312622120012305,(((11:1.52056119128188,(12:0.6117932350513205,9:0.6117932350513205):0.9087679562303902):1.214986092913508,(2:0.40572142043739157,(29:0.024221235862369336,33:0.024221235862 [...]
+	TREE  [!(#extinctions 7; generations: 33225)] 'Tree # 2 simulated by Birth/Death Process Trees' = (((27:3.162029268365248,(18:2.3883929124914562,(30:1.299811057170669,(15:1.0674930804781402,(32:0.02881742994008188,2:0.02881742994008188):1.0386756505380481):0.23231797669246232):1.088581855320866):0.7736363558740387):6.92692315275791,((6:1.6738589502710228,1:1.6738589502710228):2.5614925181915273,(17:0.13311641673489663,26:0.13311641673489663):4.102235051727387):5.85360095266092):0.682943 [...]
+	TREE  [!(#extinctions 19; generations: 52122)] 'Tree # 3 simulated by Birth/Death Process Trees' = (((((20:0.1878557192397196,18:0.1878557192397196):6.863060827065965,(23:1.6906880436228036,(10:1.5703618532924346,11:1.5703618532924346):0.12032619033033272):5.360228502683391):0.05937070607990354,((4:2.2311211749519004,(25:0.9207723611452335,(3:0.7320841066687592,(31:0.2018055320564755,13:0.2018055320564755):0.5302785746123252):0.1886882544765277):1.310348813806433):0.28329506827430584,19 [...]
+	TREE  [!(#extinctions 20; generations: 47083)] 'Tree # 4 simulated by Birth/Death Process Trees' = (((((31:0.7249181870739579,29:0.7249181870739579):7.237887809326493,(17:6.355231025881894,((((28:0.33374458444846755,((16:1.7677669529663688E-4,19:1.7677669529663688E-4):0.2753235213194502,9:0.27550029801474685):0.05824428643372452):0.6456608857325888,18:0.9794054701810271):1.3882629954849879,2:2.367668465666329):0.6214020615384879,(4:0.8768328476388798,15:0.8768328476388798):2.11223767956 [...]
+	TREE  [!(#extinctions 15; generations: 48752)] 'Tree # 5 simulated by Birth/Death Process Trees' = ((((1:2.1723329224446823,19:2.1723329224446823):1.8102923986476105,((((29:0.723648125754956,33:0.723648125754956):1.1505488358897327,((15:0.864288636958484,(9:0.253831097028132,22:0.253831097028132):0.6104575399304025):0.4580324678121864,(4:1.7677669529663688E-4,7:1.7677669529663688E-4):1.3221443280755198):0.5518758568738491):0.0761124474331304,10:1.9503094090781268):1.4385323029809192,(14 [...]
+	TREE  [!(#extinctions 5; generations: 31062)] 'Tree # 6 simulated by Birth/Death Process Trees' = ((((((12:0.8804598947445802,1:0.8804598947445802):3.6109585940077418,((20:0.04136857530614305,10:0.04136857530614305):0.34215073628790926,(28:0.2827523400677902,21:0.2827523400677902):0.1007669715262707):4.10789917715843):1.6051236589526237,(14:4.893094909371375,(17:4.5846733934366215,22:4.5846733934366215):0.30842151593497913):1.203447238333603):0.04780401528930806,((23:4.803585346021296,( [...]
+	TREE  [!(#extinctions 20; generations: 53054)] 'Tree # 7 simulated by Birth/Death Process Trees' = ((((((24:3.1080117065082478,((4:0.7671832135948907,26:0.7671832135948907):2.292378732373643,8:3.0595619459683725):0.048449760539904546):0.19357862277683544,(3:2.7685801276898796,(13:1.91426504046319,30:1.91426504046319):0.8543150872269728):0.5330102015953322):2.6730726647394563,(9:4.799124534105554,7:4.799124534105573):1.1755384599190737):0.0430391488041006,(((((19:0.1601796713642937,23:0. [...]
+	TREE  [!(#extinctions 21; generations: 43322)] 'Tree # 8 simulated by Birth/Death Process Trees' = ((((1:1.7677669529663688E-4,8:1.7677669529663688E-4):3.997585711058615,(3:1.1722750795783339,2:1.1722750795783339):2.8254874081755563):3.0517936085752693,((32:0.5559027092568375,33:0.5559027092568375):1.3170759365856861,12:1.8729786458427586):5.1765774504875015):4.4192373305096515,(((6:6.341340674861228,(19:5.1242512845442,25:5.1242512845442):1.2170893903165048):2.5975061523112575,(((23:2. [...]
+	TREE  [!(#extinctions 11; generations: 40937)] 'Tree # 9 simulated by Birth/Death Process Trees' = (((((30:0.0021241489502897806,3:0.0021241489502897806):4.03837474191865,(6:3.2806199094102113,((20:0.38472506031821213,28:0.38472506031821213):0.22332778047488064,(10:1.7677669529663688E-4,14:1.7677669529663688E-4):0.6078760640978246):2.6725670686171776):0.7598789814583328):0.4117242091366529,2:4.452223100005203):3.2844087533788384,(17:6.7140071303865705,(((25:4.17723003732929,(31:0.147853 [...]
+	TREE  [!(#extinctions 20; generations: 38463)] 'Tree # 10 simulated by Birth/Death Process Trees' = ((((23:3.550943346581924,(((17:0.07628285934466959,9:0.07628285934466959):0.004567475798218379,(1:0.03712593461902378,25:0.03712593461902378):0.04372440052386375):0.17179614592313583,7:0.2526464810660224):3.298296865515946):3.471446090129537,((20:0.8592479777010834,2:0.8592479777010834):0.06400412414523171,4:0.9232521018463213):6.0991373348656674):3.6615504741469422,((13:1.136505019946055 [...]
+	TREE  [!(#extinctions 8; generations: 34731)] 'Tree # 11 simulated by Birth/Death Process Trees' = (((((1:1.2868878634747136,30:1.2868878634747136):4.018744131901495,((28:0.6513105499339232,(3:0.33093306472555956,25:0.33093306472555956):0.32037748520842063):2.738152348147349,13:3.3894628980810007):1.9161690972946834):4.118494795319858,((10:0.9451033109767062,32:0.9451033109767062):4.265044857856794,(21:4.233625303030518,(27:3.2996580143250522,(15:2.354208438819003,14:2.354208438819003): [...]
+	TREE  [!(#extinctions 14; generations: 34315)] 'Tree # 12 simulated by Birth/Death Process Trees' = (((((22:1.710028643549374,(8:1.2517327106517158,6:1.2517327106517158):0.4582959328976632):1.870022639768317,(7:3.481634817636679,(15:0.5334506293522375,11:0.5334506293522375):2.94818418828446):0.0984164656809154):4.249020260022012,((((19:0.48941430039343736,16:0.48941430039343736):1.2575152737207451,20:1.746929574114258):1.544003902989164,(9:1.7677669529663688E-4,27:1.7677669529663688E-4) [...]
+	TREE  [!(#extinctions 7; generations: 42885)] 'Tree # 13 simulated by Birth/Death Process Trees' = ((((25:0.56831736508115,20:0.56831736508115):9.019274495077578,((((5:0.04826286642271187,12:0.04826286642271187):1.5897596828165792,21:1.6380225492393081):2.04321038247706,14:3.6812329317156003):1.2690210595456097,1:4.950253991261129):4.637337868896613):0.9775022952975857,(2:4.134704176254002,((23:0.3036567423287932,15:0.3036567423287932):0.39469574165498356,(13:0.31329830424375393,4:0.313 [...]
+	TREE  [!(#extinctions 35; generations: 82778)] 'Tree # 14 simulated by Birth/Death Process Trees' = (((32:4.071984945262652,((((30:0.21101086779662026,1:0.21101086779662026):0.5039890660633859,8:0.7149999338599824):1.4033690396873437,33:2.118368973547363):1.0435387502822417,(29:0.8952382356427095,28:0.8952382356427095):2.2666694881866345):0.9100772214336252):5.239641975927711,((((6:1.1616053059070444,31:1.1616053059070188):1.6011630367967895,23:2.7627683427038185):2.878995805019208,(14: [...]
+	TREE  [!(#extinctions 17; generations: 46777)] 'Tree # 15 simulated by Birth/Death Process Trees' = (8:11.766455167874405,(((((((14:0.26946457564914844,16:0.26946457564914844):0.05090872587288873,12:0.3203733015220338):5.55887312627634,((10:2.248799115541852,(4:1.7677669529663688E-4,7:1.7677669529663688E-4):2.2486223388465554):0.5507955071762447,3:2.7995946227179855):3.079651805082007):1.2970257677733479,(25:5.3875636653728165,(11:1.8985053213955279,9:1.8985053213955279):3.4890583439794 [...]
+	TREE  [!(#extinctions 16; generations: 46402)] 'Tree # 16 simulated by Birth/Death Process Trees' = (((30:4.534192517799618,(18:0.12038775810374173,9:0.12038775810374173):4.413804759696139):6.4472916329308765,((31:0.9629809008768203,(29:0.5404234541492985,27:0.5404234541492985):0.42255744672762297):8.497926287060793,(((20:0.3522636357270361,15:0.3522636357270361):0.30606510706210316,3:0.6583287427890945):4.509322755753467,32:5.167651498541972):4.29325568939502):1.520576962792528):1.7691 [...]
+	TREE  [!(#extinctions 19; generations: 56180)] 'Tree # 17 simulated by Birth/Death Process Trees' = (((7:1.3478364982426367,2:1.3478364982426367):10.036819691763654,(12:0.5560937346704772,14:0.5560937346704772):10.828562455335266):2.408227355789484,((((24:3.0890545771786675,17:3.08905457717883):1.1684036370231248,(30:3.5733008409350377,(1:1.7677669529663688E-4,15:1.7677669529663688E-4):3.5731240642399813):0.684157373266409):5.496767777093504,((26:7.920861545038645,((((28:0.6150431969868 [...]
+	TREE  [!(#extinctions 17; generations: 37753)] 'Tree # 18 simulated by Birth/Death Process Trees' = (((6:0.34033646399111767,5:0.34033646399111767):6.454446731151579,((((17:1.7677669529663688E-4,10:1.7677669529663688E-4):5.727152515594094,((1:0.10695272926119685,29:0.10695272926119685):5.165794328735267,2:5.272747057996255):0.4545822342928134):0.4839541706679477,((22:3.56731316968905,(3:0.29906688188536734,15:0.29906688188536734):3.2682462878041227):1.5171875052923711,(16:3.457773647365 [...]
+	TREE  [!(#extinctions 27; generations: 73745)] 'Tree # 19 simulated by Birth/Death Process Trees' = ((26:10.804287205409942,((((8:1.2155467281422982,24:1.2155467281422982):2.7732760312633022,(16:2.9709311117327584,(((17:0.7738110672064692,13:0.7738110672064692):0.15513174675916502,(31:0.4157630949587063,7:0.4157630949587063):0.5131797190069931):0.29556336143726547,(6:1.0515469381109528,1:1.0515469381109528):0.17295923729203636):1.7464249363298754):1.0178916476723319):3.009119404223654,2 [...]
+	TREE  [!(#extinctions 10; generations: 31938)] 'Tree # 20 simulated by Birth/Death Process Trees' = (((25:3.971576165214506,(17:0.6098000255661603,13:0.6098000255661603):3.361776139648522):4.072612617901492,(((15:5.220805263453765,(((19:3.9849002949607613,((2:0.8722972880363862,3:0.8722972880363862):1.550497859310673,(22:1.9943223118532607,(5:1.4888612556135803,1:1.4888612556135803):0.5054610562395608):0.428472835494136):1.5621051476136618):0.26617131030640306,(7:3.774594541068951,(28:0 [...]
+	TREE  [!(#extinctions 10; generations: 36048)] 'Tree # 21 simulated by Birth/Death Process Trees' = ((23:5.842803660258621,13:5.842803660258621):4.1260807339073615,(((((8:1.9562888259496503,22:1.9562888259496503):1.0622141848211368,((19:1.7677669529663688E-4,27:1.7677669529663688E-4):2.2041251552395145,((17:0.35902943573164775,20:0.35902943573164775):1.6271125076816697,24:1.9861419434133212):0.21815998852144117):0.8142010788360523):1.6192744236925674,(29:3.878665313099562,14:3.878665313 [...]
+	TREE  [!(#extinctions 15; generations: 42305)] 'Tree # 22 simulated by Birth/Death Process Trees' = (((((((14:0.6272186739180423,20:0.6272186739180423):1.8124492720052714,(3:0.6431079647290976,(33:0.09723001101988148,28:0.09723001101988148):0.5458779537092442):1.7965599811942108):0.9147487075217913,(21:2.052258821335196,25:2.052258821335195):1.3021578321101044):0.66288921893111,(((18:0.8863349232984842,(26:0.24145603743119276,7:0.24145603743119276):0.644878885867331):0.12560412414523348 [...]
+	TREE  [!(#extinctions 13; generations: 51245)] 'Tree # 23 simulated by Birth/Death Process Trees' = ((((10:0.3463906792691378,26:0.3463906792691378):0.8724199792268181,(16:0.1937374364989211,2:0.1937374364989211):1.0250732219970249):15.378774920166597,((((17:2.3863394104847746,15:2.3863394104847684):1.4690004799787457,4:3.8553398904630756):1.7439210501471756,((3:0.0694760698583085,11:0.0694760698583085):1.5789052607488978,27:1.6483813306072306):3.950879610002555):0.487749382678853,23:6. [...]
+	TREE  [!(#extinctions 12; generations: 37454)] 'Tree # 24 simulated by Birth/Death Process Trees' = ((((((9:3.6437315390883174,14:3.6437315390883174):1.019655167119162,(24:1.945284746543999,(12:1.3694236335096992,17:1.3694236335096992):0.5758611130342317):2.7181019596635947):3.726039931916611,(((3:1.7677669529663688E-4,2:1.7677669529663688E-4):1.1431597528482076,26:1.1433365295435043):3.749177184851094,(19:3.4135196326185024,(5:2.2590086958704534,10:2.2590086958704534):1.154510936748423 [...]
+	TREE  [!(#extinctions 11; generations: 34565)] 'Tree # 25 simulated by Birth/Death Process Trees' = (((7:2.117742918488203,25:2.1177429184881778):1.0532385665853017,((((19:0.7136858073609503,3:0.7136858073609503):0.4127326896308828,27:1.1264184969918998):0.7860786163123459,13:1.9124971133045041):0.1724330687136355,5:2.08493018201818):1.0860513030553047):7.133908307570098,((((16:3.667969684794139,(29:0.46352093354420953,26:0.46352093354420953):3.204448751249904):1.7678469599132092,(((6:2 [...]
+	TREE  [!(#extinctions 30; generations: 64026)] 'Tree # 26 simulated by Birth/Death Process Trees' = ((((7:4.21171145954881,(18:2.7033794865710696,14:2.7033794865710696):1.5083319729778204):11.807795315805699,(((((31:1.7677669529663688E-4,19:1.7677669529663688E-4):1.1568340301796922,4:1.157010806874989):3.6449695968593314,1:4.8019804037350164):4.636272526672271,((((29:3.2725961833293455,(26:0.6733524156477745,10:0.6733524156477745):2.5992437676816875):1.6363924192948258,(16:2.91622959602 [...]
+	TREE  [!(#extinctions 28; generations: 54244)] 'Tree # 27 simulated by Birth/Death Process Trees' = (((((30:1.5307354398574833,(25:0.5008054843260257,14:0.5008054843260257):1.0299299555313222):6.315438576849287,(((2:0.29603737709713235,3:0.29603737709713235):0.11566878338905269,15:0.411706160486182):7.345610851453552,((((33:1.2896052614675875,29:1.2896052614675875):2.6578544282357184,((19:1.7677669529663688E-4,24:1.7677669529663688E-4):2.1252424157084406,7:2.1254191924037373):1.82204049 [...]
+	TREE  [!(#extinctions 21; generations: 51143)] 'Tree # 28 simulated by Birth/Death Process Trees' = ((((2:1.6391170645477398,31:1.6391170645477398):4.09189461817883,(23:4.4468899400637545,3:4.446889940063755):1.284121742662512):4.873815997585321,(((15:1.386121141949624,26:1.3861211419496466):6.780141578391685,(((16:2.4021261253073902,(21:0.23929497631632632,28:0.23929497631632632):2.1628311489909477):1.5475358317435723,(4:0.03217618715071797,(13:1.7677669529663688E-4,29:1.76776695296636 [...]
+	TREE  [!(#extinctions 21; generations: 49879)] 'Tree # 29 simulated by Birth/Death Process Trees' = ((5:8.029242868744372,((26:2.8996337223998014,(16:1.0204882521076606,31:1.0204882521076606):1.8791454702921437):4.882031796076129,(((30:0.4520915741875009,(27:0.10064411047006705,22:0.10064411047006705):0.351447463717439):0.11261461952580547,6:0.5647061937132847):0.8205877445002439,2:1.3852939382136669):6.396371580262341):0.24757735026918984):7.954402209650952,(((28:5.318224453262853,((20 [...]
+	TREE  [!(#extinctions 9; generations: 50259)] 'Tree # 30 simulated by Birth/Death Process Trees' = ((14:3.5628039446154123,((29:2.6420847815507718,21:2.6420847815507718):0.44621357270182277,28:3.088298354252549):0.47450559036266604):14.297303560317129,((1:7.620803829091519,(((18:2.6490929804405217,((25:0.7620003355242505,23:0.7620003355242505):1.709089366000961,(9:1.2807011472600909,31:1.2807011472600909):1.1903885542651487):0.17800327891532708):0.5437146082560399,(17:1.7677669529663688 [...]
+	TREE  [!(#extinctions 14; generations: 42641)] 'Tree # 31 simulated by Birth/Death Process Trees' = (((22:7.792667791897018,(((30:1.4424805338893039,2:1.4424805338893039):2.4970386190726157,(1:0.6518086479136341,28:0.6518086479136341):3.2877105050476927):1.383630914389096,(10:4.967021216337617,((((33:0.12675760642765535,(25:0.03677238122843051,24:0.03677238122843051):0.08998522519922345):0.8594962452293272,32:0.9862538516569647):0.5966273336565217,8:1.5828811853135807):1.185023296476669 [...]
+	TREE  [!(#extinctions 12; generations: 43892)] 'Tree # 32 simulated by Birth/Death Process Trees' = ((((14:6.877335129818884,(29:1.9544743045932376,18:1.9544743045932376):4.9228608252246255):0.6984208574574046,((33:3.272364167045982,(15:0.7033824444452877,4:0.7033824444452877):2.568981722600607):3.9209941048970447,(((2:6.043523170939974,16:6.043523170939974):0.034802159829168684,(19:0.8115811148836267,21:0.8115811148836267):5.26674421588505):0.5551924301419386,(((6:0.03783304140021033,2 [...]
+	TREE  [!(#extinctions 14; generations: 43885)] 'Tree # 33 simulated by Birth/Death Process Trees' = ((25:9.157567737556247,((9:0.8081560963215438,26:0.8081560963215438):4.380577343740367,(14:2.087653089977894,(19:1.950467541063053,18:1.950467541063053):0.1371855489148306):3.1010803500841284):3.9688342974945736):5.667496115338028,(((27:0.995160220466819,8:0.995160220466819):10.570343533146824,((7:1.7162277902545773,11:1.7162277902545773):7.620278171907257,((((12:1.4545920388738018,20:1.4 [...]
+	TREE  [!(#extinctions 12; generations: 43966)] 'Tree # 34 simulated by Birth/Death Process Trees' = (((((7:0.5547056691858626,29:0.5547056691858626):0.22927929702848884,23:0.783984966214293):8.471592587060204,((17:1.2647005218540266,(12:1.7677669529663688E-4,1:1.7677669529663688E-4):1.26452374515873):2.277287665262968,(((28:1.4387398172077581,(19:1.094209598324179,(32:0.8305296615959895,(11:0.6083800922063829,2:0.6083800922063829):0.2221495693896533):0.2636799367280896):0.34453021888345 [...]
+	TREE  [!(#extinctions 26; generations: 77113)] 'Tree # 35 simulated by Birth/Death Process Trees' = (19:23.066639946483367,(((25:1.4680500067424143,4:1.4680500067424143):2.1329989960278337,(14:2.30138426675269,29:2.30138426675269):1.299664736017829):17.45931040838556,((((24:4.3447692137685605,((21:1.2498232673114043,7:1.2498232673114043):2.39378361296218,((28:1.7677669529663688E-4,1:1.7677669529663688E-4):2.6769435128493018,12:2.6771202895445794):0.9664865907292179):0.7011623334962034): [...]
+	TREE  [!(#extinctions 21; generations: 41919)] 'Tree # 36 simulated by Birth/Death Process Trees' = (((((13:7.5208594413547205,((17:5.254585520008496,22:5.254585520008496):0.6974671738764341,((31:1.9181711841582523,20:1.9181711841584388):3.532726263586478,((6:4.001114956806462,(((21:2.238517534887223,9:2.238517534887223):0.1448033835373865,((14:2.0340479599631247,(5:0.5844340068816449,25:0.5844340068816449):1.4496139530813603):0.20446957492389994,26:2.238517534887223):0.1448033835373865 [...]
+	TREE  [!(#extinctions 13; generations: 48317)] 'Tree # 37 simulated by Birth/Death Process Trees' = ((((14:1.7875044335976107,11:1.7875044335976107):1.6701529606875294,(((4:1.727195615108835,10:1.727195615108835):0.13553609396486066,20:1.862731709073775):1.1182162279472707,(32:1.1891480298217514,22:1.1891480298217514):1.7917999071992237):0.4767094572642337):8.07428194883241,((((18:0.48437097371953003,16:0.48437097371953003):0.5269364334957339,7:1.0113074072151214):3.701661492243788,26:4 [...]
+	TREE  [!(#extinctions 2; generations: 27306)] 'Tree # 38 simulated by Birth/Death Process Trees' = (((18:6.977392842130951,((8:2.0959691271738152,5:2.0959691271738152):4.049846364688471,((((27:1.5271157322382314,15:1.5271157322382314):0.5754093211863684,(10:1.1800275168526315,30:1.1800275168526315):0.922497536571784):0.9576809508301204,(24:1.1950452409670513,(33:0.9260715263461589,(3:0.1820828247622702,17:0.1820828247622702):0.7439887015839434):0.268973714620774):1.8651607632875362):0.5 [...]
+	TREE  [!(#extinctions 13; generations: 41942)] 'Tree # 39 simulated by Birth/Death Process Trees' = (((12:1.8798873152686453,(4:0.20151307695952456,29:0.20151307695952456):1.678374238309082):9.196294818960022,(((10:0.951050260998011,27:0.951050260998011):5.674224618346089,19:6.6252748793437535):2.7886914658542423,(((13:1.7677669529663688E-4,31:1.7677669529663688E-4):1.3679774773428186,16:1.3681542540381153):7.216890190383609,((6:0.21902615484112786,24:0.21902615484112786):4.802022085655 [...]
+	TREE  [!(#extinctions 13; generations: 39488)] 'Tree # 40 simulated by Birth/Death Process Trees' = (((((4:2.129798566524287,16:2.129798566524287):1.3981429620271764,8:3.527941528551016):0.7142331883792321,15:4.242174716929527):5.0703878564129425,(((18:4.889015028512835,(((12:0.3605336930539178,26:0.3605336930539178):2.0723936194087016,23:2.4329273124626747):1.4070594420941027,(3:2.1289944423790543,(17:0.20552508812762,19:0.20552508812762):1.9234693542513424):1.7109923121776354):1.04902 [...]
+	TREE  [!(#extinctions 9; generations: 39875)] 'Tree # 41 simulated by Birth/Death Process Trees' = (((17:1.6139787539520158,(26:1.3422764090784909,27:1.3422764090784909):0.27170234487354655):9.264836843298857,(((6:6.07484790188883,3:6.07484790188883):1.1707063979562153,(13:6.986375253457993,29:6.986375253458285):0.25917904638729):3.0491495826087274,((((28:2.169911499833049,12:2.16991149983327):0.15279372573094574,23:2.32270522556423):4.327649807836855,((25:3.458791983946934,21:3.4587919 [...]
+	TREE  [!(#extinctions 20; generations: 51998)] 'Tree # 42 simulated by Birth/Death Process Trees' = (((((9:2.36648880817362,(23:1.8231470616023506,31:1.8231470616023506):0.5433417465711161):0.14293228872788416,((27:0.4106550917808298,18:0.4106550917808298):1.8840501253787867,26:2.2947052171596476):0.21471587974175224):4.737020552864423,((((19:1.619039146827701,15:1.619039146827701):0.5569603001779644,1:2.1759994470058297):0.6029991917391111,(14:0.8681976520611947,(8:0.43831727717676244, [...]
+	TREE  [!(#extinctions 30; generations: 50917)] 'Tree # 43 simulated by Birth/Death Process Trees' = ((19:2.9856417337823546,30:2.985641733782273):12.761428903353094,(((((16:1.451046645974137,10:1.451046645974137):0.7084610145202556,14:2.1595076604944725):1.995499463913116,31:4.15500712440769):4.179599309329514,(26:3.5716514325478177,((8:1.5860345260635236,(12:0.36662341324226083,(18:0.25942528954563704,((29:0.03876798007491414,20:0.03876798007491414):0.19992185464173126,4:0.238689834716 [...]
+	TREE  [!(#extinctions 10; generations: 32793)] 'Tree # 44 simulated by Birth/Death Process Trees' = (((4:6.848502573107421,((10:1.7730962231408118,11:1.7730962231408118):1.7815754777567403,(26:0.8475341503390276,3:0.8475341503390276):2.7071375505579467):3.293830872210629):0.05760197426044073,(12:2.0630732087529906,(22:0.16289666131937144,25:0.16289666131937144):1.9001765474335688):4.843031338615445):2.962491042235179,((((1:5.58046697665143,(((16:0.22313720545022697,33:0.2231372054502269 [...]
+	TREE  [!(#extinctions 9; generations: 33638)] 'Tree # 45 simulated by Birth/Death Process Trees' = ((((((20:4.760057433573059,(((25:0.5197620985019906,16:0.5197620985019906):0.4750214013465522,12:0.9947834998485019):0.24018130058647572,3:1.2349648004351064):3.5250926331382453):1.0632005888711196,(30:2.326251668546265,28:2.326251668546265):3.4970063538988083):1.2990941775502043,(((15:0.3512944545534471,4:0.3512944545534471):1.729136698359533,2:2.0804311529130675):1.567810377160519,(((((2 [...]
+	TREE  [!(#extinctions 6; generations: 40324)] 'Tree # 46 simulated by Birth/Death Process Trees' = ((24:7.650340763978949,25:7.650340763978932):5.286117682623102,(((((30:1.463245119090403,13:1.463245119090403):0.6779159910207471,16:2.141161110111263):4.201548096338741,(7:0.17156970734364432,27:0.17156970734364432):6.171139499105474):2.7484624124579686,((((17:2.144521021013634,(((2:0.2777328673253763,22:0.2777328673253763):1.6715297720445685,8:1.9492626393699732):0.07427363899658689,(19: [...]
+	TREE  [!(#extinctions 11; generations: 41791)] 'Tree # 47 simulated by Birth/Death Process Trees' = ((((((32:0.847154085111813,(31:0.25431969517755953,(11:0.11811931431304425,12:0.11811931431304425):0.13620038086451516):0.592834389934285):0.8355989075808821,10:1.6827529926928841):0.6427091091759284,(20:1.945393651073109,(5:1.6639808652250698,(15:0.010962983715121617,19:0.010962983715121617):1.6530178815099443):0.2814127858479863):0.3800684507957274):0.6875889495435075,((33:1.76776695296 [...]
+	TREE  [!(#extinctions 18; generations: 53604)] 'Tree # 48 simulated by Birth/Death Process Trees' = (((((22:0.7581663436193843,(11:0.3631291866340555,4:0.3631291866340555):0.3950371569853796):4.832380278528621,(30:1.3788888691072223,(10:0.09405106674037268,24:0.09405106674037268):1.2848378023668043):4.211657753040931):0.3901231980444128,((19:2.179782206976461,26:2.179782206976461):0.34323981706382056,(((29:1.0677551289991758,3:1.0677551289991758):0.6135099638758239,7:1.6812650928752413) [...]
+	TREE  [!(#extinctions 18; generations: 58928)] 'Tree # 49 simulated by Birth/Death Process Trees' = ((((((18:1.1148768217476226,3:1.1148768217476226):5.114199857132168,20:6.229076678879548):0.39703777649308464,(((6:0.996082782013471,12:0.996082782013471):0.7888055433900931,13:1.7848883254039194):4.807710804846032,22:6.592599130248867):0.03351532512307292):2.8397728908561164,((31:1.4070550153413384,((16:0.604602475159741,32:0.604602475159741):0.28636758681548513,14:0.8909700619751831):0. [...]
+	TREE  [!(#extinctions 24; generations: 59012)] 'Tree # 50 simulated by Birth/Death Process Trees' = ((((13:6.233354620268357,(((((4:1.2318130162044154,18:1.2318130162044154):1.1846546960747826,22:2.416467712279285):0.26701034684784325,24:2.683478059127066):0.9312988684976024,(3:0.6211753904027356,2:0.6211753904027356):2.9936015372217963):0.2333537762002076,(8:3.1958512294105432,6:3.1958512294105432):0.6522794744141336):2.3852239164433566):8.173144247625679,((19:3.8803889027142087,10:3.8 [...]
+	TREE  [!(#extinctions 18; generations: 50156)] 'Tree # 51 simulated by Birth/Death Process Trees' = ((((4:1.7147551473128042,(28:0.7795109118916121,18:0.7795109118916121):0.9352442354209295):3.3035286441570255,((((25:2.569503686956176,((((30:0.9001807841976005,(6:0.7277263112361296,22:0.7277263112361296):0.17245447296153155):0.46559648338541926,(31:1.7677669529663688E-4,12:1.7677669529663688E-4):1.3656004908879347):0.17120519240743556,15:1.5369824599906459):0.743743465842021,11:2.280725 [...]
+	TREE  [!(#extinctions 13; generations: 43083)] 'Tree # 52 simulated by Birth/Death Process Trees' = ((((((19:0.8958122642536875,11:0.8958122642536875):1.1616485659376996,(15:0.6743678166917269,18:0.6743678166917269):1.3830930134997175):0.041248570162116686,20:2.0987094003538482):5.1636028165682015,((((17:0.7120917003241756,6:0.7120917003241756):2.779953358417475,(8:2.5669020733368764,(29:2.515965493891058,22:2.5159654938911187):0.050936579445789956):0.9251429854053496):1.836957058444147 [...]
+	TREE  [!(#extinctions 9; generations: 31427)] 'Tree # 53 simulated by Birth/Death Process Trees' = ((((27:4.690971527365414,(((23:2.3111369319010944,13:2.3111369319010944):2.343370081115963,(((11:0.7361855690285691,1:0.7361855690285691):0.03880754755060148,3:0.7749931165791765):2.3942453180411905,(9:3.0212302357304406,(21:0.39565295202158657,10:0.39565295202158657):2.625577283708956):0.14800819888974728):1.4852685783961228):0.004444294291552228,4:4.658951307308382):0.03202022005726):1.0 [...]
+	TREE  [!(#extinctions 12; generations: 33505)] 'Tree # 54 simulated by Birth/Death Process Trees' = ((((14:3.165670830386004,19:3.1656708303860226):0.643715593995356,(32:1.0249354909341835,(11:0.38740619393613973,4:0.38740619393613973):0.6375292969981302):2.7844509334467733):3.727815709273097,((((29:2.2819279544545084,(26:0.409142618987396,9:0.409142618987396):1.8727853354670885):3.79854592520511,(5:4.207649202381487,(12:1.0547577384466624,24:1.0547577384466624):3.1528914639347483):1.87 [...]
+	TREE  [!(#extinctions 13; generations: 33492)] 'Tree # 55 simulated by Birth/Death Process Trees' = (((18:0.0401311384390666,(16:1.7677669529663688E-4,27:1.7677669529663688E-4):0.03995436174376996):3.5454213410524655,8:3.5855524794915463):4.0678066800197215,((((30:4.010363880733022,(1:0.558876734307254,13:0.558876734307254):3.4514871464258046):0.9080714492189835,(28:1.4385004283179794,33:1.4385004283179794):3.4799349016342815):1.011555700434367,(2:1.0943183129225582,24:1.094318312922558 [...]
+	TREE  [!(#extinctions 9; generations: 22236)] 'Tree # 56 simulated by Birth/Death Process Trees' = ((((((3:1.0630401147384494,22:1.0630401147384494):0.35436796433081663,(14:1.149634337165003,25:1.149634337165003):0.2677737419043211):1.8652550346681391,(24:2.606441118535266,(11:2.281976487212837,29:2.2819764872128445):0.3244646313225766):0.676221995202445):0.6122203006590855,28:3.8948834143960536):0.8240758303786127,((6:1.3152039549241867,7:1.3152039549241867):1.0249409324335272,(((5:0.1 [...]
+	TREE  [!(#extinctions 22; generations: 53705)] 'Tree # 57 simulated by Birth/Death Process Trees' = (((((((7:0.8876202231283684,27:0.8876202231283684):1.4937140403423335,14:2.3813342634711403):0.7861390104574396,(13:0.03765626470491369,15:0.03765626470491369):3.129817009223471):0.5681424596814172,18:3.7356157336096976):9.102413620736643,(((26:1.7677669529663688E-4,23:1.7677669529663688E-4):5.015988750712104,((1:1.2836470769602066,11:1.2836470769602066):0.07299165412110993,17:1.356638731 [...]
+	TREE  [!(#extinctions 14; generations: 37667)] 'Tree # 58 simulated by Birth/Death Process Trees' = ((((((22:1.940997435355183,31:1.940997435355183):0.6425328506881927,33:2.5835302860434792):1.4361851995762858,((9:1.7677669529663688E-4,27:1.7677669529663688E-4):2.8644407191842687,((1:1.251268154187798,4:1.251268154187798):0.6531866689018867,(30:1.666613807131357,29:1.666613807131357):0.23784101595841056):0.9601626727895767):1.155097989740251):4.4183376920154185,((15:6.262455890852491,(1 [...]
+	TREE  [!(#extinctions 17; generations: 35756)] 'Tree # 59 simulated by Birth/Death Process Trees' = ((((((((1:0.10457138472756493,3:0.10457138472756493):2.5673830061670024,(15:2.3562844917782413,((17:1.5730452337142196,31:1.5730452337142196):0.7679586168397671,32:2.3410038505541952):0.015280641224033447):0.3156698991165454):1.3085671297250243,(16:3.2351637582458603,(33:1.3999457776675819,(((28:1.7677669529663688E-4,23:1.7677669529663688E-4):0.7087649154126563,18:0.7089416921079529):0.49 [...]
+	TREE  [!(#extinctions 14; generations: 34101)] 'Tree # 60 simulated by Birth/Death Process Trees' = (((23:1.002948054869932,13:1.002948054869932):0.9290386556685784,5:1.9319867105388955):6.524284806386005,(((9:6.445130592730466,((16:4.0904536396041165,(12:3.9349564913911084,19:3.9349564913909956):0.15549714821319427):2.0511938248270023,18:6.141647464432944):0.30348312829799523):0.19280184606498146,((27:4.842152138481693,((10:2.268217767544722,(29:2.1525568968121522,(30:0.815190460725847 [...]
+	TREE  [!(#extinctions 15; generations: 70018)] 'Tree # 61 simulated by Birth/Death Process Trees' = ((((((20:5.447280681817901,(25:4.927479583859653,12:4.927479583859204):0.5198010979590716):0.2825135701664469,((((7:1.3032521538980621,31:1.3032521538980621):2.18767295360001,14:3.490925107497957):0.9994127429484463,(17:1.5882960442164493,5:1.5882960442164493):2.902041806229644):0.5934182292696031,29:5.083756079714603):0.6460381722697931):6.064729679773402,(((24:0.6585178456729361,18:0.65 [...]
+	TREE  [!(#extinctions 14; generations: 38248)] 'Tree # 62 simulated by Birth/Death Process Trees' = ((((13:0.9922632235409579,(14:0.3450881844472765,4:0.3450881844472765):0.6471750390937562):8.58834731002244,((26:2.1385365619987526,(25:0.948780390337735,(32:0.9283762661925048,(3:0.8510532199412449,23:0.8510532199412449):0.07732304625128636):0.02040412414523192):1.1897561716606537):1.1180553455724964,1:3.256591907570862):6.324018625992899):0.20797585342026062,(((((12:0.9001316588677524,( [...]
+	TREE  [!(#extinctions 11; generations: 45390)] 'Tree # 63 simulated by Birth/Death Process Trees' = (((20:7.320281455613137,(26:1.6685741970030155,(8:0.006720343028002341,4:0.006720343028002341):1.6618538539750107):5.651707258610054):0.25212060116777435,((13:0.38165933252377426,27:0.38165933252377426):2.4648327348061736,(18:0.3725847173183274,33:0.3725847173183274):2.473907350011622):4.725909989451055):8.090994101289576,(((((11:0.5346904233245635,9:0.5346904233245635):1.6555714659314322 [...]
+	TREE  [!(#extinctions 28; generations: 55583)] 'Tree # 64 simulated by Birth/Death Process Trees' = ((((((((24:1.0045604650269453,14:1.0045604650269453):0.37576256797673063,23:1.380323033003896):5.452010919562992,11:6.83233395256642):2.8590920898576546,7:9.691426042422238):0.883143550835467,4:10.574569593257173):0.39815786282202703,28:10.972727456079866):1.7170712800902166,(((5:0.4933799794746637,(26:0.47362042736791143,20:0.47362042736791143):0.0197595521067535):1.7461207318798602,19:2 [...]
+	TREE  [!(#extinctions 36; generations: 67191)] 'Tree # 65 simulated by Birth/Death Process Trees' = (((33:14.69473678595128,14:14.694736785952095):2.345900193401182,(((2:1.9303542379956513,(3:0.5255447049253014,24:0.5255447049253014):1.404809533070095):6.997246790228058,(((29:1.7677669529663688E-4,4:1.7677669529663688E-4):2.6113213943125184,16:2.611498171007562):1.9002976267786635,((8:0.6422246755150349,26:0.6422246755150349):0.24572765549212958,9:0.8879523310071415):3.6238434667782435) [...]
+	TREE  [!(#extinctions 15; generations: 44808)] 'Tree # 66 simulated by Birth/Death Process Trees' = (((((27:0.8944824390414395,11:0.8944824390414395):1.468514361869504,13:2.3629968009111137):0.25210978575330467,12:2.615106586664311):4.1369657435518565,((22:0.8035747767918349,(9:1.7677669529663688E-4,16:1.7677669529663688E-4):0.8033980000965383):3.1165044777885167,((8:0.8103789009370657,4:0.8103789009370657):1.3630305463399868,(24:0.03765626470491369,6:0.03765626470491369):2.135753182572 [...]
+	TREE  [!(#extinctions 8; generations: 29973)] 'Tree # 67 simulated by Birth/Death Process Trees' = ((((18:2.7379588066096257,28:2.7379588066096257):1.4306506485733639,23:4.168609455181574):1.7774455927759083,(((6:3.4910713255619275,11:3.4910713255619275):0.12148798950918216,((14:1.1415194614940354,(1:0.5323962008266141,4:0.5323962008266141):0.6091232606674607):1.586818443876344,(31:1.1280428776398983,((33:1.7677669529663688E-4,16:1.7677669529663688E-4):0.10765983604238342,27:0.107836612 [...]
+	TREE  [!(#extinctions 16; generations: 44412)] 'Tree # 68 simulated by Birth/Death Process Trees' = (((((4:1.5058729388498517,27:1.5058729388498517):1.8270900399294836,7:3.3329629787791033):2.7823861875887084,(28:5.896718879567317,33:5.896718879567317):0.21863028679871324):3.1847271832596227,(12:2.3626699448935375,((20:0.11110223355532245,25:0.11110223355532245):0.5011218495768185,31:0.6122240831321212):1.7504458617611514):6.9374064047330215):4.350160341512592,(((3:7.178191372424314,((9 [...]
+	TREE  [!(#extinctions 8; generations: 36050)] 'Tree # 69 simulated by Birth/Death Process Trees' = (((((30:0.6919626928996491,11:0.6919626928996491):0.7665078185781724,26:1.4584705114780598):4.447361074971575,((16:0.63878502153585,22:0.63878502153585):5.050227757372516,((((25:1.7677669529663688E-4,23:1.7677669529663688E-4):1.1491563067683432,31:1.14933308346364):2.6081205119350264,(32:2.7764914230673505,9:2.7764914230673505):0.980962172332072):0.10223712919178576,4:3.8596907245906387):1 [...]
+	TREE  [!(#extinctions 10; generations: 28848)] 'Tree # 70 simulated by Birth/Death Process Trees' = ((((30:1.514963018227822,(28:0.6812237125471509,29:0.6812237125471509):0.8337393056804666):0.6769399141321712,2:2.191902932360214):3.6060367171841325,((12:2.158805706991859,(6:2.115156740037149,20:2.1151567400371563):0.04364896695471264):0.5901869792300937,27:2.7489926862218903):3.0489469633224036):0.31778383797157495,((((21:2.95451982158212,((5:0.837778218574587,(16:0.1139518191038592,10 [...]
+	TREE  [!(#extinctions 12; generations: 38686)] 'Tree # 71 simulated by Birth/Death Process Trees' = (((15:1.3408827624533375,32:1.3408827624533375):3.220058444461311,((1:0.31706706342840213,9:0.31706706342840213):0.9746016110049996,(19:0.7091230254415588,24:0.7091230254415588):0.5825456489919021):3.269272532481047):9.174932526821378,(((11:8.601142048851308,(((10:2.9952291211760933,(23:2.724025916815616,(21:1.5767822621787497,4:1.5767822621787497):1.147243654636976):0.27120320436020057): [...]
+	TREE  [!(#extinctions 12; generations: 25999)] 'Tree # 72 simulated by Birth/Death Process Trees' = (((12:3.248866910975431,(14:2.4810906212503157,(24:1.0650057469948506,27:1.0650057469948506):1.4160848742555088):0.7677762897259276):3.4250884514177065,(((29:2.017487155026738,((22:0.9100947595498557,(31:0.3800865352623119,33:0.3800865352623119):0.5300082242876045):0.014712594000225596,20:0.9248073535500801):1.092679801476397):2.220485378888183,((9:0.02881742994008188,3:0.0288174299400818 [...]
+	TREE  [!(#extinctions 4; generations: 17825)] 'Tree # 73 simulated by Birth/Death Process Trees' = ((((15:0.6445342554199471,16:0.6445342554199471):2.672285885467167,((((11:0.05197517702394123,1:0.05197517702394123):1.757378326312716,(24:1.3079353790190642,(2:0.6326442475163933,(10:0.41510089621135743,(30:1.7677669529663688E-4,19:1.7677669529663688E-4):0.4149241195160608):0.2175433513050558):0.6752911315025287):0.5014181243175923):0.7348957718328561,27:2.544249275169136):0.1019151587219 [...]
+	TREE  [!(#extinctions 16; generations: 56814)] 'Tree # 74 simulated by Birth/Death Process Trees' = ((((((8:1.1736254074902688,(33:0.08803762286445603,18:0.08803762286445603):1.0855877846257826):1.667404712834956,((3:1.3519642529377336,(31:0.2994958273870504,21:0.2994958273870504):1.0524684255505758):0.9183394625269508,(6:1.926126042199129,(26:0.7485065593369092,27:0.7485065593369092):1.1776194828620312):0.3441776732656609):0.5707264048607028):2.76489194378324,(17:1.8581779938017475,(4: [...]
+	TREE  [!(#extinctions 15; generations: 31951)] 'Tree # 75 simulated by Birth/Death Process Trees' = ((((3:0.22186917061205577,18:0.22186917061205577):4.979531116957687,6:5.20140028756903):3.3854645025189045,(((7:2.08121887042614,(9:1.7323745497735117,(20:1.458699246816424,(((28:1.7677669529663688E-4,16:1.7677669529663688E-4):0.6109725908876399,24:0.6111493675829365):0.386023200742652,2:0.9971725683256178):0.4615266784906784):0.2736753029569745):0.34884432065255433):1.6593196323517696,4: [...]
+	TREE  [!(#extinctions 11; generations: 37388)] 'Tree # 76 simulated by Birth/Death Process Trees' = (((25:4.516860827260885,(22:1.457531033185879,5:1.457531033185879):3.059329794076498):3.048773751442323,11:7.565634578704347):3.1594041861301427,(((20:6.929630491430732,((1:5.24220740494871,((12:4.333738965984056,(29:4.245108082710985,(31:3.8697844103379655,(21:0.7312825305295647,16:0.7312825305295647):3.138501879808165):0.3753236723737573):0.0886308832733545):0.2359947406578857,(8:1.3465 [...]
+	TREE  [!(#extinctions 13; generations: 38859)] 'Tree # 77 simulated by Birth/Death Process Trees' = ((19:4.556016069145065,((27:2.26829815722068,28:2.26829815722068):1.1774887145662472,((3:1.6878784299765328,(5:1.5668743058312309,((11:0.25061752774139834,22:0.25061752774139834):0.46817335559791173,12:0.7187908833392737):0.8480834224918081):0.12100412414523334):0.2296995316653222,20:1.9175779616418716):1.5282089101450154):1.1102291973587117):6.611165315346212,((25:0.5225664339247557,18:0 [...]
+	TREE  [!(#extinctions 8; generations: 37799)] 'Tree # 78 simulated by Birth/Death Process Trees' = ((((12:0.49505329334724263,19:0.49505329334724263):7.030664065797412,(((29:5.574985089999875,((((23:1.1661244930509413,(13:0.8993945979150498,(28:0.4755846551340418,15:0.4755846551340418):0.42380994278109324):0.26672989513576173):1.546622633736797,9:2.712747126787436):1.7371927526442197,(27:2.469604116820273,((((30:0.14279056462245088,7:0.14279056462245088):0.799361234617609,6:0.9421517992 [...]
+	TREE  [!(#extinctions 8; generations: 22707)] 'Tree # 79 simulated by Birth/Death Process Trees' = (((4:2.9813585225511643,(27:1.9651115327514368,(3:0.23835149878197717,15:0.23835149878197717):1.7267600339694285):1.0162469898002726):2.1824526610597017,((2:1.572177949499127,18:1.572177949499127):3.136526674092807,((30:4.13775660631745,17:4.13775660631792):0.09928162597000031,((20:0.0977603411057714,8:0.0977603411057714):2.195582892114458,(((1:1.7677669529663688E-4,10:1.7677669529663688E- [...]
+	TREE  [!(#extinctions 11; generations: 46637)] 'Tree # 80 simulated by Birth/Death Process Trees' = ((((((7:0.5241455797024402,15:0.5241455797024402):1.0998733332736106,22:1.624018912976275):2.9525164221635785,(((((28:0.4326492656550542,14:0.4326492656550542):0.29576267848407795,30:0.7284119441390895):1.0451029418560895,4:1.7735148859954621):1.2833929041132897,26:3.0569077901085078):0.7512942646674995,(17:3.437767884629166,(19:3.0229453382284457,((((1:1.7677669529663688E-4,32:1.76776695 [...]
+	TREE  [!(#extinctions 6; generations: 36411)] 'Tree # 81 simulated by Birth/Death Process Trees' = ((4:7.4692688790840025,((24:5.891405685572553,(((8:1.7677669529663688E-4,11:1.7677669529663688E-4):0.10978115638594314,20:0.10995793308123979):1.732286425704661,(2:0.356034202634065,27:0.356034202634065):1.486210156151819):4.049161326787823):0.5979654193660034,((21:0.4895887631439498,18:0.4895887631439498):5.785821272286627,(29:2.7586815248823924,((32:0.13851814498730633,28:0.1385181449873 [...]
+	TREE  [!(#extinctions 14; generations: 58070)] 'Tree # 82 simulated by Birth/Death Process Trees' = (((((9:6.36626184208983,((((10:0.7136216418043332,14:0.7136216418043332):2.114946663991763,21:2.8285683057959368):1.4872606304095222,((8:0.9166732187873329,(2:0.22014801954345053,22:0.22014801954345053):0.6965251992439283):3.3034537663330727,(1:1.7963430071671351,((28:1.7677669529663688E-4,26:1.7677669529663688E-4):1.1161895646146847,((23:0.38559318298175904,24:0.38559318298175904):0.0143 [...]
+	TREE  [!(#extinctions 6; generations: 26781)] 'Tree # 83 simulated by Birth/Death Process Trees' = (((((24:1.7913809008170034,32:1.7913809008170034):2.09051267719139,(((23:0.5156632449548593,19:0.5156632449548593):0.24500970241753456,9:0.76067294737235):1.0799072578856141,8:1.8405802052581244):2.0413133727502895):0.9805672104060268,3:4.862460788413861):2.0994356340037266,((((26:1.219301167295142,18:1.219301167295142):0.7181507646799914,22:1.9374519319752845):1.3350354159582207,21:3.2724 [...]
+	TREE  [!(#extinctions 8; generations: 30700)] 'Tree # 84 simulated by Birth/Death Process Trees' = ((7:5.918438544901883,(8:2.4780189954195193,6:2.4780189954195193):3.440419549482):2.1959724692838654,((((20:1.228832526162368,16:1.228832526162368):3.4032872065202975,((5:0.3942232590018981,27:0.3942232590018981):2.8598736974998427,22:3.2540969565017996):1.3780227761812869):0.40896748426285107,17:5.041087216946066):3.0175836818692643,(((((26:2.1165676850779445,1:2.1165676850779445):0.96694 [...]
+	TREE  [!(#extinctions 4; generations: 23855)] 'Tree # 85 simulated by Birth/Death Process Trees' = ((((12:0.14398837854463195,22:0.14398837854463195):3.4003151307419768,(1:3.3630804747740917,32:3.3630804747740584):0.18122303451267366):1.5366374789338937,((17:0.315171053571064,25:0.315171053571064):3.4100779470820197,((14:1.711958413127982,(30:1.7677669529663688E-4,13:1.7677669529663688E-4):1.7117816364326852):2.006603700801184,(5:1.6132662095020127,((10:0.32287272320566757,7:0.322872723 [...]
+	TREE  [!(#extinctions 3; generations: 23064)] 'Tree # 86 simulated by Birth/Death Process Trees' = ((2:1.4434372768042711,10:1.4434372768042711):5.934532306520959,((((19:0.2618369839244492,20:0.2618369839244492):1.143969825998431,(13:0.5694245895779843,16:0.5694245895779843):0.8363822203448283):4.290367597991678,((((31:1.2550819688061319,7:1.2550819688061319):1.136551435978989,(18:2.27055397334273,(9:0.49779685408953134,33:0.49779685408953134):1.7727571192531093):0.1210794314425288):1.7 [...]
+	TREE  [!(#extinctions 14; generations: 47232)] 'Tree # 87 simulated by Birth/Death Process Trees' = (((((((18:0.5747371908338588,(10:0.26834985206703205,6:0.26834985206703205):0.30638733876684776):0.8340372271743856,4:1.4087744180084145):1.9300598527860557,24:3.338834270794699):1.538928437937347,(27:1.9523308056236766,22:1.9523308056236766):2.925431903108085):1.8079568701397695,(((25:4.88553983651316,(11:4.508933571542295,31:4.508933571542295):0.37660626497211436):0.30138967672696815,(( [...]
+	TREE  [!(#extinctions 8; generations: 38954)] 'Tree # 88 simulated by Birth/Death Process Trees' = (((8:3.6667515874035703,((((10:0.3737798448301131,11:0.3737798448301131):0.7484240010000772,(31:0.07990589488081044,12:0.07990589488081044):1.0422979509493882):0.6155336742762547,((25:0.224696884203602,4:0.224696884203602):0.3122870847717398,18:0.5369839689753251):1.2007535511312224):1.0947081473961284,2:2.8324456675026095):0.8343059199011567):0.7233474557353012,6:4.390099043138238):5.0896 [...]
+	TREE  [!(#extinctions 9; generations: 29359)] 'Tree # 89 simulated by Birth/Death Process Trees' = ((((((20:3.354687891620105,(13:3.2140600668408865,25:3.214060066840682):0.1406278247795771):1.9385099963892145,(4:3.8154262426438823,(7:0.11785439233550046,(19:0.045964769383855596,27:0.045964769383855596):0.0718896229516431):3.6975718503080213):1.4777716453660261):0.14668049934503424,16:5.4398783873548):0.7327974384335483,(((15:0.05333155565076163,5:0.05333155565076163):0.3002511648988579 [...]
+	TREE  [!(#extinctions 5; generations: 23583)] 'Tree # 90 simulated by Birth/Death Process Trees' = (((((11:0.5657903353541026,12:0.5657903353541026):1.3033773392673773,30:1.8691676746216306):2.813594568679111,((20:1.7677669529663688E-4,3:1.7677669529663688E-4):4.622836796051525,2:4.623013572746721):0.05974867055291277):0.43046701095941575,((28:0.9339970385844175,(22:0.3590547163739635,18:0.3590547163739635):0.5749423222105533):2.6424444647538725,((21:1.154667550431801,((10:0.61463149619 [...]
+	TREE  [!(#extinctions 12; generations: 35053)] 'Tree # 91 simulated by Birth/Death Process Trees' = (((((((17:2.5233198284300298,((31:0.061780065137508375,5:0.061780065137508375):1.256586581715877,9:1.318366646853408):1.2049531815765848):0.5298409060494728,(30:0.7000403593460761,20:0.7000403593460761):2.353120375133566):2.442659323259341,19:5.495820057738806):1.41093182481202,(((((2:1.4503873453510099,1:1.4503873453510099):0.6730840710591532,7:2.1234714164104536):0.10050863387780798,33: [...]
+	TREE  [!(#extinctions 19; generations: 41325)] 'Tree # 92 simulated by Birth/Death Process Trees' = (((((27:0.020155371870546702,11:0.020155371870546702):0.4418391534223084,24:0.46199452529285584):5.052179500238734,(((19:1.7219079078969663,20:1.7219079078969663):0.22458465568737593,7:1.9464925635843704):0.7830028604444621,16:2.7294954240285936):2.784678601503258):7.4388994300396964,(((33:0.21428674719032612,21:0.21428674719032612):4.909499877715497,((((13:1.6264036199564853,(18:1.177297 [...]
+	TREE  [!(#extinctions 7; generations: 47436)] 'Tree # 93 simulated by Birth/Death Process Trees' = ((33:11.455815786245974,(((15:3.251667729659119,((9:0.21019555354593608,17:0.21019555354593608):0.6785273905117059,5:0.8887229440575899):2.362944785601727):4.804484028991324,((((31:0.7014180598893014,25:0.7014180598893014):3.495962246912657,16:4.1973803068013495):0.9353862663053172,(12:1.0536104571181057,(2:0.5757444852503252,29:0.5757444852503252):0.47786597186781066):4.079156115989782):2 [...]
+	TREE  [!(#extinctions 13; generations: 39096)] 'Tree # 94 simulated by Birth/Death Process Trees' = (((30:3.0954420406550693,(3:2.3840988487790042,(4:1.2498572161996637,28:1.2498572161996637):1.1342416325794495):0.7113431918764666):0.15609234590571758,(10:2.87671966435324,6:2.8767196643532627):0.3748147222077061):8.795086751943105,(((5:5.00019198636404,(((25:1.7677669529663688E-4,24:1.7677669529663688E-4):0.04048469182965987,32:0.04066146852495651):0.1181374286920557,20:0.15879889721701 [...]
+	TREE  [!(#extinctions 17; generations: 38967)] 'Tree # 95 simulated by Birth/Death Process Trees' = ((((17:4.848101897471828,(8:4.294041633613843,((28:0.36318032251851273,23:0.36318032251851273):1.7537161893443125,(2:2.0927041535300672,18:2.0927041535300672):0.02419235833291916):2.1771451217523103):0.5540602638579621):0.5264535240542337,(((19:0.09457836059043183,15:0.09457836059043183):0.6301773089594541,33:0.7247556695498587):2.153789259218499,(21:1.985738711159651,1:1.985738711159651) [...]
+	TREE  [!(#extinctions 20; generations: 48248)] 'Tree # 96 simulated by Birth/Death Process Trees' = (((33:8.223810940088704,(((22:0.402844212030192,(32:0.16951082305550352,15:0.16951082305550352):0.23333338897469144):2.417414976554907,((29:0.7556754308394148,2:0.7556754308394148):0.6696983229372415,16:1.4253737537769335):1.3948854348084234):2.9055820155557335,6:5.725841204140467):2.4979697359478883):2.883674166878249,(23:1.9100784935536725,7:1.9100784935536725):9.197406613414017):0.9858 [...]
+	TREE  [!(#extinctions 9; generations: 33233)] 'Tree # 97 simulated by Birth/Death Process Trees' = ((31:7.768772205311336,(((14:0.7695613793288817,(2:0.1022096552626176,12:0.1022096552626176):0.6673517240662817):1.4339759858683987,(7:0.971444549232117,32:0.971444549232117):1.2320928159651598):4.861155039962269,(((25:0.2806719324181473,27:0.2806719324181473):0.8855297658455332,23:1.1662016982637424):4.869093883103384,(18:3.783099871164788,(24:1.4697451311513898,9:1.4697451311513898):2.31 [...]
+	TREE  [!(#extinctions 24; generations: 71594)] 'Tree # 98 simulated by Birth/Death Process Trees' = (((26:5.1424318677462795,17:5.142431867745805):7.071640399098503,(((((4:0.13045998322931882,28:0.13045998322931882):3.5021476217466954,(6:1.3429864317256808,24:1.3429864317256808):2.2896211732506044):1.389900881607633,(9:2.967170587781851,16:2.967170587781851):2.055337898802229):0.7432359931050039,((23:1.6784989343332584,22:1.6784989343332584):0.5485394679487051,((3:0.07477937071720779,30 [...]
+	TREE  [!(#extinctions 13; generations: 36103)] 'Tree # 99 simulated by Birth/Death Process Trees' = ((((4:3.973472494642045,((17:1.8741792999063613,(29:0.6728905250850542,5:0.6728905250850542):1.2012887748210528):1.5724571059218342,(12:2.4435312228709827,32:2.4435312228709827):1.003105182957402):0.5268360888142491):2.49938809411979,(10:5.085681458086046,((26:2.1371230315240606,(((19:0.3421987234233916,(30:0.13612088398514297,1:0.13612088398514297):0.20607783943824623):0.0718434904069474 [...]
+	TREE  [!(#extinctions 26; generations: 52712)] 'Tree # 100 simulated by Birth/Death Process Trees' = (((((3:0.6148616880550342,(33:0.3394579366180219,(8:0.07401442900046852,(24:1.7677669529663688E-4,14:1.7677669529663688E-4):0.07383765230517188):0.2654435076175517):0.27540375143704915):0.020403375556775093,7:0.6352650636118022):9.273953780168553,((30:0.5322516612621754,(16:0.37889708191073673,27:0.37889708191073673):0.15335457935145208):8.38004443536753,(17:4.948912681485822,(31:1.77913 [...]
+
+END;
+
+
+
diff --git a/doc/source/examples/pythonidae_combined.nex b/doc/source/examples/pythonidae_combined.nex
new file mode 100644
index 0000000..22f23b9
--- /dev/null
+++ b/doc/source/examples/pythonidae_combined.nex
@@ -0,0 +1,494 @@
+#NEXUS
+
+BEGIN TAXA;
+    TITLE 4300387408;
+    DIMENSIONS NTAX=33;
+    TAXLABELS
+        Candoia_aspera
+        Antaresia_melanocephalus
+        Python_reticulatus
+        Morelia_carinata
+        Python_sebae
+        Morelia_boeleni
+        Antaresia_perthensis
+        Liasis_olivaceus
+        Morelia_amethistina
+        Python_curtus
+        Bothrochilus_boa
+        Morelia_nauta
+        Morelia_kinghorni
+        Python_regius
+        Python_molurus
+        Antaresia_stimsoni
+        Morelia_oenpelliensis
+        Morelia_bredli
+        Antaresia_childreni
+        Python_timoriensis
+        Liasis_fuscus
+        Morelia_viridisS
+        Apodora_papuana
+        Morelia_tracyae
+        Liasis_mackloti
+        Antaresia_ramsayi
+        Antaresia_maculosa
+        Liasis_albertisii
+        Morelia_viridisN
+        Morelia_clastolepis
+        Loxocemus_bicolor
+        Morelia_spilota
+        Xenopeltis_unicolor
+  ;
+END;
+
+BEGIN TAXA;
+    TITLE 4368272912;
+    DIMENSIONS NTAX=33;
+    TAXLABELS
+        Xenopeltis_unicolor
+        Loxocemus_bicolor
+        Morelia_spilota
+        Morelia_bredli
+        Morelia_carinata
+        Morelia_amethistina
+        Morelia_oenpelliensis
+        Morelia_boeleni
+        Morelia_viridisS
+        Morelia_viridisN
+        Liasis_olivaceus
+        Liasis_mackloti
+        Liasis_fuscus
+        Liasis_albertisii
+        Apodora_papuana
+        Bothrochilus_boa
+        Antaresia_maculosa
+        Antaresia_stimsoni
+        Antaresia_childreni
+        Antaresia_perthensis
+        Antaresia_melanocephalus
+        Antaresia_ramsayi
+        Python_reticulatus
+        Python_timoriensis
+        Python_sebae
+        Python_molurus
+        Python_curtus
+        Python_regius
+        Candoia_aspera
+        Morelia_nauta
+        Morelia_clastolepis
+        Morelia_tracyae
+        Morelia_kinghorni
+  ;
+END;
+
+BEGIN CHARACTERS;
+    TITLE 4368293584;
+    LINK TAXA = 4368272912;
+    DIMENSIONS NCHAR=2716;
+    FORMAT DATATYPE=DNA GAP=- MISSING=? MATCHCHAR=.;
+    MATRIX
+        Xenopeltis_unicolor         ???????????????????????????????????????????????????????AAACT--AAAATTCCCATTT-CCCACATATAT----GGATATT--ACGAAG--AAAAAA---GGACTAAAAAAAG---TCCTCTCTCGACCCCCCCCCTACCCCCCCCC-ACAGTT---AGTACGGGT------TTTCC-ATATATGTAACTCTTATAGATTTGCCTATCAAGGC-ATACTATGTATAATCATACATTAATGGCTTGCCCCATGAATATTAAACAGGAATTTCCCTTTAAATATTTTAGCCTAAAAAAGCCTTCGTACAGAACTTTAATA----CCACATTTCT-CAGTCGTTCAATGAAGCACGGAT-ATAGTA--TTGTT-GATAACCATGACTATCC--ACATCCAACTTGTCTTACAGGATCTTGCTA-TTCACGTGAAATCCTCTA [...]
+        Loxocemus_bicolor           ???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+        Morelia_spilota             ?GCCACACCCCTCACTTCCTCC-------------------CAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAGCCAAAAATCCATATAATTTACCACAAAATAAAG-----CTCTCTC-TCGGCCCCCCCCCTACCCCCCCCC---AARAA-CATTGGGGAR------ACCGGCACACAAAACCA--TTARAAAACTCTTAACAAACCT--CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAATATTCTG----TCCTCATTCTCTTGGTCGTTCTATGCAGCACGAGTT--AACTA-ATCTT-ATTAATCATGGATATTC-TCAAC-CTAAGGGTGTCTCTTAGTCTAGCG-CTTCCCGTGAAATCCTCTA [...]
+        Morelia_bredli              ?GCCAC-CCCCTCACTTCCT---------------------TAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAACCAAAAATCCATATAATTTACCACAAAATAAAG-----YTYTYTY-TYGGCCCCCCCCCTACMCCCCCCC--AAAGAA-CATTGGGAAA------ACCGGCACACAAAACTA--TTAGAAAACTCTTAACAAACCC--CTCTATGTATAATCTTACATTAATGGTTTGCCTCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAACATTCCG----TCCTCATTCTCCTGGTCGTTCTATGCAGCATGAGTT--AACCA-ATCTT-ATTGATCATGGATATTC-TTAAC-CTAAGGGTGTCTCTTATCCTAGCA-CTTCCCGTGAAATCCTCTA [...]
+        Morelia_carinata            ?GCCACAACCCTCACTTCCT--------------------AAAACCATAGTCTGTAAA--TACAGACTATGGT--TCTTACCTCAATATA-AAGCCAAAAACCCATATAAAACGC-ACACAATAAAACG---CTCTC-C-TCGGCCCCCCCCCTACCCCCCCC--ATAATAAACATAGGAGAA------ATCAGCACACAAAACTA--CTGAAGATACCCCCTCATCTCT--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGTCTAAATAAGCCTTCGTACAGAATATTTAG----TCCTCATTTTC-TGGTCGTTCAATGCAACACGGATT--AATGG-ATCTT-ACTAACCATGGCTATCC-TTGAT-CAAGKGGKGTCTYTTAATCTAGTA-CTTCCCGTGAAACCCTCTA [...]
+        Morelia_amethistina         ?ACCACACCCCTCACTTCCTC--------------------CAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAGCCAAAAATCCATATAATTTACCACAAAATAAAG-----CTCTCTC-TCGGCCCCCCCCCTACCCCCCCCC--AAAAAAACATTGGGGAA------ACCGGCACACAAAACCA--TTAAAAAACTCTTAACAAACCT--CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAATATTCTG----TCCTCATTTTCTTGGTCGTTCTATGCAGCATGAGCT--AACTA-ATCTT-ATTAATCATGGATATTC-TTAAC-CTARGGGTGTCTCTTAGTCTAGCG-CTTCCCGTGAAATCCTCTA [...]
+        Morelia_oenpelliensis       ?GCCAC-MCCCTCACTTCCT---------------------TAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAACCAGAAATCCATATAATTTACCACCAAATAAAG-----YTYTYTY-TYGGCCCCCCCCCTACCCCCCCCC--AAAGAA-CATTGGGAGA------ACCGGCACACAAAATTA--TTAGAAGACTTTTAACATACCC--CTCTATGGATAATTTTACATTAATGGTTTGCCTCATGRATATTAAGCAGGGAWTTCCCTTTTATTATTTTAGTCTAAAACGGCCCTTGTACCAGACATTCCG----TCCTCATTCTCCTGGTCGTTCTATGCAGCATGAGTT--AACCA-ATCTT-ATTGATCATGGATATTCCTTGAC-CTAAGGGTGTSTCTTATCCTAGCA-CTTCCCGTGAAATCCT-TA [...]
+        Morelia_boeleni             ??AAACAACCCTCACTTCCTT--------------------CAACCATAGTCTGGA---TTCCAGACTATGGT--TGTTACCTAAAAAACTAAAGAAAAAATCCATATAAAC----------TAAAAA----CTCTCTC-TCGGCCCCCCCCCTACMCCCCCC---GGGTCAGCACAAAAAC-------ATCAC---------------CCAAAAATCCCCCTTTTT-CC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATAATAAGCAGGAATTTCCCTTTAAATATTTTAGTCTAAAATAGCCTTT-TACATAAAATTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAT-ATCTT-ATTGATCATGGATATCC-TTGGT-CTAATGGTGTCTCTTAGTCTAACA-CTTCCCGTGAAATCCTCTA [...]
+        Morelia_viridisS            ?A-ATCAACCCTCACTTCCTCC-------------------TAGCCATAGTCTGTAAG-TTACAGACTATGGCT--CATGCCTTAATATATAAACCAAAAACCCATATAAT-CACTGAACAATAAAA-----CTYTYTYCTCGGCCCCCCCCCTACCCCCCCC---GGAAAACCATAAAA---------ATCAGCACATAAATAAA--CCTACTAATCCCATTGCTTCCT---CCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAATTAGCTTCCGTACAAAATATCTAG----CCCTCATTTTC-TGGTCGTTCAATGCAATCGGGGTT--AATAA-ATCTT-ACTAACCATGGATATCC-TTGAT-CAGGTGGTGTCTCTTAATTTAGTA-CTTCCCGTGAAATCCTCTA [...]
+        Morelia_viridisN            ?A-CTCAACCCTCACTTCCTTC-------------------CAGCCATAGTCTGTAAA-TTACAGGCTATGGCT--CATACCTTGATATATAAACCAAAAACCCATATAATTCACCACACAACAAAA-----CTCTCTCCTCGGCCCCCCCC-TACCCCCCCCC--GGAAAAACATAGAAGAA------GTCAGCACAATTAAACT--TACTGATAACCCCTTGCTTCCT--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAAATAGCCTTTGTACAAAATACCTTG----TCCTCATTTTC-TGGTCGTYCAATGCAATCGGGTCT--AACAA-ATCTT-ACTAACCATGGATATCC-TTGAT-CAAGTTGTGTCTCTTAATCTAGTAACTTCCCGTGAAATCCTCTA [...]
+        Liasis_olivaceus            ?GCCACAACCCTCACTTCCCC-----------------ACCTAACCATAGTCTGTAAA-TTACAGACTATGGT--TGATACCTTAATACA-AAGCCGAAACCCCATATAAACAGCACCACAACAAAA----CTCTACTC-TCGGCCCCCCCCCTACMCCCCCCC--ACAAAAACATAGGARAA------ATCAGCACAAACAATC---MCCTAAAATCCCCCCTTAACCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGTCTGAATTAGCCCTTGTACAAAAAATCTTG----TCCTCATTTTC-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGTA-CTTCCCGTGAAATCCTCTA [...]
+        Liasis_mackloti             ?ACCACAACCCTCACTTCCTT--------------------CAGCCATAGTCTGTAA-TTTACAGGCTATGGC--TGATACCTTAATATA-AAACCAAAATCCCATATAAATACCACCACAACAAAG-----CTCTCTC-TCGGCCCCCCCCCTACMCCCCCCC--ACCAAAACATAGAARAA------ATCAGCACAATAAATA---CTARAAGTATTTGCTTCCTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTTAGCAGGAATTTCCCTTTAAATATTTTAGCCTAAAATAGCCTTTGTACACAAAACTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATTTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCATGATTTAGTA-CTTCCCGTGAAATCCTCTA [...]
+        Liasis_fuscus               ?ACCACAACCCTCACTTCCTC--------------------CAGCCATAGTCTGTAA-TTTACAGGCTATGGC--TGATACCTTAATATA-AAACCAAAATCCCATATAAATACCACCACAACAAAG-----CTCTCTY-TCGGCCCCCCCCCTACCCCCCCCC--ACCAAAACATAGAAGAA------ATCAGCACA-AAATAACA-CTAGAAGTATTACTTCCTTGCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTTAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAAATAGCCTTCATACATAAAATTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATGG-ATTTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCATGATTTAGTA-CTTCCCGTGAAATCCTCTA [...]
+        Liasis_albertisii           ????GCTCCTCTCACTTCCTC--------------------AGACCACAGTCTGCAA---TGCAGACTGTGGTTTTGTGCCCAGAATATA--AACCAAAAAACCATATAAACAACACCRCGACAAAAAAGA--TCTCTC-TCGGCCCCCCCCMTACMCCCCCCC--AAAAAAACATARAGGAA------ATCAG--------------------TTCATARACT--------CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAATCTAAATTAGCCTTCGTACACAAAATTCAG----TCCTCATTCTC-TGGTCGTTCAATGCAGCACGGATT--AATCA-GTCTT-ACTAACCATGGATATCC-TTGAT-CTAGTCGTCTCTCTTAGTCTAACA-CTTCCCGTGAAACCCTCTA [...]
+        Apodora_papuana             ?GCCACAACCCTCAMTTCCTT--------------------CAGCCACAGTYTGTAA-TTTACAGACTGTGGC--CCATGCCTCAATATA-AAGCCGAAAATCCATATAAATAACACCAAAACAAAG-----CTYTCCC-TYGGCCCCCCCCCTACCCCCCCCC--AAAAAAATATAGAGAA------CTATAGAACAAATAACCA---CCAAGAAGTTCACTATCCCCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTTCCCTTTAAATATTTTAGTCTAAATATGCCCTTGTACACAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCCAGGAAT--AATCA-ATCTT-ATTAACCATGGATATCC-TTGAT-CTAGTGGTGTCTCTTGGTCTAGTA-CTTCCCGTGAAATCCTCTA [...]
+        Bothrochilus_boa            ??GCACCGCCCTCACTTCCTC--------------------CGACCGCAGTCTGCC----AGCAGGCTGCGGTC-GCATGCCCAAAAACACAAACCAAAAAACCATATAAACAACGCCGCAACAAAAGG----YCYCYC-TCGGCCCCCCCCCTAC-CCCCCCC-ACAAAAAACATAGAGAAA------ATCAG--------------------TTTTCACAC---------CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAATCTAAAATAGCCTTTGTACATAAAATTTGC----CCCTCATTTCT-TGGTCGTTCAATGCAGCATGGATT--AATCA-GTCTT-ATTAACCATGGATATTC-TCAGT-CTAGTTGTGTCTCTTAGCCTAACA-CTTCCCGTGAAATCCTCTA [...]
+        Antaresia_maculosa          ?ACCACAACCCTCACTTCCTCC--------------------AGCCATAGTCTGTAAATTTACAGACTATGGC--TGATACCTCAACATA-CAGCCAAAATTCCATATAATAT-CCCCACAACAA-----CTYTYTYTCYTYGGCCCCCCCCCTACCCCCCCC---ATCCAAATATATAAGAA------ATCAGCACAATAAACCT--ACTAGGAATTGCCAATAACTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTCAAATATTTTAGTCTAAAATAGCCTTTGTACAGAATATTTAG----TCCTCATTTCT-TGGTCGTTCAATGCAACACGGATT---ATCAGTTCTT-ACTAACCATGGATATCC-TTGAT-CTAGTGGTGTCTCTTAATCTAGTA-CTTCCCGTGAAATCCTCTA [...]
+        Antaresia_stimsoni          ?ACCACAACCCTCAMTTCCTTTCAGCCATAGTCTGTAAATACAGCCATAGTCTGTAAA--TACAGACTATGGC--TGATACCGCCATATA-GAGCCGAAAACCCATATAATATGCCACACAATAAA------CTYTYTCCTYGGCCCCCCCCCTACCCCCCCCC--ATTAAAACATATGGGAA------AACAGCACAAATACATA--TTAAAGAATGTCCAATTAATCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGCCTAAAATGTCCTTCGTACAGAATATTAAG----TCCTCATTTTC-TGGTCGTTCAATGCAATCAGGATT--AATCA-TTCTT-ACTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCTTAATTTAGTA-CTTCCCGTGAAATCCTCTA [...]
+        Antaresia_childreni         ?GCCACAACCCTCACTTCCTTCCAGCCATAGTCTGTAAATACAGCCATAGTCTGTAAA--TACAGACTATGGC--TGATGCCGCCATATA-GAGCCGAAAAACCATATAATATACCACACAATAAA------CTCTCTCCTCGGCCCCCCCCCTACCCCCCCCC--ATTAAAACATATGGGAA------AGCAGCACAAATACATA--TTAAAGAATGTCCAATTAATCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGTCTAAAATGTCCTTTGTACAGAATATTTAG----CCCTCATTCTC-TGGTCGTTCAATGCAATCGGGATT--AATCA-TTCTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCTTGATTTAGTA-CTTCCCGTGAAATCCTCTA [...]
+        Antaresia_perthensis        ?ATCA-AACCC------------------------AAAACTAAGCCACAGCCTGTTT--AAACAGGCTGTGGC--TGATGCCGCCATACA-AAGCCGAAATTCCATATAACACACCACAATATAAA------CTYTYTCCTYGGCCCCCCCCCTACCCCCCCCC--AACCAAACATATAAGAA------AACAGAACAGTGAACAA--TTAGAGATTCTCCAATTAACTC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGGATATTAAGCAGGAATTTCCCTTCAAATATTTTAGTCTAAAATAGCCTTTGTACAGTCTATTTAG----TCCTCATTTTC-TGGTCGTTCAATGCAGCATGGATT--AATCA-TTCTT-ACCGATCATGACTATCC-TTGAT-CTAGTGGTGTCTCTTAATTTAGTA-CTTCCCGTGAAATCCTCTA [...]
+        Antaresia_melanocephalus    ?ACCACA----------CCTTCC-----------------CCAACCATAGTCTGTAACC--ACAGACTATGGT--CGATGTCTCAATATA-AAGCCAAAAATCTATATAAATAAA-ACACAATAAAG-----CTCTCTCCTCGGCCCCCCCC-TACMCCCCCC--ACAAGAAATATAGAAGAA------ACCAGCACATAAGACTA--TAAGGATTCCCCCCTTCTTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTCCCATTTAAATATTTTAATCTAAATTTGCCTTTGTACTTAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGCT-CTTCCCGTGAAATCCTCTA [...]
+        Antaresia_ramsayi           ?ACCACG----------CCTTCC-----------------CCA-CCATAGTCTGTAAA-TTACAGACTATGGT--CGTTGCCTCAACATA-AAGCCAAAAACCCATATAAACAAAAC--ATATAAA----CTCTCTCTCCTCGACCCCCCCC-TACCCCCCCC--ACAAGAAATATAGAAGAA------ACCAGCACATAAGACTA--TAAGGATTTCCCCCTCCTTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTCCCATTTAAATATTTTAATCTAAATT-GCCTTCGTACCTAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGCT-CTTCCCGTGAAATCCTCTA [...]
+        Python_reticulatus          ??CCATCACCCTCACTTCCTCC--------------------AACCATAGCCAAATA-TTT---GGCTATGGTT-TCATGCCAAAATATATCAACCAAAAACCCATATTAATATAATGCTATAAAATGG-------TCCCTCGACCCCCCCCCTACCCCCCCC--AAAAAA--CATAAGGAAA------GTCCG-CACATCATAAACCTCGTACTTTTCCCTATTTTTT-GCTCCTATGTATAATCTTACATTAATGGCTTGCCCCATGGATAATAAGCAGGAATTTCCCTTTTAATATTTTAGTCTAAATTAGCCTTCGTACAGGTAATTCAGT----CCTCATTTTC-TGGTCGTTCAATGCAGCATGGATT--AATAA-TTGTT-GATAACCATGGATATCC-TTGAT-CTAGTTGTGTCCCTTGATTTAACA-CTTCCCGTGAAATCCTCTA [...]
+        Python_timoriensis          TA-CACCACCA------------------------------AGACCATAGTCGGTAAATC----GACTATGGTCTTTTTACGCCAAAAATACAACCAAAAATCCATATTAATATAGCAATATAAAATAG-------CCCCTCGACCCCCCCCCTACCCCCCCCC-ACAAAAA-TATAAAGAAA------ACCCG-TATGTCATAAACTCCGAATTTTTCCCTATTTTT--GCCCCTATGTATAATCATACATTATTGGCTTGCCCCATGGATAATAAGCAAGAATTCCCTTTTTAATATTTTAGTCTAAAATTGCCTTT-TACAAAAAACTCAGT----CCTCATTTCT-TGGTCGTTCAATGCAGCATGGGCT--AATAA-TTATT-AATAACCATGACTATCC-TTGAT-CTAGTTGTGTCTCTTAGTTTGGTA-CTTCCCGTGAAATCCTCTA [...]
+        Python_sebae                ?????????????????????????????????????????????????????????????????????????????CTTCCTCAGACAC-AAACTCA-ACCTCAAATAAAAATAAAAATAAT-----------CCTACCTCGGCCCCCCCCCTACCCCCCCC--ACTATTT-CATATGGAA-------TACAGGATATATAC-TTTGTTAGAAAAATCCATATTTTTTCTACCCTATGTATAATCTTACATTAATGGCTTGCCCCATGAATAATAAGCGGGAATTCCTAATAAAATATTTTAGCCTAAAATTGCCTTCGTACATAAAATT-AGC---TCCACATTTCTTTGGTCGTTCAATGCTGCANGGATTATAGTAC-TTCTT-AATACACATGACTATCC-TTGAT-CTAGTCGTCTCTCTTAACTTAACA-CTTCCCGTGAAATCCTCTA [...]
+        Python_molurus              ?????????????????????????????????????????????????????????????????????????????CTTCCTCAGACAC-AAACTCA-ACCTCAAATAAAAATAAAAACAAT-----------CCTACCTCGGCCCCCCCCCTACCCCCCCCC-ACTATTT-CATATGGAA-------TACAGGATATATACATTTGTTAGAAAAATCCATATTTTTTCTACCCTATGTATAATCTTACATTAATGGCTTGCCCCATGAATAATAAGCGGGAATTCCTAATAAAATATTTTAGCCTAAAATTGCCTTCGTACATAAAATT-AGC---TCCACATTTCTTTGGTCGTTCAATGCTGCACGGATTATAGTAC-TTCTT-AATACACATGACTATCC-TTGAT-CTAGTCGTCTCTCTTAACTTAACA-CTTCCCGTGAAATCCTCTA [...]
+        Python_curtus               ?CCACAAAA-----------------------------------CCAT-----------------ATTAATYTT--CCCACCTATAAYTA-AACCCGAAATTCCCTATAAA--CACAACAAAAAATA-----CTCCTTCYTCGCCCCCCCCC-TACCCCCCCCCCAC-ATTT-AATATAAGAT------TCTGG--AATATACACACATCGTTAATTTCCATATTTTTT--ATGCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATAATAAGCCGGAATTCCATATTAAATATTTTAGCCTAAAATTGCCTTAGTACCTAAAACT-AGTCCTTCCTCATTTTC-TGGTCGTTCAATGCTGCATGGATT--AATCA-TTCTTTAACAGATATGTCTATCC-TTGAT-CTAGTCGTCTCTCTTAACCTGGCG-CTTCCCGTGAAATCCTCTA [...]
+        Python_regius               ?????????????????????????????????????????????????????????????????????????????TTACCTCAAT----AAACCCAAACCCACTATAAAAATATAA-----------------CCCCCTCGGCCCCCCCCCTTCCCCCCCCC-ACTTACA---TAGGAGGA------TTTAG-ATATATACACATATTAGGATTTTCCCTATCTTTTC-ACCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAACCAGAATTTCCAATTAAATATTTTAACCTAAAATTGCCTTCGTACACTACACC-AGT---CCCTCATTTCT-TGGTCGTTCAATGCTGCACGGATTATAGTAC-TTATT-AATGCTCATGTCTATCC-TTGGT-CTAGTGGTGTCTCTTAGTTTAACA-CTTCCCGTGAAATCCTCTA [...]
+        Candoia_aspera              ?????????????????????????????????????????????????????????????????????????????????????????????????AAA----CTA-------------------------CT-CTCTGG-GACCCCCCCC-TACCCCCCCC--AGATAAACTATACTAAAATTTACCTGAGTACACTATGTAAATATTGTACATTAGTCTATATTTC--ATGCTATGTATAATCATACATTAATGATCTGCCCCATGGATAATAAGCAGGAATTTCCCTATTAATATTTCAGCCTATTAATGCCTTAGTACAGTCAGTGTGTC---ACCACATCAT--GGGTCGTTTTATGCAGCAAGGATTA-ACTA--TTATT-GGTAATCATGCCTATCC--TGATCCAAGTTGTC-CTCTTAATCTACCTA-CTCACGTGAAATCCTCTA [...]
+        Morelia_nauta               ???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+        Morelia_clastolepis         ???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+        Morelia_tracyae             ???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+        Morelia_kinghorni           ???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+    ;
+END;
+
+
+BEGIN TREES;
+    TITLE 4325835408;
+    LINK TAXA = 4300387408;
+    TREE 11 = (Candoia_aspera:0.3846478,(Loxocemus_bicolor:0.2549529,((((Python_sebae:0.06807004,Python_molurus:0.03043545):0.01465748,Python_curtus:0.09600346):0.01247736,Python_regius:0.09841203):0.03751737,((Python_timoriensis:0.06586915,Python_reticulatus:0.04536943):0.07520284,(((((Antaresia_childreni:0.01730015,Antaresia_stimsoni:0.01262602):0.02887657,Antaresia_perthensis:0.06405242):0.01385353,Antaresia_maculosa:0.07585236):0.008884809,(Morelia_carinata:0.05554095,(Morelia_viridi [...]
+    TREE 12 = ((Loxocemus_bicolor:0.2306511,((Python_regius:0.1249936,(Python_curtus:0.1161678,(Python_sebae:0.07594277,Python_molurus:0.04880214):0.01050635):0.009484123):0.05837598,((((Bothrochilus_boa:0.06362487,Liasis_albertisii:0.04429535):0.05290393,(Morelia_boeleni:0.0975965,(((Morelia_tracyae:0.02920131,(Morelia_amethistina:0.02703314,((Morelia_kinghorni:0.0078452,Morelia_nauta:0.005866894):0.005442751,Morelia_clastolepis:0.003560364):0.01476575):0.008177027):0.02992863,Morelia_o [...]
+    TREE 13 = (Candoia_aspera:0.5080599,(Loxocemus_bicolor:0.2965857,(((Python_reticulatus:0.05785765,Python_timoriensis:0.07319806):0.05928309,(((Bothrochilus_boa:0.06122711,Liasis_albertisii:0.06414279):0.03891576,(((((Liasis_fuscus:0.01426001,Liasis_mackloti:0.01729048):0.04258978,Liasis_olivaceus:0.04600247):0.0113502,Apodora_papuana:0.05986282):0.01038398,(Antaresia_ramsayi:0.03308029,Antaresia_melanocephalus:0.0411259):0.04100124):0.01325467,((Morelia_carinata:0.06009035,(Morelia_v [...]
+    TREE 14 = (((((Python_reticulatus:0.06098649,Python_timoriensis:0.06479356):0.06867842,((Morelia_boeleni:0.1040542,((Bothrochilus_boa:0.05386548,Liasis_albertisii:0.06105181):0.04408371,(((Liasis_olivaceus:0.04827163,(Liasis_fuscus:0.01955875,Liasis_mackloti:0.01269835):0.03672677):0.01628567,Apodora_papuana:0.06293987):0.03062699,(Antaresia_ramsayi:0.03533973,Antaresia_melanocephalus:0.04486755):0.05072205):0.00297664):0.004512241):0.001240846,(((Morelia_carinata:0.05786984,(Morelia [...]
+    TREE 15 = (((((Python_reticulatus:0.05439983,Python_timoriensis:0.08321393):0.04871133,((Morelia_boeleni:0.08585758,((Bothrochilus_boa:0.05362335,Liasis_albertisii:0.04403941):0.03032729,((Morelia_oenpelliensis:0.05763201,(((Morelia_kinghorni:0.00341021,Morelia_clastolepis:0.006671437):0.007917761,Morelia_nauta:0.001951335):0.01049725,(Morelia_tracyae:0.05625849,Morelia_amethistina:0.02860654):0.0006372106):0.03067896):0.01095649,(Morelia_spilota:0.02808814,Morelia_bredli:0.03112591) [...]
+    TREE 16 = (((((Python_reticulatus:0.04805722,Python_timoriensis:0.05322303):0.0782823,(((((Antaresia_perthensis:0.06330741,(Antaresia_childreni:0.02077168,Antaresia_stimsoni:0.01869424):0.03122662):0.01200088,Antaresia_maculosa:0.05309401):0.006744738,(Morelia_carinata:0.073235,(Morelia_viridisN:0.03121673,Morelia_viridisS:0.04001572):0.01598083):0.01197911):0.01512452,((Apodora_papuana:0.03974916,((Liasis_fuscus:0.01597086,Liasis_mackloti:0.01058074):0.05649681,Liasis_olivaceus:0.04 [...]
+    TREE 17 = ((Loxocemus_bicolor:0.2487605,((Python_regius:0.1233404,(Python_curtus:0.1077844,(Python_molurus:0.0373181,Python_sebae:0.0759607):0.02438578):0.01916703):0.03225566,((Python_reticulatus:0.06111238,Python_timoriensis:0.06333372):0.06545558,(((Morelia_spilota:0.0228283,Morelia_bredli:0.02804907):0.03880804,((((Morelia_nauta:0.001533473,(Morelia_clastolepis:0.003356968,Morelia_kinghorni:0.007887915):0.005664857):0.01199247,Morelia_amethistina:0.02933171):0.01226355,Morelia_tr [...]
+    TREE 18 = ((Loxocemus_bicolor:0.2507472,((Python_regius:0.1105118,(Python_curtus:0.1086967,(Python_sebae:0.07728629,Python_molurus:0.0423532):0.0213407):0.01968316):0.06155854,((Python_timoriensis:0.07817613,Python_reticulatus:0.07259407):0.07650008,(((((Morelia_viridisS:0.0465201,Morelia_viridisN:0.02961522):0.02715512,Morelia_carinata:0.07196027):0.01556349,(Antaresia_maculosa:0.06562058,((Antaresia_stimsoni:0.01004443,Antaresia_childreni:0.02254039):0.04174026,Antaresia_perthensis [...]
+    TREE 19 = (Candoia_aspera:0.3965166,(Loxocemus_bicolor:0.2679677,((Python_regius:0.1029863,(Python_curtus:0.09892457,(Python_sebae:0.07480435,Python_molurus:0.04166847):0.01203458):0.03734556):0.02161339,((Python_timoriensis:0.0483175,Python_reticulatus:0.05879262):0.06698997,(((Bothrochilus_boa:0.06842803,Liasis_albertisii:0.05331119):0.0440924,((Antaresia_melanocephalus:0.0347434,Antaresia_ramsayi:0.03256232):0.03610559,(Apodora_papuana:0.07975386,((Liasis_mackloti:0.01064635,Liasi [...]
+    TREE 20 = (Candoia_aspera:0.4488251,(Loxocemus_bicolor:0.2850208,((Python_regius:0.1072431,(Python_curtus:0.07993694,(Python_sebae:0.07800197,Python_molurus:0.03348335):0.01611311):0.008776165):0.03537475,((Python_reticulatus:0.05680797,Python_timoriensis:0.06572175):0.05325016,((((Antaresia_perthensis:0.06941898,(Antaresia_childreni:0.0139039,Antaresia_stimsoni:0.0148759):0.04123835):0.00879989,Antaresia_maculosa:0.06008866):0.007626708,(Morelia_carinata:0.07607666,(Morelia_viridisN [...]
+    TREE 21 = ((((Python_timoriensis:0.07117878,Python_reticulatus:0.04821365):0.07463146,(((((Antaresia_perthensis:0.08739655,(Antaresia_stimsoni:0.01621692,Antaresia_childreni:0.02665731):0.04022228):0.008193846,Antaresia_maculosa:0.07342905):0.001017085,((Morelia_viridisS:0.04622026,Morelia_viridisN:0.03667079):0.02958224,Morelia_carinata:0.06285014):0.01282166):0.02229121,(((((Morelia_clastolepis:0.003050844,(Morelia_kinghorni:0.003807813,Morelia_nauta:0.01326833):0.002467171):0.0073 [...]
+    TREE 22 = ((Loxocemus_bicolor:0.196467,Candoia_aspera:0.4000064):0.04033051,((Python_regius:0.1172232,((Python_sebae:0.07022621,Python_molurus:0.03127386):0.01725967,Python_curtus:0.1201985):0.009464382):0.03286576,((Python_timoriensis:0.054655,Python_reticulatus:0.06552768):0.04240201,(((((Antaresia_perthensis:0.05941156,(Antaresia_stimsoni:0.009209314,Antaresia_childreni:0.02528722):0.03086887):0.01323638,Antaresia_maculosa:0.05655764):0.01615778,((Morelia_viridisS:0.03740166,Morel [...]
+    TREE 23 = (Candoia_aspera:0.4592592,(((Python_regius:0.1337113,((Python_sebae:0.05325812,Python_molurus:0.04577269):0.01464595,Python_curtus:0.09770408):0.01604463):0.0451044,((Python_timoriensis:0.07054697,Python_reticulatus:0.06483624):0.07321957,((((Morelia_viridisS:0.04158552,Morelia_viridisN:0.03267045):0.0167798,Morelia_carinata:0.06104408):0.02706362,(Antaresia_maculosa:0.0674958,(Antaresia_perthensis:0.06282181,(Antaresia_stimsoni:0.02007935,Antaresia_childreni:0.01562126):0. [...]
+    TREE 24 = (Candoia_aspera:0.416012,(((((Python_sebae:0.08152061,Python_molurus:0.04775603):0.02331853,Python_curtus:0.09187623):0.0268565,Python_regius:0.1253132):0.04677824,((Python_reticulatus:0.04722766,Python_timoriensis:0.06954232):0.05724026,((((Morelia_spilota:0.02565724,Morelia_bredli:0.02998293):0.03298402,((((Morelia_nauta:0.002384214,(Morelia_clastolepis:0.005820083,Morelia_kinghorni:0.004446333):0.001406536):0.008647775,Morelia_tracyae:0.05487271):0.01087026,Morelia_ameth [...]
+    TREE 25 = ((Loxocemus_bicolor:0.2006045,Candoia_aspera:0.5955799):0.0444977,((Python_regius:0.1046038,((Python_sebae:0.08121141,Python_molurus:0.04485486):0.02542729,Python_curtus:0.09084656):0.0145274):0.05230496,((Python_timoriensis:0.07903435,Python_reticulatus:0.07460281):0.05455651,(((Bothrochilus_boa:0.05585555,Liasis_albertisii:0.05123584):0.03234854,(Morelia_boeleni:0.09132604,((Morelia_oenpelliensis:0.07046467,(Morelia_amethistina:0.0232165,(Morelia_tracyae:0.05046845,((More [...]
+    TREE 26 = ((((((Python_sebae:0.09108678,Python_molurus:0.04955886):0.01999562,Python_curtus:0.1059653):0.01601986,Python_regius:0.1440779):0.02435115,((Python_reticulatus:0.06048541,Python_timoriensis:0.06863296):0.07562467,(((((Morelia_viridisN:0.03454344,Morelia_viridisS:0.05635091):0.02151505,Morelia_carinata:0.06198796):0.01203427,(Antaresia_maculosa:0.0845287,((Antaresia_childreni:0.03515405,Antaresia_stimsoni:0.007169672):0.04130242,Antaresia_perthensis:0.05833049):0.009794389) [...]
+    TREE 27 = (((((Python_timoriensis:0.05751875,Python_reticulatus:0.05970213):0.09181866,(((Antaresia_maculosa:0.06428122,((Antaresia_stimsoni:0.02158674,Antaresia_childreni:0.03257027):0.04245471,Antaresia_perthensis:0.06523106):0.01066557):0.008685391,((Morelia_viridisS:0.05196139,Morelia_viridisN:0.03425494):0.02249345,Morelia_carinata:0.07069562):0.01909782):0.01948604,((((Bothrochilus_boa:0.06369204,Liasis_albertisii:0.04518104):0.02424989,(((Liasis_mackloti:0.01199028,Liasis_fusc [...]
+    TREE 28 = (Candoia_aspera:0.4345551,(Loxocemus_bicolor:0.314093,((Python_regius:0.173824,(Python_curtus:0.1043669,(Python_sebae:0.07896053,Python_molurus:0.05467638):0.01587752):0.03999686):0.04129383,(((Morelia_boeleni:0.0936573,((((Liasis_olivaceus:0.03802104,(Liasis_fuscus:0.02550778,Liasis_mackloti:0.007911858):0.04045908):0.01101188,Apodora_papuana:0.07327953):0.01822209,(Antaresia_melanocephalus:0.03691812,Antaresia_ramsayi:0.02370955):0.0601409):0.007223612,(Liasis_albertisii: [...]
+    TREE 29 = (Candoia_aspera:0.4559287,((((Python_reticulatus:0.05517757,Python_timoriensis:0.06814217):0.08340554,((((((Morelia_tracyae:0.02748149,(Morelia_amethistina:0.03098765,((Morelia_nauta:0.006576406,Morelia_kinghorni:0.007688372):0.0007096533,Morelia_clastolepis:0.00977261):0.0210015):0.006038462):0.02583509,Morelia_oenpelliensis:0.0749369):0.0101301,(Morelia_spilota:0.02004658,Morelia_bredli:0.03451659):0.04024715):0.02746889,(Bothrochilus_boa:0.08344832,Liasis_albertisii:0.04 [...]
+    TREE 30 = (((((((Morelia_bredli:0.02865577,Morelia_spilota:0.03152456):0.02984838,(Morelia_oenpelliensis:0.05957398,((((Morelia_clastolepis:0.009648714,Morelia_kinghorni:0.008176357):0.008419985,Morelia_nauta:0.002053939):0.008951861,Morelia_tracyae:0.03430245):0.007474801,Morelia_amethistina:0.02655718):0.02660193):0.005862491):0.02390784,((((Antaresia_melanocephalus:0.04143373,Antaresia_ramsayi:0.02445915):0.05473143,((Liasis_mackloti:0.01147677,Liasis_fuscus:0.02043269):0.06112321 [...]
+    TREE 31 = (Candoia_aspera:0.3564553,(((((Python_sebae:0.07223247,Python_molurus:0.03344707):0.02744703,Python_curtus:0.100609):0.02068396,Python_regius:0.1083106):0.04848281,((Python_timoriensis:0.07007541,Python_reticulatus:0.06893867):0.08352695,((((Morelia_viridisS:0.05242792,Morelia_viridisN:0.03244932):0.03188631,Morelia_carinata:0.0666426):0.01467226,(Antaresia_maculosa:0.04897325,((Antaresia_stimsoni:0.01523345,Antaresia_childreni:0.02280061):0.03972103,Antaresia_perthensis:0. [...]
+    TREE 32 = (((((((Morelia_boeleni:0.09460149,((Morelia_oenpelliensis:0.06591956,(((Morelia_clastolepis:0.008404051,(Morelia_kinghorni:0.006978938,Morelia_nauta:0.007460564):0.000440772):0.01431461,Morelia_amethistina:0.0208417):0.006610699,Morelia_tracyae:0.02966128):0.05849246):0.008923475,(Morelia_spilota:0.0247648,Morelia_bredli:0.03024164):0.02807078):0.03123918):0.003089671,(Bothrochilus_boa:0.04782091,Liasis_albertisii:0.05497745):0.0331477):0.008159145,(((Liasis_mackloti:0.0134 [...]
+    TREE 33 = ((Loxocemus_bicolor:0.2669451,(((Python_reticulatus:0.05321987,Python_timoriensis:0.06997756):0.05861088,((((((Antaresia_childreni:0.01655703,Antaresia_stimsoni:0.01637029):0.03569376,Antaresia_perthensis:0.06286153):0.007241909,Antaresia_maculosa:0.06266587):0.008208767,((Morelia_viridisN:0.03704275,Morelia_viridisS:0.04520784):0.03192493,Morelia_carinata:0.051277):0.0100963):0.01478441,(((Antaresia_melanocephalus:0.04876354,Antaresia_ramsayi:0.02593541):0.04278765,(Apodor [...]
+    TREE 34 = (Candoia_aspera:0.4569261,(((Python_regius:0.1411037,((Python_sebae:0.06459784,Python_molurus:0.03858578):0.01245523,Python_curtus:0.1156069):0.03368835):0.04372914,((Python_reticulatus:0.04306023,Python_timoriensis:0.06315612):0.07695555,(((((Antaresia_ramsayi:0.04512156,Antaresia_melanocephalus:0.03252252):0.04167882,(Apodora_papuana:0.06102348,(Liasis_olivaceus:0.03044151,(Liasis_fuscus:0.02654637,Liasis_mackloti:0.00437689):0.04196502):0.01919925):0.01657837):0.01029805 [...]
+    TREE 35 = ((((((Python_sebae:0.07121849,Python_molurus:0.02621536):0.01770809,Python_curtus:0.1089461):0.03107291,Python_regius:0.1164904):0.05656528,((Python_reticulatus:0.05387978,Python_timoriensis:0.0633591):0.05634522,(((Morelia_boeleni:0.08749596,((Morelia_spilota:0.02898978,Morelia_bredli:0.02387138):0.0409496,(Morelia_oenpelliensis:0.06187724,((Morelia_nauta:0.006906582,(Morelia_kinghorni:0.01526566,Morelia_clastolepis:0.01043315):0.002784817):0.009692299,(Morelia_amethistina [...]
+    TREE 36 = ((Loxocemus_bicolor:0.3172354,(((Python_curtus:0.08136119,(Python_sebae:0.0735508,Python_molurus:0.03640422):0.01557105):0.0277907,Python_regius:0.09717536):0.04494788,((((Antaresia_maculosa:0.06047012,((Antaresia_stimsoni:0.01936481,Antaresia_childreni:0.0204231):0.02860615,Antaresia_perthensis:0.067609):0.005236841):0.01713776,((Morelia_viridisS:0.04331104,Morelia_viridisN:0.02973395):0.02836114,Morelia_carinata:0.05650005):0.01346905):0.01404818,(((Bothrochilus_boa:0.046 [...]
+    TREE 37 = ((Loxocemus_bicolor:0.2740998,Candoia_aspera:0.4155202):0.02116592,(((((Morelia_boeleni:0.06929356,((Morelia_oenpelliensis:0.07583182,(Morelia_amethistina:0.01459558,((Morelia_nauta:0.007322429,(Morelia_clastolepis:0.01181237,Morelia_kinghorni:0.005259852):0.003398728):0.03372015,Morelia_tracyae:0.03434483):0.02091431):0.03202138):0.005658255,(Morelia_bredli:0.02809767,Morelia_spilota:0.03355095):0.02263292):0.02159314):0.00291552,(((Antaresia_melanocephalus:0.05160809,Anta [...]
+    TREE 38 = (Candoia_aspera:0.4059339,(Loxocemus_bicolor:0.3025017,(((((Morelia_boeleni:0.07452276,((Morelia_bredli:0.02106962,Morelia_spilota:0.0320159):0.03108456,(((Morelia_amethistina:0.02608054,((Morelia_nauta:0.009185291,Morelia_kinghorni:0.006006182):0.004924326,Morelia_clastolepis:0.00406642):0.01622507):0.01576933,Morelia_tracyae:0.05538706):0.02692716,Morelia_oenpelliensis:0.05802539):0.005663774):0.02119685):0.005774344,(((Antaresia_melanocephalus:0.04396474,Antaresia_ramsay [...]
+    TREE 39 = (Candoia_aspera:0.3487431,(Loxocemus_bicolor:0.2715437,((Python_regius:0.108148,((Python_molurus:0.04219487,Python_sebae:0.06968664):0.02150992,Python_curtus:0.1232163):0.0239361):0.0522518,(((((Bothrochilus_boa:0.06552829,Liasis_albertisii:0.03917488):0.02331611,((((Liasis_fuscus:0.02518991,Liasis_mackloti:0.00890263):0.04813051,Liasis_olivaceus:0.0405597):0.01507422,Apodora_papuana:0.07516039):0.01507437,(Antaresia_ramsayi:0.04147509,Antaresia_melanocephalus:0.045958):0.0 [...]
+    TREE 40 = (Candoia_aspera:0.5223368,(((Python_regius:0.1034435,((Python_molurus:0.04249723,Python_sebae:0.07799269):0.01292014,Python_curtus:0.09253175):0.02023361):0.03960567,(((((Morelia_spilota:0.02882786,Morelia_bredli:0.02270972):0.02443599,(((Morelia_amethistina:0.02571044,((Morelia_kinghorni:0.01171321,Morelia_nauta:0.00610836):0.00422032,Morelia_clastolepis:0.004021223):0.01317024):0.00131684,Morelia_tracyae:0.03160369):0.03319636,Morelia_oenpelliensis:0.06634761):0.005747809 [...]
+    TREE 41 = (Candoia_aspera:0.4850667,(Loxocemus_bicolor:0.2902032,((Python_regius:0.1196588,(Python_curtus:0.08757062,(Python_sebae:0.07518693,Python_molurus:0.03538358):0.0142014):0.01088135):0.030351,(((((Morelia_viridisS:0.05240148,Morelia_viridisN:0.04396064):0.01791946,Morelia_carinata:0.06703203):0.01547424,(Antaresia_maculosa:0.08728829,(Antaresia_perthensis:0.06958781,(Antaresia_stimsoni:0.01361088,Antaresia_childreni:0.02863435):0.04253736):0.02274367):0.01055237):0.02506572, [...]
+    TREE 42 = (Candoia_aspera:0.4227249,(Loxocemus_bicolor:0.2630346,((Python_regius:0.1292282,((Python_sebae:0.07375409,Python_molurus:0.05145713):0.009915304,Python_curtus:0.104214):0.02208551):0.03973028,((Python_timoriensis:0.0722339,Python_reticulatus:0.07091186):0.05739378,((((Morelia_boeleni:0.09101835,((Morelia_bredli:0.0278041,Morelia_spilota:0.03169677):0.02124261,(Morelia_oenpelliensis:0.05296945,((Morelia_tracyae:0.05372661,((Morelia_clastolepis:0.008265544,Morelia_kinghorni: [...]
+    TREE 43 = (Candoia_aspera:0.5153805,((((Python_curtus:0.07863771,(Python_sebae:0.08222992,Python_molurus:0.04254121):0.01317515):0.02583793,Python_regius:0.137146):0.05263082,(((Morelia_boeleni:0.1016967,(((Bothrochilus_boa:0.04778673,Liasis_albertisii:0.05529477):0.04329956,(Antaresia_ramsayi:0.02994375,Antaresia_melanocephalus:0.0416666):0.05128635):0.001794929,(Apodora_papuana:0.06170972,(Liasis_olivaceus:0.03671799,(Liasis_mackloti:0.006090192,Liasis_fuscus:0.02565038):0.03958824 [...]
+    TREE 44 = ((Loxocemus_bicolor:0.2648974,((((((Morelia_spilota:0.01775619,Morelia_bredli:0.02572672):0.03433169,((Morelia_tracyae:0.03554912,(((Morelia_kinghorni:0.004910661,Morelia_nauta:0.01120176):0.01147165,Morelia_clastolepis:0.003855517):0.01543003,Morelia_amethistina:0.02183181):0.012722):0.02702229,Morelia_oenpelliensis:0.05908537):0.007540966):0.02306979,(((((Antaresia_childreni:0.02080121,Antaresia_stimsoni:0.01730966):0.02730232,Antaresia_perthensis:0.0794485):0.01868214,An [...]
+    TREE 45 = (((((Python_reticulatus:0.06082482,Python_timoriensis:0.05976824):0.07470233,(((((Antaresia_childreni:0.01703978,Antaresia_stimsoni:0.01702941):0.03308871,Antaresia_perthensis:0.08043923):0.01555501,Antaresia_maculosa:0.0884772):0.005314314,(Morelia_carinata:0.0651085,(Morelia_viridisN:0.04315993,Morelia_viridisS:0.05203646):0.02278244):0.01707531):0.02346517,(((Antaresia_melanocephalus:0.04749625,Antaresia_ramsayi:0.03669699):0.03379984,((Liasis_olivaceus:0.04033815,Apodor [...]
+    TREE 46 = (Candoia_aspera:0.5322171,((((Python_reticulatus:0.0535918,Python_timoriensis:0.06535871):0.06768871,(((((Antaresia_melanocephalus:0.03916935,Antaresia_ramsayi:0.0373195):0.04072262,(((Liasis_fuscus:0.01736701,Liasis_mackloti:0.01169367):0.0425045,Liasis_olivaceus:0.03651515):0.008463145,Apodora_papuana:0.06127901):0.01813412):0.006101433,(Liasis_albertisii:0.05168434,Bothrochilus_boa:0.05431427):0.03115496):0.005398566,(Morelia_boeleni:0.06280028,(((Morelia_tracyae:0.02379 [...]
+    TREE 47 = ((((((Morelia_boeleni:0.08532348,((Morelia_spilota:0.02557332,Morelia_bredli:0.02833054):0.03910548,(((Morelia_amethistina:0.0250698,((Morelia_nauta:0.008027407,Morelia_kinghorni:0.01428535):0.01094484,Morelia_clastolepis:0.00227683):0.01955651):0.005372779,Morelia_tracyae:0.03145023):0.03441583,Morelia_oenpelliensis:0.05781036):0.005803634):0.02195589):0.002986267,((Bothrochilus_boa:0.06447638,Liasis_albertisii:0.04012674):0.03942857,((Antaresia_ramsayi:0.04033451,Antaresi [...]
+    TREE 48 = (Candoia_aspera:0.4436023,(Loxocemus_bicolor:0.2369616,(((Python_curtus:0.09049056,(Python_molurus:0.029384,Python_sebae:0.0627384):0.01268501):0.01953619,Python_regius:0.1171373):0.04828605,((Python_timoriensis:0.06139592,Python_reticulatus:0.0646338):0.07305738,(((((Antaresia_stimsoni:0.02057256,Antaresia_childreni:0.02154838):0.03608791,Antaresia_perthensis:0.07564235):0.01173533,Antaresia_maculosa:0.07024714):0.01354829,(Morelia_carinata:0.0709676,(Morelia_viridisS:0.04 [...]
+    TREE 49 = ((Loxocemus_bicolor:0.3405555,(((((((Morelia_spilota:0.02807685,Morelia_bredli:0.02566795):0.03087109,(Morelia_oenpelliensis:0.06798181,(Morelia_tracyae:0.02098472,(Morelia_amethistina:0.02311013,((Morelia_nauta:0.004146555,Morelia_kinghorni:0.009369746):0.00429045,Morelia_clastolepis:0.008566892):0.01380615):0.006610653):0.03471999):0.003897041):0.01810336,((Morelia_carinata:0.06383306,(Morelia_viridisN:0.03476118,Morelia_viridisS:0.05680829):0.02523946):0.009249984,(Antar [...]
+    TREE 50 = (Candoia_aspera:0.365292,(Loxocemus_bicolor:0.310124,((((Python_sebae:0.08393153,Python_molurus:0.04102906):0.02198402,Python_curtus:0.08610296):0.01361938,Python_regius:0.1521334):0.04792291,((Python_timoriensis:0.08534907,Python_reticulatus:0.06216048):0.07485164,(((((((Morelia_amethistina:0.03515294,(Morelia_clastolepis:0.004999444,(Morelia_nauta:0.009645939,Morelia_kinghorni:0.008671095):0.0001768814):0.02966952):0.01516104,Morelia_tracyae:0.04027297):0.01631001,Morelia [...]
+    TREE 51 = (Candoia_aspera:0.5449368,((((Python_reticulatus:0.09099772,Python_timoriensis:0.07144241):0.05727305,(((Morelia_carinata:0.0577648,(Morelia_viridisN:0.03065865,Morelia_viridisS:0.04847735):0.02928353):0.008153621,(Antaresia_maculosa:0.05881195,(Antaresia_perthensis:0.06472734,(Antaresia_childreni:0.02932234,Antaresia_stimsoni:0.02067186):0.04013394):0.01273477):0.01701452):0.01169918,(((Antaresia_ramsayi:0.03842948,Antaresia_melanocephalus:0.03766409):0.0452689,((Liasis_ol [...]
+    TREE 52 = (Candoia_aspera:0.4119319,(((Python_regius:0.1152994,((Python_sebae:0.07552402,Python_molurus:0.03490164):0.01108603,Python_curtus:0.1145467):0.01901207):0.0443746,((((Morelia_carinata:0.06566802,(Morelia_viridisN:0.03596026,Morelia_viridisS:0.0471616):0.01708947):0.01026524,(Antaresia_maculosa:0.06025642,(Antaresia_perthensis:0.05098101,(Antaresia_childreni:0.01934081,Antaresia_stimsoni:0.0135667):0.03058171):0.009517906):0.01267591):0.008263889,((((Morelia_spilota:0.02344 [...]
+    TREE 53 = ((((Python_regius:0.1254009,(Python_curtus:0.08753147,(Python_molurus:0.04027961,Python_sebae:0.08576586):0.02140595):0.02317131):0.0390212,((((Morelia_boeleni:0.07962354,((Morelia_spilota:0.03086129,Morelia_bredli:0.03248024):0.03102577,((Morelia_tracyae:0.04475659,(Morelia_amethistina:0.03098438,((Morelia_kinghorni:0.008669885,Morelia_nauta:0.005987475):0.002591122,Morelia_clastolepis:0.002634122):0.01566736):0.01338949):0.02701571,Morelia_oenpelliensis:0.07358275):0.0088 [...]
+    TREE 54 = (Candoia_aspera:0.4297135,((((((Antaresia_maculosa:0.07003845,(Antaresia_perthensis:0.06743232,(Antaresia_childreni:0.02884627,Antaresia_stimsoni:0.02139588):0.03896306):0.01121203):0.008262347,((Morelia_viridisS:0.046964,Morelia_viridisN:0.03312958):0.03465966,Morelia_carinata:0.06247932):0.01828836):0.01619593,((((Apodora_papuana:0.04879782,((Liasis_mackloti:0.01053472,Liasis_fuscus:0.02365744):0.04023937,Liasis_olivaceus:0.03599876):0.01405198):0.01631438,(Antaresia_mela [...]
+    TREE 55 = (Candoia_aspera:0.4032337,(((((Python_molurus:0.03143687,Python_sebae:0.09823878):0.02140932,Python_curtus:0.09608987):0.01506344,Python_regius:0.1271778):0.05095627,((((((Morelia_oenpelliensis:0.0740013,((Morelia_amethistina:0.02689424,(Morelia_nauta:0.005721368,(Morelia_clastolepis:0.00501394,Morelia_kinghorni:0.007579701):0.008862685):0.01332026):0.002459013,Morelia_tracyae:0.03682387):0.0356683):0.005829372,(Morelia_spilota:0.02072617,Morelia_bredli:0.0303542):0.0297485 [...]
+    TREE 56 = ((((Python_curtus:0.09610909,(Python_sebae:0.0763935,Python_molurus:0.04010346):0.01665276):0.01579041,Python_regius:0.1201013):0.02228269,((((((Antaresia_stimsoni:0.009933382,Antaresia_childreni:0.02504422):0.03898344,Antaresia_perthensis:0.07441505):0.01655102,Antaresia_maculosa:0.06341264):0.00423628,(Morelia_carinata:0.06782785,(Morelia_viridisS:0.0406782,Morelia_viridisN:0.03474428):0.02682416):0.01516531):0.02205259,((Morelia_boeleni:0.07700987,((((Morelia_amethistina [...]
+    TREE 57 = (Candoia_aspera:0.4153505,(Loxocemus_bicolor:0.2791384,((Python_regius:0.1064145,(Python_curtus:0.1071383,(Python_sebae:0.07241633,Python_molurus:0.04887044):0.01224031):0.020618):0.04332308,((Python_reticulatus:0.06484751,Python_timoriensis:0.05405059):0.04751213,((((Bothrochilus_boa:0.05362835,Liasis_albertisii:0.04960141):0.0356816,((Antaresia_ramsayi:0.03268517,Antaresia_melanocephalus:0.03758129):0.03736965,((Liasis_olivaceus:0.04277857,Apodora_papuana:0.06016667):0.00 [...]
+    TREE 58 = (Candoia_aspera:0.4873354,(Loxocemus_bicolor:0.2332783,(((((((((Liasis_mackloti:0.007309739,Liasis_fuscus:0.0279446):0.05690776,Liasis_olivaceus:0.03166848):0.01372647,Apodora_papuana:0.0585181):0.01686857,(Antaresia_melanocephalus:0.04486908,Antaresia_ramsayi:0.03001735):0.04537131):0.007726529,(Bothrochilus_boa:0.08559648,Liasis_albertisii:0.06387931):0.0357606):0.007852339,Morelia_boeleni:0.08361377):0.0009829988,(((Antaresia_maculosa:0.07797792,(Antaresia_perthensis:0.0 [...]
+    TREE 59 = ((Candoia_aspera:0.4578877,(((Python_reticulatus:0.06169376,Python_timoriensis:0.08422366):0.05572204,(((Morelia_boeleni:0.0931297,((((Antaresia_childreni:0.02158899,Antaresia_stimsoni:0.01438034):0.03998282,Antaresia_perthensis:0.06970084):0.01535176,Antaresia_maculosa:0.07081544):0.01052213,(Morelia_carinata:0.0774876,(Morelia_viridisN:0.03236672,Morelia_viridisS:0.07599355):0.01839833):0.01302615):0.01439441):0.0008917252,((Liasis_albertisii:0.039823,Bothrochilus_boa:0.0 [...]
+    TREE 60 = ((Loxocemus_bicolor:0.2564752,Candoia_aspera:0.430636):0.02776712,(((Python_reticulatus:0.05389938,Python_timoriensis:0.07081924):0.05042149,((Bothrochilus_boa:0.0657108,Liasis_albertisii:0.04306774):0.03297476,((((Morelia_spilota:0.02325149,Morelia_bredli:0.02818583):0.02944659,(Morelia_oenpelliensis:0.05882524,(Morelia_amethistina:0.02618559,(((Morelia_clastolepis:0.00525444,Morelia_kinghorni:0.004066344):0.003764506,Morelia_nauta:0.005986931):0.00816707,Morelia_tracyae:0 [...]
+    TREE 61 = ((Loxocemus_bicolor:0.2390967,Candoia_aspera:0.4592643):0.04768498,((((((((Apodora_papuana:0.06974535,Liasis_olivaceus:0.03615791):0.01678672,(Liasis_fuscus:0.01981257,Liasis_mackloti:0.007387913):0.03607835):0.02837702,(Antaresia_melanocephalus:0.0413593,Antaresia_ramsayi:0.02633209):0.05602189):0.01375102,((Antaresia_maculosa:0.06634786,((Antaresia_childreni:0.02528178,Antaresia_stimsoni:0.01857345):0.03243086,Antaresia_perthensis:0.06599134):0.007734964):0.00491988,(More [...]
+    TREE 62 = ((Loxocemus_bicolor:0.3119484,((Python_regius:0.09866725,(Python_curtus:0.1245086,(Python_sebae:0.07169133,Python_molurus:0.03607564):0.01980528):0.02430393):0.02232077,((Python_reticulatus:0.0580492,Python_timoriensis:0.07219332):0.05736778,((((Bothrochilus_boa:0.06789342,Liasis_albertisii:0.0626714):0.0318012,((Antaresia_ramsayi:0.0316683,Antaresia_melanocephalus:0.04699017):0.05802338,(Apodora_papuana:0.06741561,(Liasis_olivaceus:0.03440653,(Liasis_fuscus:0.0262918,Liasi [...]
+    TREE 63 = ((((Python_regius:0.1045362,(Python_curtus:0.0815732,(Python_sebae:0.0731453,Python_molurus:0.04121527):0.01532189):0.01994899):0.03798915,((Python_timoriensis:0.06294615,Python_reticulatus:0.05094862):0.0574045,((Morelia_boeleni:0.08758336,(((((Liasis_mackloti:0.004929908,Liasis_fuscus:0.02028416):0.0462742,Liasis_olivaceus:0.03706543):0.01458087,Apodora_papuana:0.05878766):0.01585728,(Antaresia_ramsayi:0.01785121,Antaresia_melanocephalus:0.05271079):0.04691403):0.00569747 [...]
+    TREE 64 = (Candoia_aspera:0.5268895,((((Python_reticulatus:0.05913797,Python_timoriensis:0.06249719):0.05855848,(((((Antaresia_perthensis:0.06857628,(Antaresia_childreni:0.02184403,Antaresia_stimsoni:0.01644374):0.02731027):0.01049891,Antaresia_maculosa:0.06825385):0.0212855,(Morelia_carinata:0.05797942,(Morelia_viridisN:0.04222686,Morelia_viridisS:0.03983184):0.02372562):0.007988327):0.01161671,(Morelia_boeleni:0.08816773,((Bothrochilus_boa:0.0735602,Liasis_albertisii:0.05135359):0. [...]
+    TREE 65 = ((((((Python_molurus:0.05248859,Python_sebae:0.08445334):0.01912769,Python_curtus:0.1068431):0.01303875,Python_regius:0.1060619):0.04121669,((Python_reticulatus:0.06641269,Python_timoriensis:0.06510396):0.05472704,(((Apodora_papuana:0.06501856,((Liasis_fuscus:0.01938097,Liasis_mackloti:0.007993713):0.04233311,Liasis_olivaceus:0.04417499):0.007100968):0.01449947,(Antaresia_melanocephalus:0.03318174,Antaresia_ramsayi:0.02608021):0.04401241):0.02197335,(((((Morelia_spilota:0.0 [...]
+    TREE 66 = ((((((Python_molurus:0.04811833,Python_sebae:0.07004114):0.02196186,Python_curtus:0.1096454):0.01956063,Python_regius:0.1123615):0.03691369,(((((Liasis_albertisii:0.05370764,Bothrochilus_boa:0.06265907):0.03561458,((Apodora_papuana:0.07272776,((Liasis_fuscus:0.01370828,Liasis_mackloti:0.02309465):0.05741218,Liasis_olivaceus:0.04167728):0.01163812):0.01665479,(Antaresia_melanocephalus:0.0253543,Antaresia_ramsayi:0.03190032):0.05173058):0.0008003378):0.005472764,Morelia_boele [...]
+    TREE 67 = ((((((Python_sebae:0.06236479,Python_molurus:0.04213767):0.01777824,Python_curtus:0.1021113):0.01363883,Python_regius:0.1001127):0.01958544,((Python_timoriensis:0.04812869,Python_reticulatus:0.06212032):0.06951745,(((Morelia_boeleni:0.07786193,((Morelia_spilota:0.02928192,Morelia_bredli:0.01920753):0.02754254,((Morelia_amethistina:0.02519712,(Morelia_tracyae:0.03750275,((Morelia_kinghorni:0.0112295,Morelia_nauta:0.01662518):0.008346236,Morelia_clastolepis:0.005399784):0.010 [...]
+    TREE 68 = ((Loxocemus_bicolor:0.3421064,((Python_regius:0.1115622,(Python_curtus:0.09826584,(Python_sebae:0.07191922,Python_molurus:0.04064791):0.01578384):0.01438764):0.04523101,((Python_timoriensis:0.06358136,Python_reticulatus:0.05833856):0.06307472,((((Morelia_carinata:0.05734906,(Morelia_viridisS:0.04318783,Morelia_viridisN:0.04583721):0.01821118):0.01545327,((Antaresia_perthensis:0.06296287,(Antaresia_stimsoni:0.01706827,Antaresia_childreni:0.02104141):0.03055942):0.01182927,An [...]
+    TREE 69 = ((Candoia_aspera:0.4450836,Loxocemus_bicolor:0.3174019):0.02447175,((Python_regius:0.1424483,((Python_molurus:0.03302827,Python_sebae:0.07146083):0.01905547,Python_curtus:0.1159114):0.0381509):0.04241104,(((((((Antaresia_childreni:0.01890977,Antaresia_stimsoni:0.01517946):0.035416,Antaresia_perthensis:0.06857496):0.01161928,Antaresia_maculosa:0.07220173):0.007211953,(Morelia_carinata:0.0642391,(Morelia_viridisN:0.03726853,Morelia_viridisS:0.05549287):0.01029716):0.0189687): [...]
+    TREE 70 = ((Loxocemus_bicolor:0.2857882,(((Python_curtus:0.09804145,(Python_molurus:0.04031657,Python_sebae:0.07843305):0.02059991):0.02529989,Python_regius:0.117949):0.04936197,((((((Morelia_oenpelliensis:0.06557468,(Morelia_tracyae:0.04528187,((Morelia_clastolepis:0.004341288,(Morelia_kinghorni:0.01006468,Morelia_nauta:0.004045206):0.004556852):0.01467889,Morelia_amethistina:0.02757685):0.005371513):0.05121582):0.003292884,(Morelia_spilota:0.01948951,Morelia_bredli:0.03155399):0.03 [...]
+    TREE 71 = (Candoia_aspera:0.4717339,(((((((Bothrochilus_boa:0.05883323,Liasis_albertisii:0.05456283):0.02271329,((Apodora_papuana:0.05998257,(Liasis_olivaceus:0.02913736,(Liasis_fuscus:0.01732647,Liasis_mackloti:0.01674711):0.04874046):0.008143937):0.01887131,(Antaresia_ramsayi:0.04183237,Antaresia_melanocephalus:0.04076158):0.04427686):0.009855444):0.01066004,(((Morelia_oenpelliensis:0.04305822,((Morelia_clastolepis:0.002836661,(Morelia_nauta:0.01515712,Morelia_kinghorni:0.004505663 [...]
+    TREE 72 = (Candoia_aspera:0.4905738,(Loxocemus_bicolor:0.2519591,(((Python_timoriensis:0.07404226,Python_reticulatus:0.05474481):0.06644049,(((Morelia_boeleni:0.09095004,((Morelia_bredli:0.02959755,Morelia_spilota:0.0353426):0.02959985,(((Morelia_amethistina:0.02093452,(Morelia_nauta:0.004317592,(Morelia_kinghorni:0.009871938,Morelia_clastolepis:0.0136011):0.006546919):0.03087369):0.01212434,Morelia_tracyae:0.03055861):0.05770397,Morelia_oenpelliensis:0.07079792):0.003571392):0.03535 [...]
+    TREE 73 = (Candoia_aspera:0.4598036,(((((((Morelia_spilota:0.01982865,Morelia_bredli:0.02966733):0.03884227,((Morelia_tracyae:0.03055964,(((Morelia_kinghorni:0.004122602,Morelia_clastolepis:0.006418796):0.006158391,Morelia_nauta:0.00751089):0.02200455,Morelia_amethistina:0.02131889):0.01090798):0.01356813,Morelia_oenpelliensis:0.05887202):0.01445794):0.02558764,((((Antaresia_ramsayi:0.02479341,Antaresia_melanocephalus:0.04232038):0.04453974,(Apodora_papuana:0.05413614,((Liasis_fuscus [...]
+    TREE 74 = (((Python_regius:0.122777,((Python_molurus:0.03861148,Python_sebae:0.08854008):0.01836058,Python_curtus:0.1087409):0.02868272):0.03219403,((((((Morelia_oenpelliensis:0.0601058,((Morelia_amethistina:0.0244574,Morelia_tracyae:0.0397486):0.0001275072,(Morelia_nauta:0.004788438,(Morelia_clastolepis:0.005838887,Morelia_kinghorni:0.01017217):0.001899022):0.01904309):0.04300782):0.00633599,(Morelia_spilota:0.02559737,Morelia_bredli:0.02940281):0.02435702):0.0246874,((Antaresia_mac [...]
+    TREE 75 = (((Python_regius:0.1185489,(Python_curtus:0.08720485,(Python_molurus:0.0435094,Python_sebae:0.08370481):0.01269984):0.0270439):0.05088809,((((Liasis_albertisii:0.05173274,Bothrochilus_boa:0.05183286):0.05072088,(((Antaresia_melanocephalus:0.03656163,Antaresia_ramsayi:0.03155388):0.03835047,(((Liasis_fuscus:0.01454665,Liasis_mackloti:0.01162582):0.0407249,Liasis_olivaceus:0.03344422):0.01447517,Apodora_papuana:0.06149126):0.01772949):0.01052499,((((Antaresia_childreni:0.0190 [...]
+    TREE 76 = ((Loxocemus_bicolor:0.289421,(((Python_curtus:0.1034425,(Python_molurus:0.03838318,Python_sebae:0.0657545):0.005487207):0.01794188,Python_regius:0.1158322):0.02708483,((((((Apodora_papuana:0.05140632,(Liasis_olivaceus:0.03483575,(Liasis_fuscus:0.0167882,Liasis_mackloti:0.01303193):0.0447589):0.007661923):0.0181732,(Antaresia_melanocephalus:0.03623783,Antaresia_ramsayi:0.02679624):0.04371616):0.001420114,(Liasis_albertisii:0.05907394,Bothrochilus_boa:0.06117998):0.04004438): [...]
+    TREE 77 = ((Loxocemus_bicolor:0.3190314,(((((Morelia_boeleni:0.07978362,((Morelia_spilota:0.02172516,Morelia_bredli:0.026232):0.03155658,((Morelia_amethistina:0.02728021,(Morelia_tracyae:0.05859969,((Morelia_clastolepis:0.004245739,Morelia_kinghorni:0.007346214):0.006021728,Morelia_nauta:0.01110223):0.0109052):0.00218281):0.02807543,Morelia_oenpelliensis:0.05684937):0.006062989):0.01907406):0.00915462,(Liasis_albertisii:0.03894934,Bothrochilus_boa:0.0623029):0.0322232):0.002144932,(( [...]
+    TREE 78 = ((Loxocemus_bicolor:0.2229325,Candoia_aspera:0.4421281):0.03062222,(((((((Morelia_bredli:0.03399205,Morelia_spilota:0.02517605):0.02337728,(((Morelia_amethistina:0.01529103,(Morelia_clastolepis:0.006187776,(Morelia_kinghorni:0.01337353,Morelia_nauta:0.005734063):0.005374756):0.02460788):0.001540486,Morelia_tracyae:0.03329582):0.0333974,Morelia_oenpelliensis:0.07525567):0.00578434):0.02385279,Morelia_boeleni:0.08113851):0.006018764,(((Antaresia_melanocephalus:0.04012,Antares [...]
+    TREE 79 = ((Loxocemus_bicolor:0.2068851,Candoia_aspera:0.4157484):0.06394592,((Python_regius:0.1119306,((Python_molurus:0.02507726,Python_sebae:0.06768835):0.01273502,Python_curtus:0.09793023):0.02951751):0.03910106,(((((Morelia_spilota:0.02664926,Morelia_bredli:0.03702698):0.02468232,(Morelia_oenpelliensis:0.05764724,((Morelia_amethistina:0.02414779,(Morelia_clastolepis:0.005833709,(Morelia_nauta:0.007372257,Morelia_kinghorni:0.0086475):0.00620881):0.01279634):0.008251266,Morelia_tr [...]
+    TREE 80 = (Candoia_aspera:0.391632,(Loxocemus_bicolor:0.2646089,(((Python_reticulatus:0.05737131,Python_timoriensis:0.06872314):0.05957824,(((((Antaresia_childreni:0.02040491,Antaresia_stimsoni:0.01350101):0.03175717,Antaresia_perthensis:0.06307919):0.01255095,Antaresia_maculosa:0.07096548):0.01294142,(Morelia_carinata:0.0554948,(Morelia_viridisN:0.03206945,Morelia_viridisS:0.04474466):0.02886815):0.008750372):0.01658624,(((Bothrochilus_boa:0.05867584,Liasis_albertisii:0.04518102):0. [...]
+    TREE 81 = (Candoia_aspera:0.3679055,((((Python_curtus:0.09829191,(Python_molurus:0.03905549,Python_sebae:0.08446096):0.01455288):0.02320233,Python_regius:0.1102946):0.05339913,(((((Antaresia_perthensis:0.06928507,(Antaresia_childreni:0.03003637,Antaresia_stimsoni:0.01953789):0.03258011):0.0164155,Antaresia_maculosa:0.06494583):0.007981955,(Morelia_carinata:0.06003471,(Morelia_viridisN:0.02537246,Morelia_viridisS:0.04879795):0.01950855):0.01576949):0.0175171,((((Morelia_spilota:0.0290 [...]
+    TREE 82 = ((Loxocemus_bicolor:0.2390659,Candoia_aspera:0.3844317):0.05418046,((((Python_molurus:0.04026471,Python_sebae:0.0644374):0.01042403,Python_curtus:0.1225831):0.02166972,Python_regius:0.09710642):0.03811794,(((((Antaresia_perthensis:0.06993962,(Antaresia_childreni:0.01769138,Antaresia_stimsoni:0.01418347):0.03850968):0.01233473,Antaresia_maculosa:0.06562401):0.002737597,(Morelia_carinata:0.05781147,(Morelia_viridisN:0.04477895,Morelia_viridisS:0.06053305):0.01641598):0.019626 [...]
+    TREE 83 = ((Loxocemus_bicolor:0.2036892,Candoia_aspera:0.4028089):0.04007441,(((((Morelia_boeleni:0.06613699,((((((Morelia_kinghorni:0.008681592,Morelia_clastolepis:0.0096482):0.006886978,Morelia_nauta:0.004116863):0.007086856,Morelia_amethistina:0.03765005):0.007846691,Morelia_tracyae:0.05190823):0.02842286,Morelia_oenpelliensis:0.05083896):0.006002964,(Morelia_spilota:0.03349041,Morelia_bredli:0.03408026):0.02509088):0.01676837):0.01044434,(Bothrochilus_boa:0.05595351,Liasis_albert [...]
+    TREE 84 = ((((Python_curtus:0.1223827,(Python_sebae:0.06621626,Python_molurus:0.03832782):0.01086444):0.02337286,Python_regius:0.124018):0.02184441,((Python_timoriensis:0.08372909,Python_reticulatus:0.04903101):0.06903388,(Morelia_boeleni:0.0884944,((((Morelia_bredli:0.0312634,Morelia_spilota:0.02388251):0.02731682,((Morelia_amethistina:0.02294787,(((Morelia_nauta:0.007833751,Morelia_kinghorni:0.008007233):0.005648212,Morelia_clastolepis:0.009921374):0.01119791,Morelia_tracyae:0.0413 [...]
+    TREE 85 = ((Loxocemus_bicolor:0.324078,(((Python_curtus:0.09449341,(Python_molurus:0.03660124,Python_sebae:0.08110901):0.02108984):0.01929663,Python_regius:0.1138374):0.07513523,((Python_timoriensis:0.05849644,Python_reticulatus:0.07499078):0.05284259,((((Antaresia_ramsayi:0.02524694,Antaresia_melanocephalus:0.04279777):0.05075451,(Apodora_papuana:0.05920609,(Liasis_olivaceus:0.03153405,(Liasis_mackloti:0.01327589,Liasis_fuscus:0.01920955):0.04526949):0.01172603):0.01473238):0.006227 [...]
+    TREE 86 = (Candoia_aspera:0.4640829,(((((Python_molurus:0.03415465,Python_sebae:0.07069288):0.01969121,Python_curtus:0.08093054):0.006925213,Python_regius:0.1301421):0.02361257,(((((Morelia_boeleni:0.08796563,((Morelia_oenpelliensis:0.06088415,((((Morelia_kinghorni:0.007620532,Morelia_nauta:0.009314096):0.008601672,Morelia_clastolepis:0.002071879):0.01426838,Morelia_amethistina:0.03194737):0.00157039,Morelia_tracyae:0.04071359):0.03416296):0.003333199,(Morelia_spilota:0.03133059,More [...]
+    TREE 87 = (((((Python_sebae:0.05806749,Python_molurus:0.04863067):0.01977878,Python_curtus:0.08771888):0.02114992,Python_regius:0.1133869):0.03790215,((((Morelia_boeleni:0.0781837,(((Apodora_papuana:0.05732655,(Liasis_olivaceus:0.04020414,(Liasis_mackloti:0.009338819,Liasis_fuscus:0.02041633):0.04082824):0.008338371):0.01673676,(Antaresia_melanocephalus:0.03542311,Antaresia_ramsayi:0.03377948):0.04833716):0.01207703,(Bothrochilus_boa:0.05472008,Liasis_albertisii:0.04752972):0.0298097 [...]
+    TREE 88 = (Candoia_aspera:0.5266748,(Loxocemus_bicolor:0.2863724,(((Python_curtus:0.1113482,(Python_molurus:0.0369985,Python_sebae:0.07977591):0.0133193):0.02088768,Python_regius:0.109057):0.03589019,((Python_timoriensis:0.06994488,Python_reticulatus:0.06733911):0.05117117,(((((Antaresia_perthensis:0.07476111,(Antaresia_stimsoni:0.0102694,Antaresia_childreni:0.02539743):0.04680466):0.007219799,Antaresia_maculosa:0.07683412):0.005258364,((Morelia_viridisS:0.03428402,Morelia_viridisN:0 [...]
+    TREE 89 = ((((Python_timoriensis:0.06707148,Python_reticulatus:0.06506125):0.04355094,(((Morelia_boeleni:0.08020637,((((Morelia_amethistina:0.02208421,((Morelia_clastolepis:0.006524679,Morelia_kinghorni:0.009968198):0.005432657,Morelia_nauta:0.004671117):0.0219751):0.005202818,Morelia_tracyae:0.04945317):0.0290873,Morelia_oenpelliensis:0.06477172):0.00946036,(Morelia_bredli:0.02027949,Morelia_spilota:0.02893624):0.02610896):0.01580249):0.01046464,((Bothrochilus_boa:0.06404848,Liasis_ [...]
+    TREE 90 = ((Loxocemus_bicolor:0.2342189,((Python_regius:0.125989,((Python_molurus:0.03747382,Python_sebae:0.08511339):0.01629249,Python_curtus:0.08855054):0.01128598):0.03744629,((Python_timoriensis:0.06794617,Python_reticulatus:0.06178539):0.04416913,((((Bothrochilus_boa:0.05820688,Liasis_albertisii:0.05675853):0.02458397,(((Liasis_olivaceus:0.03059831,(Liasis_mackloti:0.008783671,Liasis_fuscus:0.01852068):0.04863702):0.01346526,Apodora_papuana:0.06713094):0.01489775,(Antaresia_rams [...]
+    TREE 91 = ((((Python_regius:0.1516846,((Python_sebae:0.07523123,Python_molurus:0.04721355):0.02365172,Python_curtus:0.09479072):0.03676268):0.04198977,((Python_reticulatus:0.07135687,Python_timoriensis:0.05190384):0.06425761,(((((Morelia_spilota:0.02512919,Morelia_bredli:0.02644278):0.03247482,(Morelia_oenpelliensis:0.0642112,((Morelia_tracyae:0.03612823,Morelia_amethistina:0.03026808):0.001308547,(Morelia_clastolepis:0.003069521,(Morelia_nauta:0.01104297,Morelia_kinghorni:0.00833590 [...]
+    TREE 92 = (Candoia_aspera:0.4547292,(Loxocemus_bicolor:0.3272833,(((((((((Morelia_amethistina:0.02060874,((Morelia_nauta:0.005561469,Morelia_kinghorni:0.008690786):0.005051559,Morelia_clastolepis:0.007840901):0.01022098):0.0133147,Morelia_tracyae:0.05010387):0.02268618,Morelia_oenpelliensis:0.06417942):0.005575328,(Morelia_spilota:0.02642943,Morelia_bredli:0.02261605):0.03579255):0.01411255,Morelia_boeleni:0.07721844):0.0007152459,((Liasis_albertisii:0.04806457,Bothrochilus_boa:0.057 [...]
+    TREE 93 = (Candoia_aspera:0.5254491,(((((Python_sebae:0.07638384,Python_molurus:0.03664319):0.01567718,Python_curtus:0.1210042):0.01515347,Python_regius:0.1274836):0.04894325,((((Antaresia_maculosa:0.05457504,(Antaresia_perthensis:0.0810693,(Antaresia_childreni:0.02907495,Antaresia_stimsoni:0.0118365):0.02838937):0.01649198):0.006525643,(Morelia_carinata:0.05356569,(Morelia_viridisS:0.04860563,Morelia_viridisN:0.0308708):0.02143981):0.01601209):0.01907801,((((((Liasis_mackloti:0.0120 [...]
+    TREE 94 = (Candoia_aspera:0.4325058,(Loxocemus_bicolor:0.29547,((((Python_sebae:0.07775931,Python_molurus:0.03063228):0.01949916,Python_curtus:0.09782399):0.01065466,Python_regius:0.1565605):0.04138757,(((Morelia_boeleni:0.1021874,((Morelia_bredli:0.03767743,Morelia_spilota:0.03995434):0.02511893,(Morelia_oenpelliensis:0.06688421,((Morelia_tracyae:0.04224919,((Morelia_nauta:0.007632277,Morelia_kinghorni:0.007195393):0.0051102,Morelia_clastolepis:0.006891541):0.01861817):0.001031537,M [...]
+    TREE 95 = (Candoia_aspera:0.4667417,(Loxocemus_bicolor:0.2636684,((((Python_sebae:0.06724253,Python_molurus:0.04279827):0.0137619,Python_curtus:0.1224426):0.01620571,Python_regius:0.105882):0.05012959,((((((Morelia_bredli:0.02989658,Morelia_spilota:0.02336917):0.03265544,(((Morelia_amethistina:0.02684736,((Morelia_kinghorni:0.009531203,Morelia_nauta:0.005903659):0.003909769,Morelia_clastolepis:0.004228678):0.006278146):0.01098578,Morelia_tracyae:0.04320166):0.03115355,Morelia_oenpell [...]
+    TREE 96 = (((((((Liasis_albertisii:0.05597893,Bothrochilus_boa:0.06708494):0.02136291,(((Morelia_oenpelliensis:0.06692067,((((Morelia_nauta:0.006490195,Morelia_kinghorni:0.008862895):0.002589311,Morelia_clastolepis:0.006537181):0.0124676,Morelia_amethistina:0.02783844):0.007327408,Morelia_tracyae:0.04129567):0.04256243):0.00284383,(Morelia_spilota:0.02580417,Morelia_bredli:0.04117144):0.02719414):0.02677905,Morelia_boeleni:0.08508827):0.004503982):0.001474833,((((Apodora_papuana:0.06 [...]
+    TREE 97 = ((Loxocemus_bicolor:0.2661032,(((((Morelia_boeleni:0.09289861,((Bothrochilus_boa:0.06138615,Liasis_albertisii:0.05013821):0.03171201,(((Liasis_olivaceus:0.02512497,(Liasis_fuscus:0.02569574,Liasis_mackloti:0.006837142):0.04839449):0.009461557,Apodora_papuana:0.07488417):0.02025706,(Antaresia_ramsayi:0.02520289,Antaresia_melanocephalus:0.04226063):0.04379259):0.004359125):0.003586254):0.002317637,((Morelia_spilota:0.02530919,Morelia_bredli:0.02428088):0.02863846,((Morelia_am [...]
+    TREE 98 = (Candoia_aspera:0.4338458,(Loxocemus_bicolor:0.2634376,(((Python_curtus:0.1105749,(Python_sebae:0.09556636,Python_molurus:0.03273635):0.01150569):0.008801576,Python_regius:0.1625759):0.03722187,(((((Morelia_viridisN:0.04247342,Morelia_viridisS:0.03651036):0.03260268,Morelia_carinata:0.07507731):0.00841834,(Antaresia_maculosa:0.0826,(Antaresia_perthensis:0.08604039,(Antaresia_childreni:0.03150816,Antaresia_stimsoni:0.01530298):0.03460219):0.01527724):0.01680449):0.01551863,( [...]
+    TREE 99 = ((((((Morelia_boeleni:0.07341909,((Morelia_spilota:0.02253496,Morelia_bredli:0.02604728):0.03621784,(Morelia_oenpelliensis:0.07207704,(Morelia_tracyae:0.04248111,(((Morelia_kinghorni:0.007760683,Morelia_nauta:0.0199143):0.002944562,Morelia_clastolepis:0.003695029):0.0118863,Morelia_amethistina:0.02630623):0.01516716):0.02096247):0.00987943):0.0225472):0.006581086,(((Apodora_papuana:0.05859656,(Liasis_olivaceus:0.03719666,(Liasis_fuscus:0.01950437,Liasis_mackloti:0.007994642 [...]
+    TREE 100 = (Candoia_aspera:0.4341243,(Loxocemus_bicolor:0.2408998,(((Python_curtus:0.09611847,(Python_sebae:0.1025681,Python_molurus:0.02687551):0.03656667):0.02051083,Python_regius:0.1336665):0.05131532,((Python_timoriensis:0.06018468,Python_reticulatus:0.06436806):0.06278732,(((((Morelia_oenpelliensis:0.06656884,(Morelia_tracyae:0.02729484,(((Morelia_nauta:0.009314223,Morelia_kinghorni:0.01130303):0.003158153,Morelia_clastolepis:0.008433545):0.0182326,Morelia_amethistina:0.02933323 [...]
+    TREE 101 = (Candoia_aspera:0.4509276,((((((Morelia_boeleni:0.07145028,((Morelia_bredli:0.02447092,Morelia_spilota:0.02234732):0.03985019,(Morelia_oenpelliensis:0.06013741,((((Morelia_kinghorni:0.006472318,Morelia_nauta:0.003261386):0.003279182,Morelia_clastolepis:0.003404097):0.01056659,Morelia_tracyae:0.05587622):0.006968234,Morelia_amethistina:0.02026666):0.03641767):0.005075176):0.03110211):0.01046361,((Bothrochilus_boa:0.06200234,Liasis_albertisii:0.05664673):0.04321532,((Apodora [...]
+    TREE 11 = ((((((Morelia_boeleni:0.105931,((Bothrochilus_boa:0.05131693,Liasis_albertisii:0.04545252):0.03797298,((Antaresia_ramsayi:0.02228446,Antaresia_melanocephalus:0.03885661):0.05087393,((Liasis_olivaceus:0.0281452,(Liasis_fuscus:0.02249635,Liasis_mackloti:0.01251578):0.04465905):0.006607861,Apodora_papuana:0.05587392):0.008114536):0.01409248):0.002334905):0.005272702,((((Morelia_viridisS:0.03675911,Morelia_viridisN:0.02198791):0.0215533,Morelia_carinata:0.06453783):0.005055946, [...]
+    TREE 12 = ((Loxocemus_bicolor:0.2458238,((Python_regius:0.1106511,(Python_curtus:0.1001811,(Python_molurus:0.04448544,Python_sebae:0.07099682):0.008173695):0.02536996):0.04260848,((Python_timoriensis:0.06684263,Python_reticulatus:0.03900576):0.06959,(((Morelia_carinata:0.05905979,(Morelia_viridisS:0.04271627,Morelia_viridisN:0.03918736):0.04180082):0.00903691,((Antaresia_perthensis:0.06119587,(Antaresia_stimsoni:0.01571635,Antaresia_childreni:0.02290186):0.04092202):0.003749944,Antar [...]
+    TREE 13 = ((((Python_regius:0.125521,(Python_curtus:0.1073245,(Python_molurus:0.03657539,Python_sebae:0.06864878):0.03002534):0.01433173):0.04825549,((((Morelia_carinata:0.05949514,(Morelia_viridisS:0.04806101,Morelia_viridisN:0.02823277):0.04405801):0.01385116,(((Antaresia_stimsoni:0.01413133,Antaresia_childreni:0.02703797):0.02343367,Antaresia_perthensis:0.07249178):0.008990339,Antaresia_maculosa:0.06567229):0.01366012):0.01659183,(((Bothrochilus_boa:0.05828401,Liasis_albertisii:0. [...]
+    TREE 14 = ((Loxocemus_bicolor:0.2311109,((((((Morelia_oenpelliensis:0.07476196,(Morelia_tracyae:0.04489038,(((Morelia_kinghorni:0.008629219,Morelia_nauta:0.01117212):0.001642294,Morelia_clastolepis:0.005837899):0.005027789,Morelia_amethistina:0.02727465):0.007558527):0.03979655):0.004991851,(Morelia_spilota:0.03343332,Morelia_bredli:0.03267572):0.03095787):0.03054975,((Bothrochilus_boa:0.07451251,Liasis_albertisii:0.05084842):0.02736059,Morelia_boeleni:0.08948575):0.003187523):0.0038 [...]
+    TREE 15 = ((Loxocemus_bicolor:0.2896212,((Python_regius:0.09775786,(Python_curtus:0.09624462,(Python_sebae:0.07141944,Python_molurus:0.03844634):0.01642442):0.01540055):0.03715156,((Python_timoriensis:0.07277793,Python_reticulatus:0.0524129):0.08165967,(Morelia_boeleni:0.08817638,((((Morelia_carinata:0.06965416,(Morelia_viridisN:0.04107285,Morelia_viridisS:0.05026227):0.02040291):0.01957017,(((Antaresia_childreni:0.02846544,Antaresia_stimsoni:0.01113197):0.05445025,Antaresia_perthens [...]
+    TREE 16 = ((Loxocemus_bicolor:0.2332114,((((Python_sebae:0.08310605,Python_molurus:0.03309147):0.02673089,Python_curtus:0.09236202):0.01485251,Python_regius:0.09270653):0.04843605,((Python_timoriensis:0.06307732,Python_reticulatus:0.05258899):0.06886643,((((Morelia_viridisS:0.0484855,Morelia_viridisN:0.03151694):0.02682321,Morelia_carinata:0.06154282):0.01581032,(Antaresia_maculosa:0.07283047,(Antaresia_perthensis:0.07071279,(Antaresia_childreni:0.01633722,Antaresia_stimsoni:0.018473 [...]
+    TREE 17 = (Candoia_aspera:0.4823539,(Loxocemus_bicolor:0.2773091,((Python_regius:0.1146737,(Python_curtus:0.1202873,(Python_sebae:0.07350455,Python_molurus:0.04267794):0.01202121):0.02153016):0.04880882,((Python_timoriensis:0.06966236,Python_reticulatus:0.06563854):0.04079652,(((Antaresia_maculosa:0.06757648,((Antaresia_childreni:0.01839516,Antaresia_stimsoni:0.01265365):0.04068759,Antaresia_perthensis:0.07210109):0.007794484):0.009137391,(Morelia_carinata:0.08291748,(Morelia_viridis [...]
+    TREE 18 = ((((Python_timoriensis:0.07692143,Python_reticulatus:0.06844929):0.06878282,(((Morelia_carinata:0.06970537,(Morelia_viridisS:0.04785833,Morelia_viridisN:0.03674054):0.02311062):0.01471066,(((Antaresia_childreni:0.02932162,Antaresia_stimsoni:0.0193905):0.03769673,Antaresia_perthensis:0.07640949):0.01295086,Antaresia_maculosa:0.07700848):0.009436204):0.01981386,((((((Morelia_amethistina:0.03582876,((Morelia_nauta:0.00598038,Morelia_kinghorni:0.009249085):0.006765277,Morelia_c [...]
+    TREE 19 = (((((Python_timoriensis:0.07824341,Python_reticulatus:0.06773204):0.05436504,((((((Liasis_olivaceus:0.04213837,Apodora_papuana:0.06076422):0.006128772,(Liasis_fuscus:0.01053002,Liasis_mackloti:0.01818946):0.04563388):0.02132318,(Antaresia_ramsayi:0.03897168,Antaresia_melanocephalus:0.03769482):0.04549028):0.01191707,((Morelia_carinata:0.07412641,(Morelia_viridisS:0.06176765,Morelia_viridisN:0.02758298):0.02351254):0.0115188,(((Antaresia_childreni:0.02294752,Antaresia_stimso [...]
+    TREE 20 = ((Loxocemus_bicolor:0.3315088,(((Python_timoriensis:0.08265575,Python_reticulatus:0.04842836):0.06637015,((((Bothrochilus_boa:0.06045392,Liasis_albertisii:0.05932411):0.03647307,Morelia_boeleni:0.09952603):0.002094248,(((((Antaresia_childreni:0.02364124,Antaresia_stimsoni:0.01538722):0.03483375,Antaresia_perthensis:0.07842851):0.01029187,Antaresia_maculosa:0.05794009):0.01418545,(Morelia_carinata:0.06118829,(Morelia_viridisS:0.05108068,Morelia_viridisN:0.03327827):0.0295365 [...]
+    TREE 21 = (Candoia_aspera:0.3879355,(Loxocemus_bicolor:0.2635277,(((Python_timoriensis:0.07488745,Python_reticulatus:0.07165347):0.06286752,(((Bothrochilus_boa:0.05066114,Liasis_albertisii:0.04631989):0.03495751,((Apodora_papuana:0.06336035,((Liasis_fuscus:0.02679166,Liasis_mackloti:0.01508707):0.04512154,Liasis_olivaceus:0.03170831):0.02130021):0.01225882,(Antaresia_ramsayi:0.02715579,Antaresia_melanocephalus:0.03692815):0.05242907):0.006242959):0.001901093,((((Morelia_bredli:0.0287 [...]
+    TREE 22 = ((Loxocemus_bicolor:0.3484046,((Python_regius:0.1299487,(Python_curtus:0.08751524,(Python_sebae:0.07268371,Python_molurus:0.04254277):0.02008602):0.01126513):0.04984823,(((((Liasis_albertisii:0.04502909,Bothrochilus_boa:0.05899998):0.0354086,((Apodora_papuana:0.05684881,((Liasis_mackloti:0.01544956,Liasis_fuscus:0.02130693):0.04872514,Liasis_olivaceus:0.03650952):0.005483124):0.01448147,(Antaresia_ramsayi:0.02598716,Antaresia_melanocephalus:0.03041967):0.0519068):0.00492741 [...]
+    TREE 23 = ((Loxocemus_bicolor:0.3065548,(((((((Antaresia_ramsayi:0.03385554,Antaresia_melanocephalus:0.03199838):0.0479804,((Liasis_olivaceus:0.03510994,(Liasis_mackloti:0.005883193,Liasis_fuscus:0.02171762):0.05961237):0.01021559,Apodora_papuana:0.0559756):0.01603474):0.01175948,(Bothrochilus_boa:0.06437692,Liasis_albertisii:0.03322093):0.04863423):0.003630452,((Antaresia_maculosa:0.06591439,((Antaresia_stimsoni:0.02082665,Antaresia_childreni:0.02123506):0.05057495,Antaresia_perthen [...]
+    TREE 24 = (Candoia_aspera:0.4485051,(Loxocemus_bicolor:0.2753823,(((Morelia_boeleni:0.07826681,((((Antaresia_ramsayi:0.02174183,Antaresia_melanocephalus:0.0487837):0.05696255,((Liasis_olivaceus:0.03226077,Apodora_papuana:0.05015353):0.002679578,(Liasis_fuscus:0.02321992,Liasis_mackloti:0.01623561):0.05603043):0.01825385):0.01154037,(Bothrochilus_boa:0.0626663,Liasis_albertisii:0.04735238):0.03097784):0.007929447,((((Morelia_viridisS:0.04523728,Morelia_viridisN:0.02992506):0.02415625, [...]
+    TREE 25 = (Candoia_aspera:0.4007296,(Loxocemus_bicolor:0.3023659,((((Liasis_albertisii:0.04484209,Bothrochilus_boa:0.05694912):0.03509212,((((((Antaresia_childreni:0.02381686,Antaresia_stimsoni:0.01965734):0.04160441,Antaresia_perthensis:0.06501918):0.01110509,Antaresia_maculosa:0.06540036):0.01027523,((Morelia_viridisN:0.02966337,Morelia_viridisS:0.04604234):0.01806969,Morelia_carinata:0.06961854):0.008454373):0.01388917,((Antaresia_ramsayi:0.03352314,Antaresia_melanocephalus:0.0431 [...]
+    TREE 26 = (((((Python_timoriensis:0.06449001,Python_reticulatus:0.05702399):0.0703105,(((((Antaresia_childreni:0.02325914,Antaresia_stimsoni:0.01170238):0.04134304,Antaresia_perthensis:0.05331392):0.01818774,Antaresia_maculosa:0.09053385):0.0108276,((Morelia_viridisN:0.03599152,Morelia_viridisS:0.04161514):0.02303153,Morelia_carinata:0.07374027):0.00817112):0.02066169,((((Morelia_oenpelliensis:0.05305869,(Morelia_amethistina:0.03233761,(((Morelia_kinghorni:0.005085139,Morelia_clastol [...]
+    TREE 27 = (Candoia_aspera:0.5072671,(Loxocemus_bicolor:0.2771276,(((Python_timoriensis:0.06508521,Python_reticulatus:0.06118689):0.0753944,(Morelia_boeleni:0.06473072,(((Morelia_oenpelliensis:0.06621928,(((Morelia_clastolepis:0.001220996,(Morelia_nauta:0.005497285,Morelia_kinghorni:0.005154365):0.0134602):0.01821363,Morelia_tracyae:0.04629116):0.01280376,Morelia_amethistina:0.01395589):0.05991748):0.002735354,(Morelia_bredli:0.03028891,Morelia_spilota:0.03007576):0.02473189):0.021103 [...]
+    TREE 28 = ((Loxocemus_bicolor:0.2255088,(((Python_timoriensis:0.07514369,Python_reticulatus:0.0635069):0.05347764,(((Bothrochilus_boa:0.06150959,Liasis_albertisii:0.06228012):0.02139156,((Antaresia_ramsayi:0.03413077,Antaresia_melanocephalus:0.0425458):0.05410898,((Liasis_mackloti:0.01188359,Liasis_fuscus:0.02223782):0.0468034,(Liasis_olivaceus:0.03498516,Apodora_papuana:0.06292781):0.01013783):0.01420626):0.008876288):0.003198402,((((Morelia_spilota:0.02490744,Morelia_bredli:0.02845 [...]
+    TREE 29 = (Candoia_aspera:0.4094712,(Loxocemus_bicolor:0.2688637,(((Python_timoriensis:0.06294914,Python_reticulatus:0.04985823):0.06440863,(((((Liasis_mackloti:0.01262452,Liasis_fuscus:0.0218546):0.04041808,(Apodora_papuana:0.07252098,Liasis_olivaceus:0.03194157):0.01107863):0.02129059,(Antaresia_ramsayi:0.03048903,Antaresia_melanocephalus:0.04901979):0.03837136):0.007696275,((Morelia_boeleni:0.06125945,((Morelia_oenpelliensis:0.06644476,((Morelia_tracyae:0.03592499,((Morelia_kingho [...]
+    TREE 30 = ((((Python_regius:0.1118698,(Python_curtus:0.1179117,(Python_molurus:0.0405954,Python_sebae:0.08683961):0.01242979):0.02079802):0.03544352,((Python_timoriensis:0.07695059,Python_reticulatus:0.05132446):0.06203875,((Morelia_boeleni:0.07927639,((Bothrochilus_boa:0.06279745,Liasis_albertisii:0.0399234):0.03098695,(((Morelia_tracyae:0.04101332,(Morelia_amethistina:0.02455261,(Morelia_clastolepis:0.008176434,(Morelia_kinghorni:0.007268036,Morelia_nauta:0.006141666):0.005785571): [...]
+    TREE 31 = (Candoia_aspera:0.4832148,(Loxocemus_bicolor:0.3404342,((Python_regius:0.1242429,((Python_sebae:0.07265737,Python_molurus:0.040688):0.01471806,Python_curtus:0.1033765):0.03181258):0.0267147,((Python_timoriensis:0.07822391,Python_reticulatus:0.06789191):0.04945783,(((Antaresia_maculosa:0.07109217,(Antaresia_perthensis:0.06324736,(Antaresia_childreni:0.02405573,Antaresia_stimsoni:0.01254822):0.03062618):0.008852923):0.0138846,((Morelia_viridisN:0.05039706,Morelia_viridisS:0.0 [...]
+    TREE 32 = ((((Python_timoriensis:0.07113778,Python_reticulatus:0.05017493):0.06319461,(Morelia_boeleni:0.09190941,((((Antaresia_ramsayi:0.01813054,Antaresia_melanocephalus:0.04870207):0.05265934,((Liasis_olivaceus:0.03281665,Apodora_papuana:0.05671593):0.01603054,(Liasis_fuscus:0.01277387,Liasis_mackloti:0.01399295):0.05404157):0.02837849):0.008398836,(((Antaresia_perthensis:0.07601852,(Antaresia_childreni:0.01693324,Antaresia_stimsoni:0.02617617):0.04277307):0.006759305,Antaresia_ma [...]
+    TREE 33 = (Candoia_aspera:0.4186248,(Loxocemus_bicolor:0.3108572,(((Python_timoriensis:0.08343283,Python_reticulatus:0.05111982):0.06141429,(((((Antaresia_childreni:0.02790229,Antaresia_stimsoni:0.01830717):0.03279611,Antaresia_perthensis:0.07987803):0.01287194,Antaresia_maculosa:0.07316171):0.01139226,((Morelia_viridisS:0.05397353,Morelia_viridisN:0.03585342):0.02003358,Morelia_carinata:0.07596764):0.01745623):0.01212332,((Morelia_boeleni:0.08229089,((Bothrochilus_boa:0.06919409,Lia [...]
+    TREE 34 = ((Loxocemus_bicolor:0.3168767,((((((Bothrochilus_boa:0.05350748,Liasis_albertisii:0.06348142):0.03421965,((Antaresia_ramsayi:0.03163343,Antaresia_melanocephalus:0.03221392):0.04818877,(Apodora_papuana:0.07619766,((Liasis_mackloti:0.0113392,Liasis_fuscus:0.03694665):0.05096152,Liasis_olivaceus:0.02886953):0.01151106):0.02490245):0.007102472):0.00291576,(((Morelia_viridisS:0.04798357,Morelia_viridisN:0.03581978):0.03136736,Morelia_carinata:0.06647472):0.01053297,(Antaresia_ma [...]
+    TREE 35 = ((((Python_timoriensis:0.08489141,Python_reticulatus:0.04570306):0.06504991,(((((Morelia_bredli:0.02840635,Morelia_spilota:0.02926897):0.02782231,(Morelia_oenpelliensis:0.08431588,(Morelia_amethistina:0.01708426,((Morelia_nauta:0.003973568,(Morelia_clastolepis:0.01300467,Morelia_kinghorni:0.007130242):0.003691815):0.02251268,Morelia_tracyae:0.06377869):0.0124496):0.03801378):0.004192503):0.02315028,Morelia_boeleni:0.09657489):0.0009557305,(((Antaresia_ramsayi:0.02655588,Ant [...]
+    TREE 36 = ((((Python_regius:0.1228491,((Python_molurus:0.04996319,Python_sebae:0.07473049):0.02363043,Python_curtus:0.1170907):0.02227195):0.05125574,((Python_timoriensis:0.07688387,Python_reticulatus:0.06000572):0.05840354,((Bothrochilus_boa:0.05783399,Liasis_albertisii:0.04766374):0.04125558,((((Morelia_spilota:0.02963587,Morelia_bredli:0.03057292):0.02700875,(Morelia_oenpelliensis:0.06021393,(Morelia_tracyae:0.04262755,(((Morelia_kinghorni:0.008991961,Morelia_nauta:0.0139383):0.01 [...]
+    TREE 37 = ((Candoia_aspera:0.434272,Loxocemus_bicolor:0.2009677):0.0214365,(((Python_timoriensis:0.07098969,Python_reticulatus:0.05926525):0.06595034,(((((Antaresia_maculosa:0.06951293,((Antaresia_childreni:0.02431376,Antaresia_stimsoni:0.02256829):0.03600323,Antaresia_perthensis:0.07393358):0.01268363):0.007852837,((Morelia_viridisS:0.04831104,Morelia_viridisN:0.04151247):0.02619356,Morelia_carinata:0.06205216):0.008328914):0.01571979,((Antaresia_ramsayi:0.02940384,Antaresia_melanoc [...]
+    TREE 38 = (((((((Antaresia_ramsayi:0.01789625,Antaresia_melanocephalus:0.0471464):0.0499587,((Liasis_olivaceus:0.03622664,(Liasis_mackloti:0.009455217,Liasis_fuscus:0.03042734):0.0510626):0.01244135,Apodora_papuana:0.06968692):0.02203981):0.005449505,(((Morelia_viridisS:0.05494915,Morelia_viridisN:0.04257566):0.02547149,Morelia_carinata:0.05330917):0.00924233,(Antaresia_maculosa:0.07141571,(Antaresia_perthensis:0.06815105,(Antaresia_stimsoni:0.01596413,Antaresia_childreni:0.02007385) [...]
+    TREE 39 = (Candoia_aspera:0.4378878,(((Python_regius:0.1263037,(Python_curtus:0.09068406,(Python_sebae:0.08332073,Python_molurus:0.0359485):0.01736963):0.01742147):0.07144359,((Python_timoriensis:0.06079161,Python_reticulatus:0.04868454):0.09412199,((Morelia_boeleni:0.1011311,((((Morelia_bredli:0.03894734,Morelia_spilota:0.02293709):0.02468898,((Morelia_tracyae:0.04868068,Morelia_amethistina:0.03063431):0.001146831,(Morelia_nauta:0.0113829,(Morelia_kinghorni:0.01139991,Morelia_clasto [...]
+    TREE 40 = ((Loxocemus_bicolor:0.2781926,(((Python_curtus:0.09622619,(Python_sebae:0.08267726,Python_molurus:0.0336874):0.01590162):0.02341003,Python_regius:0.1740821):0.03569153,((((Morelia_boeleni:0.06351209,(Bothrochilus_boa:0.06119387,Liasis_albertisii:0.05070669):0.03869329):0.01018021,((Morelia_bredli:0.0365599,Morelia_spilota:0.03117491):0.02423704,(Morelia_oenpelliensis:0.07314733,((((Morelia_clastolepis:0.0102972,Morelia_kinghorni:0.009981516):0.004881791,Morelia_nauta:0.0016 [...]
+    TREE 41 = ((((Python_regius:0.1324015,((Python_sebae:0.07499595,Python_molurus:0.04531034):0.01317716,Python_curtus:0.08063795):0.02063596):0.03899759,((Python_timoriensis:0.04793628,Python_reticulatus:0.06554662):0.06914582,(((((Morelia_viridisS:0.0384385,Morelia_viridisN:0.02843732):0.01649712,Morelia_carinata:0.07616136):0.01154934,(Antaresia_maculosa:0.0688894,(Antaresia_perthensis:0.07525537,(Antaresia_childreni:0.02342297,Antaresia_stimsoni:0.0103098):0.0301651):0.01238226):0.0 [...]
+    TREE 42 = (Candoia_aspera:0.3942506,(Loxocemus_bicolor:0.3248309,((((Morelia_boeleni:0.06702698,((Morelia_bredli:0.03352857,Morelia_spilota:0.02423749):0.02958618,(Morelia_oenpelliensis:0.05828146,(((Morelia_nauta:0.007829247,Morelia_kinghorni:0.009557425):0.001428237,Morelia_clastolepis:0.008807874):0.01725166,(Morelia_amethistina:0.01830064,Morelia_tracyae:0.04028426):0.004051179):0.02744396):0.006409835):0.02247126):0.004948553,(((((Antaresia_childreni:0.0254365,Antaresia_stimsoni [...]
+    TREE 43 = (((((Python_timoriensis:0.08248895,Python_reticulatus:0.04622255):0.06467156,((((Antaresia_perthensis:0.06518122,(Antaresia_childreni:0.02174977,Antaresia_stimsoni:0.02278117):0.02851192):0.009920717,Antaresia_maculosa:0.06678848):0.01147817,((Morelia_viridisN:0.03490955,Morelia_viridisS:0.05647367):0.02252135,Morelia_carinata:0.07676534):0.0144076):0.01561488,(((Liasis_albertisii:0.03902664,Bothrochilus_boa:0.07108618):0.03421936,((Antaresia_ramsayi:0.029093,Antaresia_mela [...]
+    TREE 44 = (Candoia_aspera:0.4228648,((((Python_timoriensis:0.06553675,Python_reticulatus:0.050714):0.06988906,(((Morelia_boeleni:0.0890654,((Morelia_spilota:0.02392142,Morelia_bredli:0.02193456):0.03376877,(Morelia_oenpelliensis:0.06419856,((Morelia_amethistina:0.01759292,((Morelia_kinghorni:0.008233864,Morelia_nauta:0.007319697):0.007122637,Morelia_clastolepis:0.002995375):0.01946149):0.004642841,Morelia_tracyae:0.05521657):0.03035044):0.002240405):0.01754026):0.003550021,(Bothrochi [...]
+    TREE 45 = ((Loxocemus_bicolor:0.2980402,(((Python_timoriensis:0.0792743,Python_reticulatus:0.05323046):0.06059866,(((Morelia_boeleni:0.06727104,((((Morelia_viridisS:0.04189977,Morelia_viridisN:0.03729936):0.02232826,Morelia_carinata:0.0526073):0.0180869,(((Antaresia_stimsoni:0.01263764,Antaresia_childreni:0.01925792):0.03451767,Antaresia_perthensis:0.06742619):0.005139024,Antaresia_maculosa:0.07294127):0.007092231):0.02540082,((Antaresia_ramsayi:0.03761965,Antaresia_melanocephalus:0. [...]
+    TREE 46 = (Candoia_aspera:0.43241,(Loxocemus_bicolor:0.2980358,(((Python_timoriensis:0.07726999,Python_reticulatus:0.05869396):0.06019174,(((((Bothrochilus_boa:0.06085901,Liasis_albertisii:0.05363194):0.02773079,(Antaresia_ramsayi:0.03121562,Antaresia_melanocephalus:0.03375609):0.0527325):0.00288087,(Apodora_papuana:0.05122583,((Liasis_fuscus:0.01757352,Liasis_mackloti:0.0137212):0.04446291,Liasis_olivaceus:0.0470775):0.01009023):0.0231038):0.005364432,(((Morelia_oenpelliensis:0.0670 [...]
+    TREE 47 = ((((Python_regius:0.1096265,(Python_curtus:0.1035287,(Python_molurus:0.04023382,Python_sebae:0.09929416):0.02592233):0.028048):0.05840156,((((((Liasis_albertisii:0.06455336,Bothrochilus_boa:0.0507599):0.02743137,((Apodora_papuana:0.06914814,((Liasis_mackloti:0.01141442,Liasis_fuscus:0.01855708):0.0620712,Liasis_olivaceus:0.03846675):0.0171175):0.0129019,(Antaresia_ramsayi:0.03210937,Antaresia_melanocephalus:0.03694964):0.04985598):0.01385926):0.001769898,Morelia_boeleni:0.0 [...]
+    TREE 48 = (((((((Bothrochilus_boa:0.05890789,Liasis_albertisii:0.0609866):0.04237209,(((Antaresia_ramsayi:0.03573155,Antaresia_melanocephalus:0.03963692):0.04796978,((Liasis_fuscus:0.02907654,Liasis_mackloti:0.007246393):0.04074824,(Apodora_papuana:0.06019183,Liasis_olivaceus:0.03074218):0.0179236):0.01904604):0.008197873,((Antaresia_maculosa:0.06474745,((Antaresia_childreni:0.0257656,Antaresia_stimsoni:0.01504488):0.03637985,Antaresia_perthensis:0.06523604):0.007883245):0.009561559, [...]
+    TREE 49 = ((Candoia_aspera:0.470702,Loxocemus_bicolor:0.2956604):0.02335185,((Python_regius:0.09584661,((Python_sebae:0.0732703,Python_molurus:0.04720622):0.01763467,Python_curtus:0.117376):0.02483888):0.03117274,((Python_timoriensis:0.08037376,Python_reticulatus:0.04589197):0.05405132,((((Morelia_oenpelliensis:0.07746088,((((Morelia_nauta:0.006172668,Morelia_kinghorni:0.006447997):0.00859351,Morelia_clastolepis:0.002835837):0.02486692,Morelia_tracyae:0.03653335):0.009312386,Morelia_ [...]
+    TREE 50 = (Candoia_aspera:0.4437574,(((((Python_sebae:0.08086658,Python_molurus:0.03252307):0.02637126,Python_curtus:0.09904016):0.01531335,Python_regius:0.143583):0.0451362,(((((Antaresia_ramsayi:0.03718754,Antaresia_melanocephalus:0.03961356):0.04265559,((Apodora_papuana:0.05513246,Liasis_olivaceus:0.04102737):0.009581713,(Liasis_fuscus:0.01992797,Liasis_mackloti:0.008490396):0.05762563):0.01375438):0.01045857,(((Antaresia_perthensis:0.06574388,(Antaresia_childreni:0.02109943,Antar [...]
+    TREE 51 = ((((((((Antaresia_maculosa:0.07388047,(Antaresia_perthensis:0.05894037,(Antaresia_stimsoni:0.01014789,Antaresia_childreni:0.02392858):0.03272025):0.0127324):0.005261333,((Morelia_viridisS:0.04814902,Morelia_viridisN:0.03931801):0.02118613,Morelia_carinata:0.06743429):0.01713753):0.01871503,((Morelia_spilota:0.02897399,Morelia_bredli:0.02928582):0.03210568,((((Morelia_nauta:0.01230512,Morelia_kinghorni:0.00972926):0.00676493,Morelia_clastolepis:0.002086417):0.02346979,(Morel [...]
+    TREE 52 = ((((Python_regius:0.122739,(Python_curtus:0.1320061,(Python_sebae:0.08815482,Python_molurus:0.03987976):0.008925839):0.02849865):0.03213177,(((((Morelia_viridisS:0.05520765,Morelia_viridisN:0.03292917):0.02684103,Morelia_carinata:0.05485755):0.01440183,(((Antaresia_childreni:0.02415124,Antaresia_stimsoni:0.01568463):0.04201286,Antaresia_perthensis:0.06779229):0.006306221,Antaresia_maculosa:0.05300762):0.009265044):0.01964058,(((((Liasis_fuscus:0.01770021,Liasis_mackloti:0.0 [...]
+    TREE 53 = (((((Python_curtus:0.09620483,(Python_sebae:0.06334457,Python_molurus:0.0538921):0.01668897):0.01641654,Python_regius:0.1202121):0.03786821,(((((Bothrochilus_boa:0.04681597,Liasis_albertisii:0.04765389):0.03343482,((Antaresia_ramsayi:0.02933084,Antaresia_melanocephalus:0.0393689):0.03713987,((Liasis_olivaceus:0.03347025,Apodora_papuana:0.06483148):0.02100701,(Liasis_fuscus:0.02707484,Liasis_mackloti:0.007527475):0.03672839):0.02435954):0.006702324):0.00436005,((Antaresia_ma [...]
+    TREE 54 = ((Loxocemus_bicolor:0.3139416,Candoia_aspera:0.4187669):0.04921504,((((Python_molurus:0.04363615,Python_sebae:0.0972421):0.02066382,Python_curtus:0.09211249):0.01832167,Python_regius:0.1204208):0.06289011,(((Morelia_boeleni:0.07412663,((Morelia_bredli:0.03194801,Morelia_spilota:0.01803354):0.03580703,(Morelia_oenpelliensis:0.07438324,((Morelia_amethistina:0.01798105,(Morelia_clastolepis:0.00212618,(Morelia_kinghorni:0.003500397,Morelia_nauta:0.009602255):0.006876329):0.0078 [...]
+    TREE 55 = (Candoia_aspera:0.5149506,(Loxocemus_bicolor:0.250681,((((((Liasis_albertisii:0.05545887,Bothrochilus_boa:0.06595798):0.03867875,((Morelia_oenpelliensis:0.07140551,((Morelia_bredli:0.02271119,Morelia_spilota:0.03039649):0.0216839,((((Morelia_nauta:0.002195662,Morelia_kinghorni:0.007748968):0.007796946,Morelia_clastolepis:0.002227638):0.008569473,Morelia_tracyae:0.02678873):0.005578755,Morelia_amethistina:0.02918388):0.03912099):0.002376657):0.02392946,((((Antaresia_children [...]
+    TREE 56 = (Candoia_aspera:0.4813065,((((((((Antaresia_ramsayi:0.02598769,Antaresia_melanocephalus:0.04109392):0.04781481,(Liasis_albertisii:0.04425997,Bothrochilus_boa:0.06753105):0.03135549):0.004135552,(((Liasis_mackloti:0.01633037,Liasis_fuscus:0.03000881):0.04510967,Liasis_olivaceus:0.04156987):0.0107285,Apodora_papuana:0.06327569):0.02760798):0.0088871,Morelia_boeleni:0.09115155):0.001504997,(((Antaresia_maculosa:0.07446543,(Antaresia_perthensis:0.07047215,(Antaresia_childreni:0 [...]
+    TREE 57 = ((Loxocemus_bicolor:0.2977819,(((((((Morelia_viridisN:0.02454729,Morelia_viridisS:0.04512018):0.02222539,Morelia_carinata:0.06200557):0.01268124,(((Antaresia_childreni:0.02473758,Antaresia_stimsoni:0.01624843):0.04789705,Antaresia_perthensis:0.07009377):0.008545523,Antaresia_maculosa:0.08388544):0.007578775):0.01782145,(((Morelia_tracyae:0.04447008,((Morelia_kinghorni:0.0066021,Morelia_nauta:0.00551457):0.01082195,Morelia_clastolepis:0.001697196):0.0059375):0.004922532,More [...]
+    TREE 58 = ((((Python_regius:0.1200325,((Python_molurus:0.03591442,Python_sebae:0.0707592):0.01993933,Python_curtus:0.1045325):0.01853241):0.02707036,((Python_timoriensis:0.06786831,Python_reticulatus:0.0322445):0.08257587,(((((((Morelia_clastolepis:0.004018062,(Morelia_nauta:0.01075374,Morelia_kinghorni:0.006308406):0.005730398):0.01861746,Morelia_amethistina:0.0232728):0.00946243,Morelia_tracyae:0.03755202):0.02764432,Morelia_oenpelliensis:0.05912281):0.01292832,(Morelia_bredli:0.03 [...]
+    TREE 59 = (((Python_regius:0.1335572,(Python_curtus:0.09927795,(Python_molurus:0.03561646,Python_sebae:0.07830893):0.01697234):0.02824134):0.04031233,(((((Bothrochilus_boa:0.07133514,Liasis_albertisii:0.04973854):0.04588717,((Antaresia_ramsayi:0.03310703,Antaresia_melanocephalus:0.03941893):0.05882317,(Apodora_papuana:0.07295411,((Liasis_fuscus:0.01782433,Liasis_mackloti:0.01512761):0.05644884,Liasis_olivaceus:0.02642465):0.01125838):0.01140388):0.004935439):0.0101065,(Morelia_boelen [...]
+    TREE 60 = (Candoia_aspera:0.424371,(Loxocemus_bicolor:0.3179898,((Python_regius:0.1449508,(Python_curtus:0.1015309,(Python_molurus:0.04869052,Python_sebae:0.09786226):0.01882742):0.02882628):0.03786653,((Python_timoriensis:0.05229728,Python_reticulatus:0.06380921):0.05973712,(((((Antaresia_ramsayi:0.02761203,Antaresia_melanocephalus:0.03813273):0.05830588,((Liasis_olivaceus:0.04693766,(Liasis_fuscus:0.01833181,Liasis_mackloti:0.01696186):0.04960517):0.01702719,Apodora_papuana:0.05487 [...]
+    TREE 61 = ((((Python_regius:0.1228097,((Python_molurus:0.04315806,Python_sebae:0.07493422):0.009730695,Python_curtus:0.1072914):0.02183078):0.04432623,((Python_timoriensis:0.08054852,Python_reticulatus:0.05729257):0.06275327,(((Morelia_carinata:0.0753587,(Morelia_viridisS:0.05041945,Morelia_viridisN:0.03648667):0.02461579):0.0115986,(((Antaresia_stimsoni:0.01180749,Antaresia_childreni:0.02516542):0.03703654,Antaresia_perthensis:0.06873682):0.01651968,Antaresia_maculosa:0.07148424):0. [...]
+    TREE 62 = (((((Python_curtus:0.08909703,(Python_molurus:0.03328324,Python_sebae:0.0715058):0.01824785):0.007554571,Python_regius:0.1454022):0.03384772,((((((Morelia_viridisS:0.05543826,Morelia_viridisN:0.03630355):0.01622295,Morelia_carinata:0.07316485):0.01367374,(Antaresia_maculosa:0.06581564,((Antaresia_childreni:0.0310494,Antaresia_stimsoni:0.01245022):0.03712444,Antaresia_perthensis:0.06435933):0.01742895):0.004963226):0.02577853,(Morelia_boeleni:0.09387192,((Morelia_bredli:0.02 [...]
+    TREE 63 = (Candoia_aspera:0.4745529,(((Python_regius:0.1135173,(Python_curtus:0.1012712,(Python_molurus:0.04331739,Python_sebae:0.07786967):0.01125215):0.03818491):0.03021289,((Python_timoriensis:0.07194533,Python_reticulatus:0.04773572):0.07113062,((((Morelia_spilota:0.02865189,Morelia_bredli:0.03249746):0.02966558,(((Morelia_tracyae:0.04997421,(Morelia_nauta:0.004001658,(Morelia_clastolepis:0.006449675,Morelia_kinghorni:0.007933115):0.006298222):0.02287617):0.00191708,Morelia_ameth [...]
+    TREE 64 = ((((((((Morelia_viridisS:0.04558879,Morelia_viridisN:0.03250553):0.01332789,Morelia_carinata:0.0753067):0.01978827,(Antaresia_maculosa:0.07612639,((Antaresia_stimsoni:0.0233159,Antaresia_childreni:0.02086903):0.02612487,Antaresia_perthensis:0.06545956):0.01015421):0.01066412):0.01823755,((((Antaresia_ramsayi:0.04189037,Antaresia_melanocephalus:0.04454489):0.04572384,(((Liasis_mackloti:0.004894573,Liasis_fuscus:0.01614364):0.05091172,Liasis_olivaceus:0.02327591):0.01019945,A [...]
+    TREE 65 = ((((Python_regius:0.1065045,((Python_molurus:0.02903804,Python_sebae:0.07866277):0.01231844,Python_curtus:0.1314011):0.027084):0.03262673,((Python_timoriensis:0.06663792,Python_reticulatus:0.0784987):0.07588092,(((Bothrochilus_boa:0.07634096,Liasis_albertisii:0.04452527):0.02809324,(((Morelia_oenpelliensis:0.06509642,(Morelia_tracyae:0.0458238,(((Morelia_nauta:0.0121269,Morelia_kinghorni:0.006513538):0.009945509,Morelia_clastolepis:0.01318087):0.02319886,Morelia_amethistina [...]
+    TREE 66 = (Candoia_aspera:0.5274461,(((Python_timoriensis:0.06902759,Python_reticulatus:0.06108568):0.06781643,((((((Antaresia_childreni:0.02527601,Antaresia_stimsoni:0.01240928):0.04320152,Antaresia_perthensis:0.06080926):0.01246198,Antaresia_maculosa:0.06380906):0.009021079,((Morelia_viridisS:0.07220582,Morelia_viridisN:0.0327611):0.01721496,Morelia_carinata:0.06624881):0.007599396):0.02715264,(((Bothrochilus_boa:0.06799821,Liasis_albertisii:0.06149457):0.0249193,((Antaresia_ramsay [...]
+    TREE 67 = ((((Python_regius:0.143336,((Python_molurus:0.05192443,Python_sebae:0.08336195):0.02057265,Python_curtus:0.09573655):0.02544282):0.04651235,((((((((Liasis_fuscus:0.01732133,Liasis_mackloti:0.01183748):0.0552515,Liasis_olivaceus:0.03096171):0.01753867,Apodora_papuana:0.06168452):0.02608198,(Antaresia_ramsayi:0.02889135,Antaresia_melanocephalus:0.03600234):0.05855319):0.009485744,(Bothrochilus_boa:0.06625782,Liasis_albertisii:0.04617117):0.03191976):0.002849137,(Morelia_boele [...]
+    TREE 68 = ((Loxocemus_bicolor:0.2719207,Candoia_aspera:0.4906847):0.04718485,((Python_regius:0.1124593,((Python_molurus:0.0509666,Python_sebae:0.07738415):0.008989942,Python_curtus:0.09897486):0.03516115):0.03687499,((Python_timoriensis:0.06800461,Python_reticulatus:0.06016279):0.06116017,(((Liasis_albertisii:0.06944622,Bothrochilus_boa:0.05920302):0.03120987,(((Liasis_olivaceus:0.05416647,(Liasis_mackloti:0.01101725,Liasis_fuscus:0.01722447):0.05929124):0.008101523,Apodora_papuana:0 [...]
+    TREE 69 = ((((Python_timoriensis:0.07901859,Python_reticulatus:0.04337921):0.05715234,((((Bothrochilus_boa:0.07370024,Liasis_albertisii:0.05462079):0.02485725,((((Liasis_fuscus:0.02829583,Liasis_mackloti:0.009995232):0.04878129,Liasis_olivaceus:0.03673495):0.01427831,Apodora_papuana:0.05211239):0.01637035,(Antaresia_ramsayi:0.03273632,Antaresia_melanocephalus:0.03475002):0.04373889):0.009508868):0.004413184,(((Morelia_oenpelliensis:0.05244673,(Morelia_tracyae:0.02733242,(((Morelia_na [...]
+    TREE 70 = (Candoia_aspera:0.4106247,(Loxocemus_bicolor:0.2331907,((((((((Antaresia_childreni:0.02215029,Antaresia_stimsoni:0.009244344):0.03566251,Antaresia_perthensis:0.07581442):0.01354362,Antaresia_maculosa:0.06464157):0.00675194,((Morelia_viridisN:0.03936049,Morelia_viridisS:0.04292648):0.01785218,Morelia_carinata:0.05712708):0.01866541):0.01866694,((Morelia_oenpelliensis:0.06002338,((Morelia_tracyae:0.03985103,Morelia_amethistina:0.03190266):0.006075925,(Morelia_nauta:0.00277160 [...]
+    TREE 71 = (Candoia_aspera:0.4427327,((((Python_timoriensis:0.07904484,Python_reticulatus:0.05445465):0.06086328,((((Morelia_spilota:0.02948698,Morelia_bredli:0.04304928):0.02883553,(((Morelia_tracyae:0.03379621,((Morelia_clastolepis:0.00613919,Morelia_kinghorni:0.005279703):0.007664687,Morelia_nauta:0.002861372):0.02350224):0.004278877,Morelia_amethistina:0.02920345):0.03896447,Morelia_oenpelliensis:0.05935515):0.004404024):0.02608104,(((Antaresia_perthensis:0.06830344,(Antaresia_sti [...]
+    TREE 72 = ((Loxocemus_bicolor:0.3099623,((Python_regius:0.1377144,((Python_sebae:0.06061368,Python_molurus:0.04563488):0.03354163,Python_curtus:0.08988597):0.02558453):0.03970288,((Python_timoriensis:0.08715385,Python_reticulatus:0.05952325):0.08806109,((((Morelia_bredli:0.03275956,Morelia_spilota:0.025875):0.03529276,(Morelia_oenpelliensis:0.06489886,(Morelia_tracyae:0.03946847,(Morelia_amethistina:0.02457683,(Morelia_clastolepis:0.003320996,(Morelia_kinghorni:0.002810903,Morelia_na [...]
+    TREE 73 = (Candoia_aspera:0.4348448,((((((Morelia_boeleni:0.07664532,((Morelia_bredli:0.03846738,Morelia_spilota:0.03022961):0.03426844,(Morelia_oenpelliensis:0.05603453,(((Morelia_nauta:0.008166571,(Morelia_clastolepis:0.009570103,Morelia_kinghorni:0.008340767):0.002700153):0.01590274,Morelia_amethistina:0.02135429):0.003489883,Morelia_tracyae:0.02901899):0.0362906):0.004718814):0.03173114):0.003755895,(((Antaresia_ramsayi:0.02953742,Antaresia_melanocephalus:0.03996934):0.05390883,( [...]
+    TREE 74 = (Candoia_aspera:0.4742071,((((Python_timoriensis:0.06586218,Python_reticulatus:0.06259166):0.06589371,((((Antaresia_maculosa:0.07162944,((Antaresia_childreni:0.021786,Antaresia_stimsoni:0.01148695):0.02689859,Antaresia_perthensis:0.07131155):0.005756796):0.01229476,((Morelia_viridisS:0.04413955,Morelia_viridisN:0.05235628):0.02208944,Morelia_carinata:0.07115939):0.01801993):0.01841187,((Apodora_papuana:0.06862411,(Liasis_olivaceus:0.03289194,(Liasis_fuscus:0.02546166,Liasis [...]
+    TREE 75 = ((Loxocemus_bicolor:0.2760261,(((Python_timoriensis:0.05565623,Python_reticulatus:0.0666305):0.06163908,(((((Morelia_bredli:0.02809343,Morelia_spilota:0.02234101):0.04248839,((Morelia_tracyae:0.03855563,(Morelia_amethistina:0.0194194,((Morelia_kinghorni:0.007230503,Morelia_nauta:0.01597799):0.003673781,Morelia_clastolepis:0.002712423):0.01983554):0.00770982):0.0348642,Morelia_oenpelliensis:0.0778497):0.007505558):0.02584305,Morelia_boeleni:0.08255867):0.004142258,((Bothroch [...]
+    TREE 76 = (Candoia_aspera:0.4489852,(Loxocemus_bicolor:0.2784511,((((((Morelia_viridisS:0.0478914,Morelia_viridisN:0.03382179):0.01390197,Morelia_carinata:0.06547169):0.01186083,(((Antaresia_childreni:0.02366335,Antaresia_stimsoni:0.01261576):0.0373104,Antaresia_perthensis:0.0698693):0.009951808,Antaresia_maculosa:0.0621237):0.009221396):0.01693595,((Morelia_boeleni:0.0985658,((Bothrochilus_boa:0.06368156,Liasis_albertisii:0.05812566):0.04186163,(((Liasis_olivaceus:0.03976913,(Liasis [...]
+    TREE 77 = ((Loxocemus_bicolor:0.2980009,(((Python_timoriensis:0.07492163,Python_reticulatus:0.05726328):0.06871158,(((((Morelia_viridisS:0.04674211,Morelia_viridisN:0.03242869):0.02905742,Morelia_carinata:0.0640633):0.01010355,(((Antaresia_childreni:0.01838723,Antaresia_stimsoni:0.01565296):0.04438823,Antaresia_perthensis:0.08484077):0.01913492,Antaresia_maculosa:0.07844496):0.003377368):0.01497016,((Bothrochilus_boa:0.06132987,Liasis_albertisii:0.03927122):0.03479677,((((Liasis_fusc [...]
+    TREE 78 = (Candoia_aspera:0.3932904,(Loxocemus_bicolor:0.2749132,(((Python_curtus:0.09183693,(Python_sebae:0.07637954,Python_molurus:0.03762318):0.02709624):0.02812698,Python_regius:0.1154282):0.03559674,(((((((Antaresia_stimsoni:0.01446194,Antaresia_childreni:0.02531378):0.04006727,Antaresia_perthensis:0.06666538):0.005905826,Antaresia_maculosa:0.07190364):0.006189105,(Morelia_carinata:0.06419419,(Morelia_viridisS:0.05339065,Morelia_viridisN:0.02297795):0.03153359):0.01090103):0.016 [...]
+    TREE 79 = ((Candoia_aspera:0.4868107,Loxocemus_bicolor:0.2301868):0.06696102,(((Python_curtus:0.09902725,(Python_sebae:0.07607854,Python_molurus:0.03042101):0.01754461):0.01284939,Python_regius:0.1175586):0.05074195,((((((((Morelia_nauta:0.008126706,Morelia_kinghorni:0.005022595):0.002074662,Morelia_clastolepis:0.004239987):0.01940375,Morelia_amethistina:0.02282263):0.006885183,Morelia_tracyae:0.02829901):0.05350385,Morelia_oenpelliensis:0.07531652):0.003938017,(Morelia_spilota:0.025 [...]
+    TREE 80 = ((((Python_regius:0.1445806,(Python_curtus:0.1271761,(Python_sebae:0.08660773,Python_molurus:0.03642296):0.02308224):0.03951594):0.03432268,((Python_timoriensis:0.05008058,Python_reticulatus:0.08503014):0.07968167,((((((Liasis_fuscus:0.02531552,Liasis_mackloti:0.00943019):0.05551294,Liasis_olivaceus:0.03532999):0.01382014,Apodora_papuana:0.06672151):0.01296964,(Antaresia_ramsayi:0.02616247,Antaresia_melanocephalus:0.04629737):0.03687331):0.01178619,(((Morelia_viridisS:0.045 [...]
+    TREE 81 = (Candoia_aspera:0.4575053,((((Python_timoriensis:0.06194463,Python_reticulatus:0.04911996):0.06573001,((Morelia_boeleni:0.07789338,((Morelia_bredli:0.02561557,Morelia_spilota:0.02375866):0.04673673,(Morelia_oenpelliensis:0.06590139,((((Morelia_kinghorni:0.004965855,Morelia_clastolepis:0.01490083):0.0009494989,Morelia_nauta:0.002852371):0.0104046,Morelia_amethistina:0.02724809):0.01478698,Morelia_tracyae:0.03441991):0.02561766):0.004808734):0.02705979):0.006449734,(((((Morel [...]
+    TREE 82 = ((Loxocemus_bicolor:0.2755664,((Python_regius:0.1286608,((Python_sebae:0.08250047,Python_molurus:0.0305042):0.01309972,Python_curtus:0.09564767):0.01610108):0.04804593,((Python_timoriensis:0.07700127,Python_reticulatus:0.0470671):0.07753829,(((Morelia_boeleni:0.09111672,(Morelia_oenpelliensis:0.06892866,(((((Morelia_kinghorni:0.003380163,Morelia_nauta:0.00575853):0.006891641,Morelia_clastolepis:0.007008237):0.01516912,Morelia_amethistina:0.0213821):0.01504177,Morelia_tracya [...]
+    TREE 83 = ((Loxocemus_bicolor:0.2393583,((Python_regius:0.1362413,(Python_curtus:0.09981791,(Python_sebae:0.08732982,Python_molurus:0.03965756):0.02106739):0.01324574):0.04504424,((((((Liasis_mackloti:0.01502157,Liasis_fuscus:0.02332284):0.04350871,(Apodora_papuana:0.06142209,Liasis_olivaceus:0.03680022):0.01491341):0.0237058,(Antaresia_ramsayi:0.03335222,Antaresia_melanocephalus:0.03614687):0.05208556):0.01276464,(Morelia_boeleni:0.08159735,(((Morelia_spilota:0.02708637,Morelia_bred [...]
+    TREE 84 = ((Loxocemus_bicolor:0.2557088,((((Python_sebae:0.08304661,Python_molurus:0.04328625):0.02159618,Python_curtus:0.06878299):0.02377922,Python_regius:0.10624):0.02370423,((((Morelia_carinata:0.06516085,(Morelia_viridisS:0.03397963,Morelia_viridisN:0.03858964):0.01392689):0.01952269,(Antaresia_maculosa:0.05096261,((Antaresia_stimsoni:0.01894518,Antaresia_childreni:0.02643887):0.03187506,Antaresia_perthensis:0.06112747):0.01289749):0.007764347):0.01485805,((Morelia_boeleni:0.078 [...]
+    TREE 85 = (Candoia_aspera:0.446456,(((((Python_sebae:0.0664236,Python_molurus:0.05748319):0.01384624,Python_curtus:0.1246156):0.01920644,Python_regius:0.09562755):0.03579852,((Python_timoriensis:0.06854112,Python_reticulatus:0.07423406):0.06198347,(((((Antaresia_maculosa:0.0753139,(Antaresia_perthensis:0.05776215,(Antaresia_childreni:0.0188791,Antaresia_stimsoni:0.01203823):0.03974037):0.01302284):0.01107751,((Morelia_viridisS:0.04315065,Morelia_viridisN:0.0447115):0.01881795,Morelia [...]
+    TREE 86 = ((((Python_regius:0.1195237,((Python_molurus:0.05424041,Python_sebae:0.06745426):0.01500456,Python_curtus:0.09363999):0.01373712):0.07174102,((Python_timoriensis:0.05317857,Python_reticulatus:0.07866524):0.06443481,((((Antaresia_perthensis:0.06871814,(Antaresia_childreni:0.01631375,Antaresia_stimsoni:0.01858184):0.03573909):0.00997242,Antaresia_maculosa:0.06653337):0.01156942,((Morelia_viridisS:0.05041764,Morelia_viridisN:0.03210906):0.02278023,Morelia_carinata:0.06324297): [...]
+    TREE 87 = (Candoia_aspera:0.4403734,(Loxocemus_bicolor:0.2585825,(((Python_curtus:0.09600904,(Python_molurus:0.04325963,Python_sebae:0.06144044):0.02193821):0.03512357,Python_regius:0.1158208):0.0226537,((((((Antaresia_ramsayi:0.02896362,Antaresia_melanocephalus:0.04374086):0.05633091,(Bothrochilus_boa:0.07453325,Liasis_albertisii:0.04886916):0.0406942):0.00253113,(Apodora_papuana:0.06827618,((Liasis_fuscus:0.0190238,Liasis_mackloti:0.01240811):0.04949205,Liasis_olivaceus:0.04193583) [...]
+    TREE 88 = ((((Python_regius:0.1262411,(Python_curtus:0.1008612,(Python_sebae:0.07364004,Python_molurus:0.03651178):0.02407779):0.0260306):0.02824526,((((Morelia_boeleni:0.07070576,((Morelia_bredli:0.03022369,Morelia_spilota:0.02205107):0.02783955,(Morelia_oenpelliensis:0.0634195,(Morelia_tracyae:0.03485566,((Morelia_clastolepis:0.007226027,(Morelia_nauta:0.01372385,Morelia_kinghorni:0.01134917):0.003584604):0.009064959,Morelia_amethistina:0.02136233):0.006429469):0.02626722):0.007624 [...]
+    TREE 89 = ((Loxocemus_bicolor:0.2893372,((Python_regius:0.1073912,(Python_curtus:0.09348169,(Python_sebae:0.08938309,Python_molurus:0.03446394):0.01312052):0.02814457):0.0358765,((((((((Morelia_tracyae:0.03510133,((Morelia_clastolepis:0.0046623,Morelia_kinghorni:0.001573419):0.006380224,Morelia_nauta:0.005709148):0.01415564):0.004559492,Morelia_amethistina:0.02868146):0.03694752,Morelia_oenpelliensis:0.06467572):0.009860806,(Morelia_spilota:0.02221128,Morelia_bredli:0.02633986):0.029 [...]
+    TREE 90 = (((((Python_timoriensis:0.09696215,Python_reticulatus:0.05811112):0.0655036,((((((Liasis_olivaceus:0.0365148,Apodora_papuana:0.06857998):0.01276053,(Liasis_mackloti:0.008411,Liasis_fuscus:0.01901003):0.04816006):0.02602376,(Antaresia_ramsayi:0.03002571,Antaresia_melanocephalus:0.03848471):0.03704017):0.007975364,((((Antaresia_stimsoni:0.01721904,Antaresia_childreni:0.02350975):0.02876343,Antaresia_perthensis:0.07909582):0.01076695,Antaresia_maculosa:0.07836454):0.006590077, [...]
+    TREE 91 = (Candoia_aspera:0.4052354,(((Python_regius:0.1509565,(Python_curtus:0.07502647,(Python_molurus:0.03578726,Python_sebae:0.08224294):0.02128537):0.02903882):0.04946381,((Python_timoriensis:0.07174778,Python_reticulatus:0.05925871):0.07438774,(((((Antaresia_ramsayi:0.0296355,Antaresia_melanocephalus:0.03872158):0.05083471,(Bothrochilus_boa:0.04803823,Liasis_albertisii:0.04184282):0.03385926):0.006801014,(((Liasis_fuscus:0.01315756,Liasis_mackloti:0.01757213):0.04596919,Liasis_ [...]
+    TREE 92 = ((Candoia_aspera:0.4832728,Loxocemus_bicolor:0.3133952):0.05045952,(((Python_curtus:0.1003648,(Python_sebae:0.07701747,Python_molurus:0.04519635):0.02379189):0.018113,Python_regius:0.1339096):0.04035469,((Python_timoriensis:0.0802541,Python_reticulatus:0.05762582):0.07422132,((Morelia_boeleni:0.07869006,(((Morelia_oenpelliensis:0.06127573,(((Morelia_clastolepis:0.003375919,(Morelia_nauta:0.005666266,Morelia_kinghorni:0.0128026):0.002713472):0.007898866,Morelia_amethistina:0 [...]
+    TREE 93 = ((((Python_regius:0.1272317,(Python_curtus:0.09829021,(Python_sebae:0.09489939,Python_molurus:0.04318147):0.01798881):0.034006):0.0462139,((Python_timoriensis:0.05756501,Python_reticulatus:0.05855822):0.07458395,(((((Morelia_carinata:0.0619902,(Morelia_viridisS:0.05075946,Morelia_viridisN:0.03469798):0.02745101):0.007211946,(((Antaresia_stimsoni:0.0163066,Antaresia_childreni:0.02426005):0.03499836,Antaresia_perthensis:0.06839545):0.01688134,Antaresia_maculosa:0.06387924):0. [...]
+    TREE 94 = ((Loxocemus_bicolor:0.315418,(((Python_curtus:0.1002578,(Python_sebae:0.06770584,Python_molurus:0.0298364):0.01918509):0.01848734,Python_regius:0.1144905):0.04568069,((Morelia_boeleni:0.08331781,((((Morelia_carinata:0.06687796,(Morelia_viridisS:0.05088015,Morelia_viridisN:0.03185982):0.0174918):0.01702257,(((Antaresia_stimsoni:0.00951792,Antaresia_childreni:0.02495718):0.0336015,Antaresia_perthensis:0.06321583):0.01991029,Antaresia_maculosa:0.0697469):0.006936756):0.0105505 [...]
+    TREE 95 = (((((Python_sebae:0.0744125,Python_molurus:0.03313892):0.01573011,Python_curtus:0.1037118):0.01292763,Python_regius:0.1029389):0.03822617,((Morelia_boeleni:0.08820164,((((Antaresia_maculosa:0.06042653,(Antaresia_perthensis:0.0608496,(Antaresia_childreni:0.0271723,Antaresia_stimsoni:0.01380911):0.03972871):0.01204421):0.007583394,((Morelia_viridisN:0.03192192,Morelia_viridisS:0.05435536):0.02514259,Morelia_carinata:0.07010455):0.01334626):0.01615479,(((Antaresia_ramsayi:0.02 [...]
+    TREE 96 = (Candoia_aspera:0.4928541,(Loxocemus_bicolor:0.2816248,(((((Morelia_oenpelliensis:0.08393614,(Morelia_amethistina:0.02446814,(Morelia_tracyae:0.04530357,((Morelia_kinghorni:0.007960707,Morelia_clastolepis:0.01675912):0.004557217,Morelia_nauta:0.008147904):0.01430799):0.004738885):0.0274262):0.003287132,(Morelia_bredli:0.0284451,Morelia_spilota:0.02765243):0.02726073):0.02848068,((((((Morelia_viridisS:0.05303163,Morelia_viridisN:0.03323683):0.01782342,Morelia_carinata:0.0612 [...]
+    TREE 97 = (Candoia_aspera:0.4487899,((((Python_timoriensis:0.06146061,Python_reticulatus:0.05946495):0.07191773,((((Morelia_tracyae:0.0347971,(Morelia_amethistina:0.02803992,((Morelia_nauta:0.00500443,Morelia_kinghorni:0.006595522):0.003929872,Morelia_clastolepis:0.007496731):0.013329):0.01062581):0.02225971,Morelia_oenpelliensis:0.05555536):0.006840461,(Morelia_bredli:0.03307905,Morelia_spilota:0.02268619):0.02215481):0.03594669,((((Liasis_albertisii:0.03243545,Bothrochilus_boa:0.05 [...]
+    TREE 98 = (((((Python_curtus:0.1013429,(Python_molurus:0.03905245,Python_sebae:0.06607451):0.01882211):0.02961412,Python_regius:0.1080206):0.04239082,((Python_timoriensis:0.05902375,Python_reticulatus:0.0508136):0.06075674,((Morelia_boeleni:0.06727115,((Bothrochilus_boa:0.05342648,Liasis_albertisii:0.05218567):0.02163257,((Antaresia_ramsayi:0.02493475,Antaresia_melanocephalus:0.03825297):0.04441375,((Liasis_olivaceus:0.04086729,Apodora_papuana:0.05537593):0.009908886,(Liasis_fuscus:0 [...]
+    TREE 99 = ((((Python_timoriensis:0.05073709,Python_reticulatus:0.07277766):0.06283212,(Morelia_boeleni:0.08695013,(((((Antaresia_maculosa:0.08512621,(Antaresia_perthensis:0.07026724,(Antaresia_childreni:0.03156755,Antaresia_stimsoni:0.0139927):0.03766207):0.01538491):0.004298772,((Morelia_viridisS:0.03700609,Morelia_viridisN:0.04516503):0.03111741,Morelia_carinata:0.05135428):0.006007194):0.02055132,((((Liasis_fuscus:0.01989677,Liasis_mackloti:0.006589301):0.05645972,Liasis_olivaceus [...]
+    TREE 100 = (Candoia_aspera:0.4332915,((((Python_curtus:0.1152573,(Python_molurus:0.03795271,Python_sebae:0.0840193):0.01046507):0.01900506,Python_regius:0.1219839):0.03456921,((Python_timoriensis:0.05963154,Python_reticulatus:0.06979965):0.0631162,((((((Morelia_oenpelliensis:0.0510031,(Morelia_tracyae:0.05728741,((Morelia_clastolepis:0.00554597,(Morelia_kinghorni:0.007204392,Morelia_nauta:0.01336245):0.004239199):0.01553404,Morelia_amethistina:0.0174348):0.009414604):0.04888199):0.00 [...]
+    TREE 101 = ((((Python_timoriensis:0.05801513,Python_reticulatus:0.07722868):0.06553758,((((Morelia_bredli:0.03139869,Morelia_spilota:0.02840328):0.02824259,((((Morelia_clastolepis:0.00205874,(Morelia_nauta:0.01238758,Morelia_kinghorni:0.00354312):0.006115592):0.01059278,Morelia_tracyae:0.04180855):0.0009798313,Morelia_amethistina:0.02698061):0.03620676,Morelia_oenpelliensis:0.07046009):0.01248873):0.02405634,((Morelia_carinata:0.07603419,(Morelia_viridisN:0.02986943,Morelia_viridisS: [...]
+    TREE 11 = (Candoia_aspera:0.4514987,((((Python_reticulatus:0.05002665,Python_timoriensis:0.07676918):0.07413768,((Liasis_albertisii:0.0645974,Bothrochilus_boa:0.0641873):0.03771287,((Morelia_boeleni:0.09023818,(((Morelia_carinata:0.05665086,(Morelia_viridisN:0.02441905,Morelia_viridisS:0.05618434):0.03360802):0.01153914,(Antaresia_maculosa:0.06928336,(Antaresia_perthensis:0.06768544,(Antaresia_childreni:0.0232448,Antaresia_stimsoni:0.01980798):0.0361576):0.01361446):0.007641934):0.01 [...]
+    TREE 12 = (((Python_regius:0.1195811,(Python_curtus:0.101114,(Python_molurus:0.03377527,Python_sebae:0.08745063):0.01391006):0.01872767):0.0423752,((((((Liasis_olivaceus:0.04405596,Apodora_papuana:0.05899017):0.01893467,(Liasis_mackloti:0.01092328,Liasis_fuscus:0.01664283):0.0462348):0.0195773,(Antaresia_melanocephalus:0.03674829,Antaresia_ramsayi:0.02147645):0.04341939):0.005780893,((Morelia_carinata:0.07176092,(Morelia_viridisN:0.03358656,Morelia_viridisS:0.05472175):0.02865343):0. [...]
+    TREE 13 = ((Loxocemus_bicolor:0.255402,(((Morelia_boeleni:0.07990584,((((((Antaresia_childreni:0.02146837,Antaresia_stimsoni:0.01322464):0.03931901,Antaresia_perthensis:0.06801511):0.01225223,Antaresia_maculosa:0.08151301):0.008640029,(Morelia_carinata:0.05972546,(Morelia_viridisN:0.04594044,Morelia_viridisS:0.05674085):0.0189179):0.007972871):0.01533096,((Morelia_bredli:0.02451975,Morelia_spilota:0.0358741):0.03381143,(Morelia_oenpelliensis:0.05865898,(Morelia_tracyae:0.03683725,((M [...]
+    TREE 14 = ((Candoia_aspera:0.4473626,Loxocemus_bicolor:0.2344362):0.02036165,((((Python_molurus:0.03949217,Python_sebae:0.08887615):0.0135172,Python_regius:0.1395213):0.01295109,Python_curtus:0.09972717):0.05815295,(((((Apodora_papuana:0.05956359,(Liasis_olivaceus:0.04120433,(Liasis_mackloti:0.01339387,Liasis_fuscus:0.01811019):0.05259457):0.01505299):0.02662488,(Antaresia_melanocephalus:0.03133705,Antaresia_ramsayi:0.0411973):0.05179539):0.01417946,((Antaresia_maculosa:0.05978383,(A [...]
+    TREE 15 = (((((Python_curtus:0.1133995,(Python_molurus:0.03883978,Python_sebae:0.08963129):0.02452025):0.02667311,Python_regius:0.09942004):0.04684523,((Python_timoriensis:0.05561288,Python_reticulatus:0.05469003):0.0510733,((Morelia_boeleni:0.08459562,((Morelia_oenpelliensis:0.07649581,(Morelia_tracyae:0.06857963,(Morelia_amethistina:0.03090716,((Morelia_kinghorni:0.01143323,Morelia_clastolepis:0.009882759):0.005533701,Morelia_nauta:0.005203006):0.01649407):0.006763233):0.03048232): [...]
+    TREE 16 = (Candoia_aspera:0.454411,((((Python_curtus:0.1047953,(Python_molurus:0.04792883,Python_sebae:0.06590967):0.01822905):0.02141573,Python_regius:0.1076625):0.04547916,((Python_timoriensis:0.0652374,Python_reticulatus:0.05757644):0.06733267,(((Bothrochilus_boa:0.06078097,Liasis_albertisii:0.05895738):0.03564124,((Morelia_spilota:0.03153276,Morelia_bredli:0.03022422):0.03004519,(Morelia_oenpelliensis:0.05195458,(Morelia_tracyae:0.04952684,(Morelia_amethistina:0.0167668,(Morelia_ [...]
+    TREE 17 = ((Loxocemus_bicolor:0.2950447,Candoia_aspera:0.4956194):0.03409912,(((Python_reticulatus:0.06170708,Python_timoriensis:0.07679534):0.06662456,((((Liasis_fuscus:0.02247032,Liasis_mackloti:0.01165581):0.05333089,(Liasis_olivaceus:0.03201475,Apodora_papuana:0.07040275):0.01349114):0.01793611,(Antaresia_ramsayi:0.02707999,Antaresia_melanocephalus:0.04144063):0.07271472):0.0143494,((Morelia_boeleni:0.07941718,((Morelia_oenpelliensis:0.07000013,((Morelia_tracyae:0.04297898,(Morel [...]
+    TREE 18 = (Candoia_aspera:0.4206473,((((((Bothrochilus_boa:0.05545258,Liasis_albertisii:0.05086388):0.02245116,(((((Morelia_tracyae:0.04497596,(Morelia_clastolepis:0.002300433,(Morelia_kinghorni:0.007580557,Morelia_nauta:0.01209723):0.007905989):0.009573244):0.003758006,Morelia_amethistina:0.02453728):0.04117076,Morelia_oenpelliensis:0.05460769):0.009776291,(Morelia_spilota:0.02406363,Morelia_bredli:0.02809877):0.02882019):0.03137162,Morelia_boeleni:0.08935267):0.00152077):0.01108653 [...]
+    TREE 19 = ((Loxocemus_bicolor:0.2758306,(((Morelia_boeleni:0.09712222,((((Apodora_papuana:0.06032311,(Liasis_olivaceus:0.03718835,(Liasis_fuscus:0.01776308,Liasis_mackloti:0.02078969):0.0453022):0.01128282):0.01605221,(Antaresia_ramsayi:0.02804757,Antaresia_melanocephalus:0.03257871):0.06131908):0.008631585,(Liasis_albertisii:0.06289567,Bothrochilus_boa:0.05847094):0.02659356):0.001774679,(((((Antaresia_childreni:0.0293294,Antaresia_stimsoni:0.01419086):0.03654786,Antaresia_perthensi [...]
+    TREE 20 = (((Python_regius:0.09707175,(Python_curtus:0.105097,(Python_molurus:0.03923706,Python_sebae:0.06965362):0.02029509):0.02375806):0.04989682,((Python_reticulatus:0.06519806,Python_timoriensis:0.06124822):0.07887286,((((((Antaresia_stimsoni:0.01019489,Antaresia_childreni:0.02732365):0.03463165,Antaresia_perthensis:0.06931207):0.0109482,Antaresia_maculosa:0.07265653):0.007259249,((Morelia_viridisS:0.0544687,Morelia_viridisN:0.03351613):0.02469302,Morelia_carinata:0.05918813):0. [...]
+    TREE 21 = (Candoia_aspera:0.5100953,(((((Python_sebae:0.07591037,Python_molurus:0.04685888):0.0159693,Python_curtus:0.1194869):0.03190542,Python_regius:0.1219057):0.03764082,((Python_reticulatus:0.06955304,Python_timoriensis:0.06829867):0.07106028,(((((Morelia_viridisN:0.04546841,Morelia_viridisS:0.03973028):0.03274359,Morelia_carinata:0.07279532):0.01029072,(Antaresia_maculosa:0.06514227,(Antaresia_perthensis:0.04889998,(Antaresia_childreni:0.02577757,Antaresia_stimsoni:0.01387249): [...]
+    TREE 22 = ((Loxocemus_bicolor:0.2604766,Candoia_aspera:0.477939):0.04974364,((Python_regius:0.1139038,(Python_curtus:0.07771172,(Python_molurus:0.0329663,Python_sebae:0.07713101):0.01611444):0.02797897):0.03766492,((Python_reticulatus:0.07004576,Python_timoriensis:0.0529521):0.07873452,(((Liasis_albertisii:0.06200187,Bothrochilus_boa:0.04445638):0.04020958,(Morelia_boeleni:0.09025494,((Antaresia_ramsayi:0.03416303,Antaresia_melanocephalus:0.04850508):0.05644415,(Apodora_papuana:0.054 [...]
+    TREE 23 = ((Loxocemus_bicolor:0.262835,Candoia_aspera:0.4233526):0.05244274,((Python_regius:0.1431143,(Python_curtus:0.1154717,(Python_molurus:0.03285879,Python_sebae:0.08925105):0.0224638):0.03287098):0.04580999,((((Morelia_spilota:0.03001221,Morelia_bredli:0.02447762):0.02814649,(((((Morelia_kinghorni:0.006512097,Morelia_nauta:0.005269942):0.004088566,Morelia_clastolepis:0.006666917):0.01101066,Morelia_tracyae:0.03173798):0.003501345,Morelia_amethistina:0.02439264):0.04355536,Morel [...]
+    TREE 24 = (Candoia_aspera:0.4612549,(((Python_regius:0.1091584,(Python_curtus:0.09906034,(Python_molurus:0.04003386,Python_sebae:0.08731364):0.007798586):0.02269536):0.0371614,(((((((Antaresia_stimsoni:0.01576134,Antaresia_childreni:0.01874793):0.0397746,Antaresia_perthensis:0.06988391):0.0103778,Antaresia_maculosa:0.07115922):0.01047385,((Morelia_viridisN:0.03623554,Morelia_viridisS:0.05393487):0.02483759,Morelia_carinata:0.06020019):0.01557238):0.01448435,(((Morelia_spilota:0.03095 [...]
+    TREE 25 = (Candoia_aspera:0.5061518,(((Python_regius:0.1146303,(Python_curtus:0.09799544,(Python_sebae:0.07231277,Python_molurus:0.03281137):0.02135392):0.02387459):0.0490128,((((((((Morelia_kinghorni:0.008090824,Morelia_nauta:0.00677865):0.004636801,Morelia_clastolepis:0.006176168):0.02159217,(Morelia_amethistina:0.02774497,Morelia_tracyae:0.06031651):0.005094471):0.0378395,Morelia_oenpelliensis:0.06740112):0.002512368,(Morelia_bredli:0.02727586,Morelia_spilota:0.02655475):0.0345744 [...]
+    TREE 26 = (Candoia_aspera:0.4117214,(((((((Morelia_spilota:0.02966092,Morelia_bredli:0.02248443):0.03277605,(((((Morelia_kinghorni:0.008210078,Morelia_nauta:0.007966455):0.004548794,Morelia_clastolepis:0.006720723):0.009598362,Morelia_amethistina:0.02268765):0.007693427,Morelia_tracyae:0.03961947):0.03682493,Morelia_oenpelliensis:0.05115676):0.008050052):0.03203021,((((Antaresia_stimsoni:0.02444624,Antaresia_childreni:0.0252333):0.0318095,Antaresia_perthensis:0.0763034):0.01333969,An [...]
+    TREE 27 = (((((Morelia_boeleni:0.08136462,(((((Liasis_mackloti:0.01469392,Liasis_fuscus:0.02004376):0.04217133,(Liasis_olivaceus:0.04287108,Apodora_papuana:0.04818141):0.01302773):0.02332967,(Antaresia_ramsayi:0.02924354,Antaresia_melanocephalus:0.03579885):0.03955978):0.01125768,(Liasis_albertisii:0.0485576,Bothrochilus_boa:0.05695902):0.02309585):0.001063857,(((Morelia_spilota:0.02560891,Morelia_bredli:0.03012455):0.02098186,((Morelia_tracyae:0.03688196,((Morelia_clastolepis:0.0031 [...]
+    TREE 28 = (Candoia_aspera:0.4449872,(((((Python_molurus:0.03453011,Python_sebae:0.08373404):0.008449763,Python_curtus:0.08456065):0.03067617,Python_regius:0.1044486):0.03640394,(((((Antaresia_melanocephalus:0.03645403,Antaresia_ramsayi:0.0313611):0.05991626,((Liasis_olivaceus:0.03968883,Apodora_papuana:0.0610017):0.007050161,(Liasis_mackloti:0.006764323,Liasis_fuscus:0.02324538):0.05418435):0.01189356):0.003175782,(((Morelia_viridisN:0.03366704,Morelia_viridisS:0.04775397):0.01973658 [...]
+    TREE 29 = ((((Python_regius:0.08981924,((Python_sebae:0.07555498,Python_molurus:0.03563937):0.02936048,Python_curtus:0.08984143):0.009248537):0.03159094,((Python_reticulatus:0.05470638,Python_timoriensis:0.0862496):0.06055097,(((Liasis_albertisii:0.04162999,Bothrochilus_boa:0.0473893):0.0379235,((Antaresia_ramsayi:0.02152948,Antaresia_melanocephalus:0.03427138):0.05178105,((Apodora_papuana:0.05024696,Liasis_olivaceus:0.04916695):0.01541814,(Liasis_fuscus:0.02006362,Liasis_mackloti:0. [...]
+    TREE 30 = ((((Python_reticulatus:0.05656672,Python_timoriensis:0.0615847):0.06869705,(Morelia_boeleni:0.08434733,(((((Morelia_viridisN:0.02822289,Morelia_viridisS:0.04455322):0.0211784,Morelia_carinata:0.06338099):0.01717543,(Antaresia_maculosa:0.06683547,((Antaresia_childreni:0.01892756,Antaresia_stimsoni:0.01717541):0.03864759,Antaresia_perthensis:0.07570149):0.006255439):0.01023573):0.01303267,((Morelia_oenpelliensis:0.05786269,(Morelia_tracyae:0.03557059,(Morelia_amethistina:0.01 [...]
+    TREE 31 = (((((Python_molurus:0.04063373,Python_sebae:0.07514814):0.01706889,Python_curtus:0.08816209):0.02794301,Python_regius:0.126422):0.04901962,((Python_timoriensis:0.07078427,Python_reticulatus:0.06491265):0.06423406,((Bothrochilus_boa:0.05380913,Liasis_albertisii:0.0547089):0.03656184,((((Morelia_spilota:0.02872934,Morelia_bredli:0.02312623):0.03137145,((Morelia_tracyae:0.03510673,((Morelia_clastolepis:0.006385572,(Morelia_nauta:0.007006281,Morelia_kinghorni:0.00643803):0.0033 [...]
+    TREE 32 = ((Candoia_aspera:0.455204,Loxocemus_bicolor:0.2917998):0.04406212,(((Python_timoriensis:0.08183563,Python_reticulatus:0.04512444):0.06887124,((Morelia_boeleni:0.07321775,((Bothrochilus_boa:0.05326229,Liasis_albertisii:0.05245776):0.02197908,((Morelia_spilota:0.02674636,Morelia_bredli:0.03649268):0.0215502,(((Morelia_amethistina:0.03120252,((Morelia_kinghorni:0.005435696,Morelia_nauta:0.007088191):0.01118478,Morelia_clastolepis:0.01091913):0.01733689):0.01347604,Morelia_trac [...]
+    TREE 33 = ((Loxocemus_bicolor:0.2844377,(((Python_curtus:0.1019715,(Python_molurus:0.0429412,Python_sebae:0.08466812):0.02263815):0.02075651,Python_regius:0.1237921):0.03677393,((((Morelia_bredli:0.03119378,Morelia_spilota:0.0233967):0.03652172,(Morelia_oenpelliensis:0.06481902,(Morelia_tracyae:0.04483205,(Morelia_amethistina:0.02436113,((Morelia_kinghorni:0.01747546,Morelia_clastolepis:0.00549594):0.01011681,Morelia_nauta:0.002591119):0.0210983):0.01280651):0.0297356):0.01493592):0. [...]
+    TREE 34 = (Candoia_aspera:0.4195906,((((Python_timoriensis:0.04978334,Python_reticulatus:0.06443191):0.05665736,(((((Antaresia_perthensis:0.06844294,(Antaresia_stimsoni:0.01290088,Antaresia_childreni:0.02636597):0.03241306):0.01075944,Antaresia_maculosa:0.07323313):0.006654512,(Morelia_carinata:0.0807921,(Morelia_viridisN:0.03638272,Morelia_viridisS:0.04498738):0.02539871):0.00670575):0.02185193,((Antaresia_melanocephalus:0.03991443,Antaresia_ramsayi:0.03267628):0.04173048,((Apodora_ [...]
+    TREE 35 = (Candoia_aspera:0.4486796,((((Python_timoriensis:0.06160357,Python_reticulatus:0.05899547):0.06210065,((((Morelia_carinata:0.07039268,(Morelia_viridisN:0.02717392,Morelia_viridisS:0.05285822):0.01629177):0.01622682,((Antaresia_perthensis:0.06306544,(Antaresia_stimsoni:0.01439114,Antaresia_childreni:0.02655157):0.0412621):0.01020385,Antaresia_maculosa:0.06175795):0.004266058):0.01947729,(((Morelia_spilota:0.02145622,Morelia_bredli:0.02229659):0.03046973,Morelia_oenpelliensis [...]
+    TREE 36 = (Candoia_aspera:0.4677103,(((((Python_sebae:0.06512575,Python_molurus:0.03856891):0.02502744,Python_curtus:0.09008242):0.01898717,Python_regius:0.1106015):0.03137335,((Python_reticulatus:0.04563797,Python_timoriensis:0.09058164):0.08538711,((((Liasis_albertisii:0.06089568,Bothrochilus_boa:0.05524926):0.04386574,((Apodora_papuana:0.06280342,(Liasis_olivaceus:0.04780041,(Liasis_mackloti:0.008650393,Liasis_fuscus:0.02488376):0.04392434):0.008986177):0.02061411,(Antaresia_ramsa [...]
+    TREE 37 = (Candoia_aspera:0.4769489,((((Python_curtus:0.09808627,(Python_sebae:0.08711612,Python_molurus:0.03361382):0.01601466):0.0235876,Python_regius:0.1375817):0.04256167,((((Morelia_boeleni:0.07606632,((Morelia_spilota:0.03282064,Morelia_bredli:0.02378048):0.02364717,((Morelia_tracyae:0.02877539,((Morelia_nauta:0.00251251,(Morelia_kinghorni:0.008524486,Morelia_clastolepis:0.004699519):0.006302931):0.01200433,Morelia_amethistina:0.02906535):0.004316516):0.02884597,Morelia_oenpell [...]
+    TREE 38 = (Candoia_aspera:0.3773662,(Loxocemus_bicolor:0.2249788,((((Python_sebae:0.06651892,Python_molurus:0.03498734):0.02464912,Python_curtus:0.07673166):0.01052472,Python_regius:0.1129523):0.04464117,((Python_reticulatus:0.05448691,Python_timoriensis:0.06477214):0.07192476,((((Antaresia_ramsayi:0.03292673,Antaresia_melanocephalus:0.04036128):0.04562737,(Apodora_papuana:0.06416247,((Liasis_mackloti:0.01160595,Liasis_fuscus:0.02231178):0.04907739,Liasis_olivaceus:0.03522126):0.0094 [...]
+    TREE 39 = (((((Python_timoriensis:0.08188618,Python_reticulatus:0.06378417):0.08001537,(((((Morelia_spilota:0.02781278,Morelia_bredli:0.03197371):0.03154039,((Morelia_tracyae:0.02607517,((Morelia_clastolepis:0.002039059,(Morelia_nauta:0.008565151,Morelia_kinghorni:0.011925):0.001514738):0.02744645,Morelia_amethistina:0.01836739):0.02055508):0.03028903,Morelia_oenpelliensis:0.04405405):0.008923189):0.0305229,(Bothrochilus_boa:0.06679369,Liasis_albertisii:0.06716246):0.02623561):0.0041 [...]
+    TREE 40 = (Candoia_aspera:0.4208013,(Loxocemus_bicolor:0.2355042,(((Python_curtus:0.1002398,(Python_molurus:0.04971005,Python_sebae:0.07534479):0.01490631):0.02483788,Python_regius:0.13122):0.04845283,(((((((Antaresia_childreni:0.02476396,Antaresia_stimsoni:0.01173612):0.03504246,Antaresia_perthensis:0.06446316):0.007543391,Antaresia_maculosa:0.05640942):0.007865894,(Morelia_carinata:0.04620256,(Morelia_viridisN:0.03105739,Morelia_viridisS:0.06619639):0.02895136):0.01983984):0.014129 [...]
+    TREE 41 = ((((Python_timoriensis:0.06996509,Python_reticulatus:0.06574926):0.06676801,((((((Antaresia_stimsoni:0.02009407,Antaresia_childreni:0.02232226):0.04431973,Antaresia_perthensis:0.06572423):0.01161341,Antaresia_maculosa:0.07414321):0.006251875,(Morelia_carinata:0.05239441,(Morelia_viridisN:0.03753331,Morelia_viridisS:0.06370906):0.02254906):0.01009813):0.02270443,((Bothrochilus_boa:0.0655155,Liasis_albertisii:0.04513337):0.0402801,((Antaresia_melanocephalus:0.0488194,Antaresi [...]
+    TREE 42 = ((Loxocemus_bicolor:0.2558653,(((Python_reticulatus:0.06041035,Python_timoriensis:0.0495409):0.05712625,(((((Morelia_oenpelliensis:0.06264648,(Morelia_tracyae:0.03295818,((Morelia_nauta:0.004162228,(Morelia_kinghorni:0.007212826,Morelia_clastolepis:0.005783804):0.004869042):0.007822257,Morelia_amethistina:0.02845611):0.008115491):0.02341889):0.004693379,(Morelia_bredli:0.0245021,Morelia_spilota:0.03264814):0.03222283):0.03051242,Morelia_boeleni:0.08301486):0.002222387,(((An [...]
+    TREE 43 = (Candoia_aspera:0.4912831,(Loxocemus_bicolor:0.3305818,(((Python_timoriensis:0.08964648,Python_reticulatus:0.05531334):0.06829904,((((Antaresia_ramsayi:0.02711878,Antaresia_melanocephalus:0.04111096):0.04432045,((Liasis_olivaceus:0.03420107,Apodora_papuana:0.06162698):0.01501303,(Liasis_fuscus:0.01828714,Liasis_mackloti:0.01248727):0.04523117):0.02292171):0.01276623,(Liasis_albertisii:0.06893302,Bothrochilus_boa:0.06296997):0.0452054):0.006619568,((((Antaresia_perthensis:0. [...]
+    TREE 44 = ((Candoia_aspera:0.4282801,Loxocemus_bicolor:0.2557565):0.07118735,((((((Morelia_spilota:0.028086,Morelia_bredli:0.03424453):0.04515606,(Morelia_oenpelliensis:0.04825777,(Morelia_tracyae:0.04446028,(Morelia_amethistina:0.02748901,((Morelia_kinghorni:0.007643079,Morelia_clastolepis:0.008964228):0.003093159,Morelia_nauta:0.009113297):0.0223688):0.01055831):0.02851381):0.01327409):0.02105642,Morelia_boeleni:0.1081844):0.0002377406,(((Antaresia_maculosa:0.06232342,((Antaresia_s [...]
+    TREE 45 = ((Loxocemus_bicolor:0.2120497,Candoia_aspera:0.4029712):0.01332453,(((Python_reticulatus:0.05061441,Python_timoriensis:0.06134717):0.05990154,(((((Morelia_spilota:0.02971883,Morelia_bredli:0.02865454):0.02874998,(((Morelia_amethistina:0.02414642,((Morelia_kinghorni:0.005704897,Morelia_nauta:0.002618733):0.008513417,Morelia_clastolepis:0.008472477):0.01483608):0.01170621,Morelia_tracyae:0.03162107):0.02879083,Morelia_oenpelliensis:0.05522263):0.001620195):0.01422465,Morelia_ [...]
+    TREE 46 = (((((((((Morelia_spilota:0.02655895,Morelia_bredli:0.03511595):0.03519584,(((Morelia_nauta:0.001228679,(Morelia_clastolepis:0.007831828,Morelia_kinghorni:0.004732577):0.009794926):0.0277251,(Morelia_tracyae:0.05416658,Morelia_amethistina:0.02597948):0.00171186):0.04197238,Morelia_oenpelliensis:0.06708394):0.004278351):0.02486795,Morelia_boeleni:0.09303988):0.006333662,(((Apodora_papuana:0.06684645,((Liasis_mackloti:0.01200195,Liasis_fuscus:0.02916414):0.03911876,Liasis_oliv [...]
+    TREE 47 = ((Loxocemus_bicolor:0.2318016,Candoia_aspera:0.4164175):0.03116619,(((Python_timoriensis:0.08144411,Python_reticulatus:0.04430293):0.05610403,((((((Morelia_viridisN:0.03542506,Morelia_viridisS:0.0572556):0.01990815,Morelia_carinata:0.06651258):0.01302045,((Antaresia_perthensis:0.06573364,(Antaresia_stimsoni:0.01771713,Antaresia_childreni:0.0257728):0.04164447):0.02274534,Antaresia_maculosa:0.05726758):0.007223601):0.01653506,((Morelia_bredli:0.03162414,Morelia_spilota:0.024 [...]
+    TREE 48 = ((Loxocemus_bicolor:0.2613632,Candoia_aspera:0.4005454):0.03571461,(((Python_timoriensis:0.06195748,Python_reticulatus:0.0623511):0.05797901,((((Morelia_bredli:0.02536408,Morelia_spilota:0.02897979):0.02486894,(Morelia_oenpelliensis:0.05232564,(Morelia_tracyae:0.0343304,(Morelia_amethistina:0.02615847,((Morelia_kinghorni:0.01046447,Morelia_nauta:0.005524241):0.004390938,Morelia_clastolepis:0.001738627):0.01218575):0.005278279):0.03427753):0.002326722):0.01558027,Morelia_boe [...]
+    TREE 49 = (Candoia_aspera:0.4611198,(((((Python_molurus:0.03619845,Python_sebae:0.0821524):0.01438549,Python_curtus:0.1332483):0.02854173,Python_regius:0.1243103):0.02839463,((Python_reticulatus:0.06396125,Python_timoriensis:0.07307362):0.07136561,(((((Morelia_oenpelliensis:0.06325029,(Morelia_tracyae:0.06238511,((Morelia_nauta:0.004572369,(Morelia_kinghorni:0.005569767,Morelia_clastolepis:0.005787334):0.006707192):0.0199103,Morelia_amethistina:0.02755908):0.004445603):0.04539337):0. [...]
+    TREE 50 = ((Loxocemus_bicolor:0.2767771,((Python_regius:0.1271252,(Python_curtus:0.09296392,(Python_sebae:0.09383303,Python_molurus:0.03726204):0.01252763):0.01701399):0.03783387,((((((Liasis_fuscus:0.01831367,Liasis_mackloti:0.01081487):0.04196987,(Liasis_olivaceus:0.03974469,Apodora_papuana:0.05023767):0.005300062):0.01275833,(Antaresia_ramsayi:0.02844938,Antaresia_melanocephalus:0.03307184):0.046393):0.004489448,(Liasis_albertisii:0.05207137,Bothrochilus_boa:0.05848669):0.03651395 [...]
+    TREE 51 = ((Loxocemus_bicolor:0.1953644,Candoia_aspera:0.4090651):0.07135976,((Python_regius:0.08788631,(Python_curtus:0.09299281,(Python_sebae:0.08835472,Python_molurus:0.04266492):0.01617591):0.02337683):0.03766133,(((((((Morelia_tracyae:0.02987496,((Morelia_nauta:0.005035972,(Morelia_clastolepis:0.009349248,Morelia_kinghorni:0.011742):0.006892148):0.007373184,Morelia_amethistina:0.02331562):0.01523232):0.03311424,Morelia_oenpelliensis:0.06063279):0.01006941,(Morelia_bredli:0.03232 [...]
+    TREE 52 = (Candoia_aspera:0.4675925,(Loxocemus_bicolor:0.3794991,((((((((Liasis_fuscus:0.01909646,Liasis_mackloti:0.0121694):0.04627307,Liasis_olivaceus:0.03963934):0.01307675,Apodora_papuana:0.06285513):0.009469208,(Antaresia_ramsayi:0.03031447,Antaresia_melanocephalus:0.04619856):0.05525247):0.01141626,((Morelia_boeleni:0.07503417,((Morelia_bredli:0.02530584,Morelia_spilota:0.02564753):0.0288385,(((Morelia_amethistina:0.03204628,((Morelia_kinghorni:0.008460891,Morelia_nauta:0.00626 [...]
+    TREE 53 = ((Loxocemus_bicolor:0.2333575,(((((Antaresia_maculosa:0.06466482,(Antaresia_perthensis:0.06424014,(Antaresia_stimsoni:0.02296361,Antaresia_childreni:0.01801302):0.03375228):0.01303995):0.007496688,(Morelia_carinata:0.06781954,(Morelia_viridisN:0.03859534,Morelia_viridisS:0.05089484):0.02204771):0.01143718):0.02112735,(((Apodora_papuana:0.06130652,(Liasis_olivaceus:0.0411087,(Liasis_fuscus:0.01402491,Liasis_mackloti:0.01771778):0.05455264):0.006364489):0.02208133,(Antaresia_ [...]
+    TREE 54 = ((Loxocemus_bicolor:0.3201657,(((Python_timoriensis:0.0581893,Python_reticulatus:0.05907456):0.06282391,(((Bothrochilus_boa:0.06679214,Liasis_albertisii:0.06431971):0.03525423,((Morelia_spilota:0.02382529,Morelia_bredli:0.03023749):0.04178667,((((Morelia_clastolepis:0.005759701,(Morelia_nauta:0.009415851,Morelia_kinghorni:0.005412919):0.01556562):0.01821965,Morelia_tracyae:0.05033836):0.013102,Morelia_amethistina:0.01994143):0.03911183,Morelia_oenpelliensis:0.06652114):0.00 [...]
+    TREE 55 = (Candoia_aspera:0.4363239,(Loxocemus_bicolor:0.274237,(((Python_reticulatus:0.04577835,Python_timoriensis:0.1033163):0.06046119,((((Antaresia_maculosa:0.07537991,(Antaresia_perthensis:0.06182352,(Antaresia_childreni:0.02228823,Antaresia_stimsoni:0.02032132):0.04573505):0.009092116):0.008593031,((Morelia_viridisN:0.02746149,Morelia_viridisS:0.05577087):0.02431754,Morelia_carinata:0.05783515):0.01411612):0.01847424,(Morelia_boeleni:0.08157485,((Apodora_papuana:0.06043711,((Li [...]
+    TREE 56 = ((Loxocemus_bicolor:0.2326946,(((Python_reticulatus:0.04186498,Python_timoriensis:0.08553525):0.0631943,((((Morelia_oenpelliensis:0.06840764,((Morelia_amethistina:0.02496099,(((Morelia_kinghorni:0.008738223,Morelia_clastolepis:0.006100998):0.009642316,Morelia_nauta:0.005706378):0.02156259,Morelia_tracyae:0.05879199):0.002015818):0.03385588,(Morelia_bredli:0.02514599,Morelia_spilota:0.02152035):0.04255435):0.003652759):0.0287093,Morelia_boeleni:0.08673253):0.004519537,(((Ant [...]
+    TREE 57 = ((((((((Antaresia_maculosa:0.06753452,((Antaresia_stimsoni:0.0174042,Antaresia_childreni:0.02406376):0.03572501,Antaresia_perthensis:0.0853869):0.009132408):0.007193958,((Morelia_viridisS:0.04378142,Morelia_viridisN:0.0372949):0.02842223,Morelia_carinata:0.06825358):0.01252652):0.02117735,((((Morelia_amethistina:0.02015902,Morelia_tracyae:0.05018288):0.006975072,(Morelia_clastolepis:0.008999008,(Morelia_nauta:0.01056224,Morelia_kinghorni:0.006781781):0.004545883):0.0169317) [...]
+    TREE 58 = (Candoia_aspera:0.4610956,(((Python_regius:0.1232013,(Python_curtus:0.1051752,(Python_sebae:0.07803735,Python_molurus:0.04395386):0.02047164):0.02053317):0.02733087,((Python_timoriensis:0.0786524,Python_reticulatus:0.05985872):0.05331236,((Morelia_boeleni:0.08349698,(((Bothrochilus_boa:0.0507127,Liasis_albertisii:0.05617265):0.03450997,((Antaresia_melanocephalus:0.03825353,Antaresia_ramsayi:0.02731536):0.05230166,(Apodora_papuana:0.05528345,((Liasis_mackloti:0.01561896,Lias [...]
+    TREE 59 = ((Loxocemus_bicolor:0.2573866,(((Python_curtus:0.09793072,(Python_sebae:0.08371425,Python_molurus:0.04299976):0.01409514):0.02153569,Python_regius:0.09829285):0.05479509,((Python_timoriensis:0.07710499,Python_reticulatus:0.05454431):0.07863418,(((Morelia_boeleni:0.08457115,((Morelia_spilota:0.03016478,Morelia_bredli:0.02802035):0.03625794,(Morelia_oenpelliensis:0.05700879,((((Morelia_nauta:0.01178942,Morelia_kinghorni:0.008054588):0.002399088,Morelia_clastolepis:0.001528491 [...]
+    TREE 60 = (((((Python_timoriensis:0.07926416,Python_reticulatus:0.07137747):0.07251304,((((Morelia_carinata:0.06855047,(Morelia_viridisN:0.04314394,Morelia_viridisS:0.04266962):0.01684549):0.01320995,(Antaresia_maculosa:0.09354209,((Antaresia_stimsoni:0.01078989,Antaresia_childreni:0.02589992):0.04603335,Antaresia_perthensis:0.05790963):0.006737519):0.009808671):0.02509982,(((Antaresia_melanocephalus:0.04321766,Antaresia_ramsayi:0.03739094):0.05413265,(Bothrochilus_boa:0.06058307,Lia [...]
+    TREE 61 = (Candoia_aspera:0.4378292,(Loxocemus_bicolor:0.2662484,(((Python_timoriensis:0.06384791,Python_reticulatus:0.07183585):0.07502361,((((Morelia_carinata:0.06462666,(Morelia_viridisN:0.03903769,Morelia_viridisS:0.0577421):0.02035461):0.01936315,(Antaresia_maculosa:0.05436798,((Antaresia_stimsoni:0.01573073,Antaresia_childreni:0.0211433):0.03806001,Antaresia_perthensis:0.0699794):0.004109115):0.005422296):0.01634019,((Antaresia_melanocephalus:0.03891313,Antaresia_ramsayi:0.0293 [...]
+    TREE 62 = ((((((((((Antaresia_childreni:0.01558902,Antaresia_stimsoni:0.01595246):0.04430847,Antaresia_perthensis:0.07562575):0.007701339,Antaresia_maculosa:0.08918545):0.00634405,((Morelia_viridisN:0.03328426,Morelia_viridisS:0.05089523):0.01773241,Morelia_carinata:0.06823346):0.009729754):0.0283175,((Liasis_albertisii:0.03628829,Bothrochilus_boa:0.06273807):0.05446115,((Antaresia_ramsayi:0.02195563,Antaresia_melanocephalus:0.04103237):0.04937809,(Apodora_papuana:0.06748454,((Liasis [...]
+    TREE 63 = (Candoia_aspera:0.3903282,(Loxocemus_bicolor:0.248947,(((Python_timoriensis:0.05996622,Python_reticulatus:0.04079137):0.05661379,((Bothrochilus_boa:0.06544201,Liasis_albertisii:0.05598568):0.03264463,((((Antaresia_melanocephalus:0.04274463,Antaresia_ramsayi:0.0229638):0.05227536,(Apodora_papuana:0.05124347,(Liasis_olivaceus:0.032322,(Liasis_mackloti:0.01322112,Liasis_fuscus:0.02499189):0.04084609):0.01081107):0.01437584):0.006629841,(((Morelia_viridisN:0.04032599,Morelia_vi [...]
+    TREE 64 = ((((Python_regius:0.1027857,(Python_curtus:0.09600793,(Python_molurus:0.0279114,Python_sebae:0.06603829):0.02608479):0.02383835):0.05037635,((((Apodora_papuana:0.07130196,((Liasis_mackloti:0.01891925,Liasis_fuscus:0.01624055):0.04795536,Liasis_olivaceus:0.04357267):0.0212919):0.01519329,((Liasis_albertisii:0.05178711,Bothrochilus_boa:0.06173864):0.02414452,(Antaresia_ramsayi:0.04078327,Antaresia_melanocephalus:0.03471582):0.04901001):0.002867909):0.003315471,(Morelia_boelen [...]
+    TREE 65 = (Candoia_aspera:0.4484958,(Loxocemus_bicolor:0.2674019,((((Python_molurus:0.03746116,Python_sebae:0.07876257):0.02255983,Python_curtus:0.09736194):0.009465829,Python_regius:0.1122148):0.04183403,((Python_reticulatus:0.05254427,Python_timoriensis:0.06316221):0.04804532,((((Morelia_oenpelliensis:0.06223055,(Morelia_tracyae:0.03993813,(Morelia_amethistina:0.02235027,((Morelia_clastolepis:0.006880411,Morelia_kinghorni:0.005690263):0.0017908,Morelia_nauta:0.005001931):0.0110807) [...]
+    TREE 66 = (Candoia_aspera:0.4387456,(Loxocemus_bicolor:0.3701941,((((Python_molurus:0.02845826,Python_sebae:0.08506522):0.01248504,Python_curtus:0.1079607):0.02967151,Python_regius:0.09794066):0.04692648,((((((Morelia_oenpelliensis:0.07125908,((((Morelia_kinghorni:0.01128121,Morelia_nauta:0.009803465):0.006153012,Morelia_clastolepis:0.008016785):0.004922659,Morelia_amethistina:0.02570226):0.01632472,Morelia_tracyae:0.03681089):0.02162234):0.004091767,(Morelia_spilota:0.0288998,Moreli [...]
+    TREE 67 = ((Candoia_aspera:0.4404567,Loxocemus_bicolor:0.2623151):0.03678611,((Python_regius:0.1300392,(Python_curtus:0.0849365,(Python_sebae:0.06865021,Python_molurus:0.04067085):0.01983921):0.02301141):0.0359684,((Python_reticulatus:0.05548591,Python_timoriensis:0.07583138):0.06659153,(((Morelia_carinata:0.05407921,(Morelia_viridisN:0.03238534,Morelia_viridisS:0.03823864):0.01875875):0.01477069,(((Antaresia_childreni:0.01916999,Antaresia_stimsoni:0.02019813):0.0427897,Antaresia_per [...]
+    TREE 68 = ((Loxocemus_bicolor:0.275201,(((Python_timoriensis:0.07933547,Python_reticulatus:0.05530097):0.08859932,((Morelia_boeleni:0.0768628,((Morelia_bredli:0.02613436,Morelia_spilota:0.02952909):0.03868299,(Morelia_oenpelliensis:0.05920573,(Morelia_amethistina:0.0122179,((Morelia_nauta:0.005477438,(Morelia_clastolepis:0.01002306,Morelia_kinghorni:0.002926079):0.005503214):0.02905945,Morelia_tracyae:0.02562446):0.01299805):0.05995565):0.002851122):0.02890678):0.01651045,(((((Antare [...]
+    TREE 69 = (Candoia_aspera:0.5069986,((((((((Antaresia_maculosa:0.08576571,((Antaresia_childreni:0.03038096,Antaresia_stimsoni:0.01692226):0.04191162,Antaresia_perthensis:0.06766591):0.01159188):0.00361208,((Morelia_viridisN:0.03718157,Morelia_viridisS:0.05590611):0.01486255,Morelia_carinata:0.08684218):0.01085831):0.01500122,(((Antaresia_ramsayi:0.02557342,Antaresia_melanocephalus:0.04322508):0.05597149,(Apodora_papuana:0.06308859,(Liasis_olivaceus:0.03261355,(Liasis_fuscus:0.0270116 [...]
+    TREE 70 = (Candoia_aspera:0.4122645,(((Python_regius:0.110234,((Python_molurus:0.03231937,Python_sebae:0.07999153):0.006036296,Python_curtus:0.09954285):0.02136783):0.03842027,(((((Liasis_albertisii:0.04708427,Bothrochilus_boa:0.0570808):0.04067184,((Antaresia_ramsayi:0.02902459,Antaresia_melanocephalus:0.03726802):0.04574105,(Apodora_papuana:0.07554033,(Liasis_olivaceus:0.03919017,(Liasis_mackloti:0.01169475,Liasis_fuscus:0.01905053):0.03694819):0.01354151):0.01916572):0.001572765): [...]
+    TREE 71 = (Candoia_aspera:0.4589375,(Loxocemus_bicolor:0.2861174,(((((Antaresia_maculosa:0.07010981,((Antaresia_childreni:0.01826358,Antaresia_stimsoni:0.02200477):0.03110523,Antaresia_perthensis:0.06846857):0.01000272):0.004299263,((Morelia_viridisN:0.03728736,Morelia_viridisS:0.03357402):0.02663172,Morelia_carinata:0.06458395):0.009897064):0.01429599,((Morelia_boeleni:0.07654923,(((Morelia_amethistina:0.02884541,(Morelia_tracyae:0.05910984,((Morelia_clastolepis:0.007296296,Morelia_ [...]
+    TREE 72 = (Candoia_aspera:0.4576079,(((((((((Morelia_amethistina:0.01521725,(Morelia_tracyae:0.05461905,(Morelia_clastolepis:0.003219944,(Morelia_nauta:0.006485679,Morelia_kinghorni:0.007425166):0.001335271):0.03162993):0.009930456):0.04123097,Morelia_oenpelliensis:0.07386276):0.008972008,(Morelia_spilota:0.023071,Morelia_bredli:0.03727896):0.03126727):0.03469699,((Antaresia_maculosa:0.06361313,(Antaresia_perthensis:0.07265412,(Antaresia_stimsoni:0.0111831,Antaresia_childreni:0.02761 [...]
+    TREE 73 = ((Loxocemus_bicolor:0.3119393,((((Python_molurus:0.04448889,Python_sebae:0.06188512):0.0142073,Python_curtus:0.1025871):0.03482844,Python_regius:0.1297599):0.02807402,((Python_reticulatus:0.0707405,Python_timoriensis:0.08769258):0.0480519,((((((Morelia_clastolepis:0.01089226,(Morelia_kinghorni:0.01052386,Morelia_nauta:0.009789005):0.002879246):0.0157598,(Morelia_tracyae:0.02688318,Morelia_amethistina:0.02338759):0.008894687):0.02362956,Morelia_oenpelliensis:0.0695819):0.006 [...]
+    TREE 74 = ((Loxocemus_bicolor:0.2083269,Candoia_aspera:0.5152461):0.04062122,((((((Morelia_bredli:0.02280977,Morelia_spilota:0.02376029):0.03352429,(Morelia_oenpelliensis:0.05724071,((Morelia_tracyae:0.03030344,(Morelia_clastolepis:0.003406181,(Morelia_kinghorni:0.003670556,Morelia_nauta:0.01034161):0.0009901308):0.02356278):0.002815858,Morelia_amethistina:0.0307196):0.04274883):0.00406888):0.01093207,Morelia_boeleni:0.06876127):0.008428241,((Liasis_albertisii:0.03787623,Bothrochilus [...]
+    TREE 75 = ((Loxocemus_bicolor:0.3300245,((((((Morelia_spilota:0.01654569,Morelia_bredli:0.03397926):0.03641578,(Morelia_oenpelliensis:0.0720801,(Morelia_tracyae:0.05217545,(((Morelia_clastolepis:0.005005123,Morelia_kinghorni:0.01336545):0.002474397,Morelia_nauta:0.005135748):0.008917268,Morelia_amethistina:0.02211238):0.0103868):0.03809202):0.005851688):0.01685244,(((Morelia_viridisN:0.03925044,Morelia_viridisS:0.03750531):0.01639746,Morelia_carinata:0.07054688):0.008838584,(((Antare [...]
+    TREE 76 = (((((((((Antaresia_melanocephalus:0.02772176,Antaresia_ramsayi:0.04226773):0.0504747,(Apodora_papuana:0.07093863,((Liasis_mackloti:0.01709142,Liasis_fuscus:0.02330178):0.04618391,Liasis_olivaceus:0.0352755):0.01342014):0.02167455):0.0100165,(Bothrochilus_boa:0.07219773,Liasis_albertisii:0.04785805):0.04234023):0.004860061,((((Morelia_tracyae:0.04624559,(Morelia_amethistina:0.02472913,((Morelia_kinghorni:0.01642213,Morelia_clastolepis:0.009078821):0.003514855,Morelia_nauta:0 [...]
+    TREE 77 = ((Candoia_aspera:0.3502765,Loxocemus_bicolor:0.2724483):0.02964649,((((Python_molurus:0.04580597,Python_sebae:0.0612617):0.01270166,Python_curtus:0.07769046):0.02310792,Python_regius:0.09778553):0.0505763,((Python_reticulatus:0.0510522,Python_timoriensis:0.07707769):0.05952654,((((((Apodora_papuana:0.04280117,Liasis_olivaceus:0.03264188):0.01300365,(Liasis_mackloti:0.008264549,Liasis_fuscus:0.01835877):0.04882774):0.01341318,(Antaresia_ramsayi:0.03582116,Antaresia_melanocep [...]
+    TREE 78 = (Candoia_aspera:0.4818862,(((Python_regius:0.1173105,(Python_curtus:0.09787109,(Python_molurus:0.03613981,Python_sebae:0.08034014):0.02579503):0.01255788):0.03521315,((Python_timoriensis:0.06462919,Python_reticulatus:0.04927181):0.0781527,(((((Morelia_bredli:0.03127313,Morelia_spilota:0.03540732):0.03493716,(Morelia_oenpelliensis:0.05428268,((Morelia_tracyae:0.03980916,(Morelia_nauta:0.002858831,(Morelia_kinghorni:0.01143028,Morelia_clastolepis:0.01019444):0.009460836):0.02 [...]
+    TREE 79 = ((((Python_timoriensis:0.07252546,Python_reticulatus:0.06270089):0.06874345,((((Apodora_papuana:0.08250042,(Liasis_olivaceus:0.04256184,(Liasis_mackloti:0.01543068,Liasis_fuscus:0.01570251):0.05041162):0.01150162):0.01891647,((Antaresia_melanocephalus:0.03158756,Antaresia_ramsayi:0.03251311):0.05107385,(Bothrochilus_boa:0.06030661,Liasis_albertisii:0.04458544):0.03289097):0.003724523):0.007746936,(Morelia_boeleni:0.07578827,((Morelia_spilota:0.02666204,Morelia_bredli:0.0290 [...]
+    TREE 80 = (((((Python_timoriensis:0.07942641,Python_reticulatus:0.05318356):0.0542813,((((((Liasis_mackloti:0.01510409,Liasis_fuscus:0.01776043):0.05075845,(Liasis_olivaceus:0.03951128,Apodora_papuana:0.07083407):0.01755491):0.01728235,(Antaresia_melanocephalus:0.03002391,Antaresia_ramsayi:0.02149549):0.04485914):0.01052883,(Bothrochilus_boa:0.05519856,Liasis_albertisii:0.04763622):0.02797339):0.004472592,Morelia_boeleni:0.08374513):0.005074553,((((Antaresia_perthensis:0.07022127,(An [...]
+    TREE 81 = (Candoia_aspera:0.4667571,(Loxocemus_bicolor:0.3367887,((Python_regius:0.1180484,((Python_molurus:0.03959197,Python_sebae:0.1142895):0.01503196,Python_curtus:0.09833922):0.03611823):0.04569427,(((Morelia_boeleni:0.09325398,(((Antaresia_ramsayi:0.02456999,Antaresia_melanocephalus:0.03700992):0.05498125,(Liasis_albertisii:0.06510315,Bothrochilus_boa:0.06030748):0.02629961):0.007964493,((Liasis_olivaceus:0.03164813,(Liasis_mackloti:0.007046992,Liasis_fuscus:0.01983901):0.05932 [...]
+    TREE 82 = ((((((Python_sebae:0.0645436,Python_molurus:0.03466129):0.01099752,Python_curtus:0.1213218):0.01569256,Python_regius:0.1288854):0.02512009,((((Liasis_albertisii:0.04768349,Bothrochilus_boa:0.06732252):0.02577025,((Apodora_papuana:0.0573615,(Liasis_olivaceus:0.03143039,(Liasis_fuscus:0.01956832,Liasis_mackloti:0.01151569):0.04049188):0.01153908):0.02375811,(Antaresia_ramsayi:0.02867471,Antaresia_melanocephalus:0.03825474):0.0532607):0.007452147):0.005339881,((Morelia_boeleni [...]
+    TREE 83 = (((((Python_timoriensis:0.06385903,Python_reticulatus:0.05887241):0.06039356,(((((Antaresia_ramsayi:0.03291234,Antaresia_melanocephalus:0.04406639):0.0551582,(Apodora_papuana:0.06219194,(Liasis_olivaceus:0.04618287,(Liasis_fuscus:0.01928215,Liasis_mackloti:0.01361912):0.04523828):0.01769705):0.01373734):0.005296261,(Liasis_albertisii:0.04618613,Bothrochilus_boa:0.07393764):0.02720086):0.007912564,((Morelia_carinata:0.07341501,(Morelia_viridisN:0.03939504,Morelia_viridisS:0. [...]
+    TREE 84 = ((Candoia_aspera:0.4302856,Loxocemus_bicolor:0.2106506):0.03081074,(((Python_reticulatus:0.039706,Python_timoriensis:0.06318284):0.06737832,((((Antaresia_ramsayi:0.02993718,Antaresia_melanocephalus:0.0516321):0.03462159,((Liasis_fuscus:0.01909033,Liasis_mackloti:0.01920083):0.03884619,(Apodora_papuana:0.07970396,Liasis_olivaceus:0.0318336):0.0159565):0.02141131):0.01086279,(((Antaresia_perthensis:0.05877012,(Antaresia_childreni:0.02373958,Antaresia_stimsoni:0.01345116):0.03 [...]
+    TREE 85 = ((Loxocemus_bicolor:0.2534505,(((((Morelia_carinata:0.07389122,(Morelia_viridisN:0.03609381,Morelia_viridisS:0.05675261):0.01821106):0.01076806,(((Antaresia_stimsoni:0.01256993,Antaresia_childreni:0.02450923):0.02974669,Antaresia_perthensis:0.05764076):0.02295225,Antaresia_maculosa:0.07677902):0.008637588):0.01786285,((Morelia_boeleni:0.07562694,(((Bothrochilus_boa:0.06376279,Liasis_albertisii:0.04877791):0.03468829,(Antaresia_melanocephalus:0.04365131,Antaresia_ramsayi:0.0 [...]
+    TREE 86 = (Candoia_aspera:0.4432525,(((Python_regius:0.1388916,((Python_sebae:0.08554284,Python_molurus:0.05720035):0.009684637,Python_curtus:0.08433645):0.01174155):0.02783744,((((((Morelia_bredli:0.02386123,Morelia_spilota:0.02840604):0.03818062,(Morelia_oenpelliensis:0.05193934,(Morelia_amethistina:0.01753699,(((Morelia_nauta:0.006926759,Morelia_kinghorni:0.005577224):0.001173386,Morelia_clastolepis:0.004074834):0.02082514,Morelia_tracyae:0.05079507):0.004276661):0.04497917):0.008 [...]
+    TREE 87 = ((((((Python_molurus:0.03328051,Python_sebae:0.08260708):0.02277176,Python_curtus:0.09369442):0.01362622,Python_regius:0.1144538):0.04157068,((Python_reticulatus:0.0659659,Python_timoriensis:0.05748917):0.05766686,((((Morelia_spilota:0.02944967,Morelia_bredli:0.02243377):0.03036736,(Morelia_oenpelliensis:0.05862991,((Morelia_amethistina:0.02044603,(Morelia_clastolepis:0.00667788,(Morelia_kinghorni:0.002341999,Morelia_nauta:0.004724123):0.001102001):0.01914452):0.006570517,M [...]
+    TREE 88 = ((Candoia_aspera:0.4541513,Loxocemus_bicolor:0.2958392):0.07081872,(((Python_timoriensis:0.05908032,Python_reticulatus:0.06230935):0.05923604,(((Morelia_carinata:0.059557,(Morelia_viridisN:0.0430535,Morelia_viridisS:0.04451561):0.02637773):0.01378714,(Antaresia_maculosa:0.06826855,((Antaresia_stimsoni:0.01615772,Antaresia_childreni:0.02120395):0.03420147,Antaresia_perthensis:0.06973437):0.02609569):0.003154777):0.02419757,((Morelia_boeleni:0.0936062,((Morelia_oenpelliensis: [...]
+    TREE 89 = ((Loxocemus_bicolor:0.2715632,((Python_regius:0.105666,(Python_curtus:0.08820886,(Python_molurus:0.0289014,Python_sebae:0.08313008):0.01341111):0.02700934):0.0537633,((Python_timoriensis:0.06336319,Python_reticulatus:0.06893482):0.0668129,(((Morelia_carinata:0.05800344,(Morelia_viridisN:0.03450113,Morelia_viridisS:0.04273127):0.02064897):0.01309781,(Antaresia_maculosa:0.07388802,(Antaresia_perthensis:0.06515081,(Antaresia_stimsoni:0.01508704,Antaresia_childreni:0.02555118): [...]
+    TREE 90 = (Candoia_aspera:0.4139069,(((Python_regius:0.1113714,(Python_curtus:0.1046913,(Python_molurus:0.03657293,Python_sebae:0.06993362):0.02487479):0.0164626):0.04311928,((Python_reticulatus:0.06430189,Python_timoriensis:0.06051875):0.0693176,((Morelia_boeleni:0.08663333,((Morelia_spilota:0.03083786,Morelia_bredli:0.0240764):0.03537649,(Morelia_oenpelliensis:0.06986482,((((Morelia_nauta:0.01074089,Morelia_kinghorni:0.006973603):0.003692685,Morelia_clastolepis:0.002062225):0.01547 [...]
+    TREE 91 = ((Loxocemus_bicolor:0.3083324,((((Morelia_boeleni:0.09668887,((Liasis_albertisii:0.05250439,Bothrochilus_boa:0.06016687):0.03512285,((Morelia_oenpelliensis:0.05748413,(Morelia_tracyae:0.0477756,((Morelia_nauta:0.007311925,(Morelia_kinghorni:0.005988491,Morelia_clastolepis:0.003411513):0.004753291):0.02241144,Morelia_amethistina:0.02106282):0.006562451):0.03868782):0.01022867,(Morelia_spilota:0.02908629,Morelia_bredli:0.02807888):0.02736293):0.02072108):0.007550236):0.004824 [...]
+    TREE 92 = (Candoia_aspera:0.4281057,((((Python_timoriensis:0.08823408,Python_reticulatus:0.05737884):0.06100876,(((Bothrochilus_boa:0.07126329,Liasis_albertisii:0.04201895):0.03675228,((Antaresia_melanocephalus:0.04236584,Antaresia_ramsayi:0.03108991):0.04881623,((Liasis_mackloti:0.01787838,Liasis_fuscus:0.01928909):0.04546609,(Liasis_olivaceus:0.05017097,Apodora_papuana:0.06242772):0.00707014):0.01689589):0.01717237):0.003562895,((Morelia_boeleni:0.09789998,((Morelia_oenpelliensis:0 [...]
+    TREE 93 = ((((Python_regius:0.1131232,((Python_molurus:0.05168396,Python_sebae:0.08173496):0.02146037,Python_curtus:0.1083859):0.01259181):0.0382401,(((((Morelia_carinata:0.07151188,(Morelia_viridisN:0.03574948,Morelia_viridisS:0.06082342):0.02895885):0.01261191,((Antaresia_perthensis:0.06404546,(Antaresia_stimsoni:0.01594511,Antaresia_childreni:0.02247536):0.02246347):0.006851028,Antaresia_maculosa:0.0730325):0.01006378):0.02929077,(((Apodora_papuana:0.05937471,Liasis_olivaceus:0.03 [...]
+    TREE 94 = (((Python_regius:0.1239454,((Python_molurus:0.03482248,Python_sebae:0.07534156):0.00995777,Python_curtus:0.09774541):0.04014603):0.01570374,(((Morelia_boeleni:0.08858978,((((Antaresia_ramsayi:0.0206568,Antaresia_melanocephalus:0.04666388):0.04498903,(Apodora_papuana:0.05813629,(Liasis_olivaceus:0.05170553,(Liasis_fuscus:0.02074332,Liasis_mackloti:0.01562806):0.04108401):0.00829329):0.0165318):0.005684696,(Liasis_albertisii:0.05068824,Bothrochilus_boa:0.06321412):0.03784581) [...]
+    TREE 95 = ((((((((Antaresia_ramsayi:0.02667422,Antaresia_melanocephalus:0.04534988):0.05253592,(Apodora_papuana:0.07194987,(Liasis_olivaceus:0.04129446,(Liasis_mackloti:0.02126345,Liasis_fuscus:0.02281094):0.05439975):0.01019772):0.007377441):0.0114808,(((Morelia_viridisS:0.03858762,Morelia_viridisN:0.04247917):0.02657865,Morelia_carinata:0.08634983):0.01532563,((Antaresia_perthensis:0.07156371,(Antaresia_stimsoni:0.02213176,Antaresia_childreni:0.01944844):0.03966154):0.008415598,Ant [...]
+    TREE 96 = ((Loxocemus_bicolor:0.2807967,(((Python_timoriensis:0.06910729,Python_reticulatus:0.05372697):0.07638634,((((Morelia_viridisN:0.02707026,Morelia_viridisS:0.04347542):0.02229195,Morelia_carinata:0.05926867):0.01329909,(Antaresia_maculosa:0.04804866,((Antaresia_stimsoni:0.0148235,Antaresia_childreni:0.02618698):0.03229745,Antaresia_perthensis:0.06685734):0.003997152):0.01381649):0.01124514,((((((Morelia_tracyae:0.04246949,(((Morelia_nauta:0.005484603,Morelia_kinghorni:0.01044 [...]
+    TREE 97 = (Candoia_aspera:0.5039896,(Loxocemus_bicolor:0.2850881,((((Python_sebae:0.08938354,Python_molurus:0.03446374):0.01632282,Python_curtus:0.1258863):0.02531511,Python_regius:0.1176712):0.04925833,((((((Antaresia_ramsayi:0.03828439,Antaresia_melanocephalus:0.03645482):0.04680986,(((Liasis_fuscus:0.02699014,Liasis_mackloti:0.008925802):0.04733224,Liasis_olivaceus:0.03373177):0.01716661,Apodora_papuana:0.05578779):0.01661917):0.001369567,(Liasis_albertisii:0.05880192,Bothrochilus [...]
+    TREE 98 = (Candoia_aspera:0.3960502,(((Python_regius:0.1248549,(Python_curtus:0.09506814,(Python_sebae:0.07827634,Python_molurus:0.03632028):0.01662299):0.02602335):0.05087225,((Python_reticulatus:0.0549807,Python_timoriensis:0.06575509):0.04986447,((((Antaresia_perthensis:0.05946804,(Antaresia_childreni:0.02254218,Antaresia_stimsoni:0.02131714):0.03895009):0.01473793,Antaresia_maculosa:0.07138852):0.004917744,(Morelia_carinata:0.06986168,(Morelia_viridisN:0.03186693,Morelia_viridisS [...]
+    TREE 99 = (((((Morelia_boeleni:0.09666952,((Morelia_bredli:0.02487562,Morelia_spilota:0.01972983):0.02472607,(Morelia_oenpelliensis:0.05818019,(Morelia_tracyae:0.04529842,((Morelia_nauta:0.001550672,(Morelia_clastolepis:0.00699391,Morelia_kinghorni:0.008062495):0.007728304):0.01843411,Morelia_amethistina:0.02151424):0.006970344):0.04507299):0.008188663):0.02156805):0.002725827,((((((Liasis_fuscus:0.02915797,Liasis_mackloti:0.01101938):0.05756546,Liasis_olivaceus:0.04796678):0.0169581 [...]
+    TREE 100 = ((Loxocemus_bicolor:0.2671658,(((Python_curtus:0.1129476,(Python_molurus:0.04232295,Python_sebae:0.06584234):0.02744495):0.01460987,Python_regius:0.1067747):0.04485947,((((((Morelia_spilota:0.03627458,Morelia_bredli:0.02800939):0.03362723,((Morelia_tracyae:0.05576232,(((Morelia_kinghorni:0.00559587,Morelia_nauta:0.009946386):0.003195788,Morelia_clastolepis:0.003350075):0.01567562,Morelia_amethistina:0.02654552):0.005885569):0.02131979,Morelia_oenpelliensis:0.06271031):0.00 [...]
+    TREE 101 = ((Loxocemus_bicolor:0.3169261,((Python_regius:0.1401556,(Python_curtus:0.08796553,(Python_molurus:0.04991819,Python_sebae:0.08324849):0.02384237):0.01601799):0.04009144,((((((Morelia_tracyae:0.04670118,((Morelia_clastolepis:0.003713301,(Morelia_kinghorni:0.01206751,Morelia_nauta:0.01282358):0.01047532):0.0141348,Morelia_amethistina:0.01978401):0.006927226):0.02565125,Morelia_oenpelliensis:0.05165965):0.007390497,(Morelia_spilota:0.02417998,Morelia_bredli:0.03266626):0.0360 [...]
+    TREE 11 = ((((Python_regius:0.1027942,(Python_curtus:0.1093195,(Python_molurus:0.03616785,Python_sebae:0.09714906):0.01900329):0.008746833):0.03480031,((Python_timoriensis:0.05888493,Python_reticulatus:0.05249643):0.06859351,((Liasis_albertisii:0.04248073,Bothrochilus_boa:0.08031133):0.03398111,((Morelia_boeleni:0.07999314,((((Liasis_fuscus:0.01725487,Liasis_mackloti:0.01089339):0.05703001,(Liasis_olivaceus:0.0302746,Apodora_papuana:0.05585387):0.01099982):0.01251347,(Antaresia_melan [...]
+    TREE 12 = ((Loxocemus_bicolor:0.200797,Candoia_aspera:0.4791858):0.02924977,(((Python_timoriensis:0.07453055,Python_reticulatus:0.05007415):0.04164017,((Morelia_boeleni:0.08686082,(((Morelia_tracyae:0.02808619,(Morelia_amethistina:0.02303181,((Morelia_kinghorni:0.01274665,Morelia_nauta:0.009048873):0.001590683,Morelia_clastolepis:0.006054381):0.01882585):0.01332885):0.03223371,Morelia_oenpelliensis:0.05694572):0.002731064,(Morelia_bredli:0.02672501,Morelia_spilota:0.0225727):0.026789 [...]
+    TREE 13 = (Candoia_aspera:0.4273618,(((((((Bothrochilus_boa:0.05415261,Liasis_albertisii:0.06038956):0.0360089,((Antaresia_melanocephalus:0.03041821,Antaresia_ramsayi:0.0331126):0.05873344,((Liasis_fuscus:0.0184059,Liasis_mackloti:0.01465728):0.04310984,(Apodora_papuana:0.05418494,Liasis_olivaceus:0.03883886):0.016399):0.01873951):0.009908615):0.005950868,(Morelia_boeleni:0.09917271,((Morelia_bredli:0.02846252,Morelia_spilota:0.02458112):0.0365903,(((Morelia_tracyae:0.03813979,(Morel [...]
+    TREE 14 = ((Loxocemus_bicolor:0.2478028,((((((Antaresia_ramsayi:0.03367224,Antaresia_melanocephalus:0.03609129):0.04704844,((Apodora_papuana:0.05697895,Liasis_olivaceus:0.03303652):0.003571652,(Liasis_fuscus:0.02007346,Liasis_mackloti:0.008729213):0.04390847):0.02024927):0.005893421,((Liasis_albertisii:0.05713446,Bothrochilus_boa:0.06403746):0.02788034,(Morelia_boeleni:0.07313066,((Morelia_spilota:0.03214009,Morelia_bredli:0.02508033):0.02692491,((((Morelia_clastolepis:0.001813014,(M [...]
+    TREE 15 = (Candoia_aspera:0.4192863,(Loxocemus_bicolor:0.2798413,(((((((Antaresia_stimsoni:0.01329395,Antaresia_childreni:0.02283497):0.03790398,Antaresia_perthensis:0.07216747):0.008864959,Antaresia_maculosa:0.06182634):0.009885569,((Morelia_viridisN:0.034578,Morelia_viridisS:0.04693376):0.01679416,Morelia_carinata:0.04904747):0.01559752):0.01005754,((Morelia_boeleni:0.07412517,(((Morelia_tracyae:0.02896581,(Morelia_amethistina:0.03479837,((Morelia_nauta:0.0063588,Morelia_kinghorni: [...]
+    TREE 16 = (Candoia_aspera:0.5004037,(Loxocemus_bicolor:0.2686312,((Python_regius:0.1227537,((Python_molurus:0.02929629,Python_sebae:0.07680908):0.01567275,Python_curtus:0.1201353):0.02038343):0.03700993,((Python_timoriensis:0.05402337,Python_reticulatus:0.06266955):0.0723655,(((Antaresia_maculosa:0.0824284,((Antaresia_childreni:0.02716977,Antaresia_stimsoni:0.01773016):0.03547345,Antaresia_perthensis:0.081437):0.0118044):0.01428121,(Morelia_carinata:0.06047333,(Morelia_viridisS:0.053 [...]
+    TREE 17 = (Candoia_aspera:0.3860517,(Loxocemus_bicolor:0.2262186,((Python_regius:0.108871,(Python_curtus:0.07047174,(Python_molurus:0.03462239,Python_sebae:0.07665672):0.009287629):0.01072433):0.04514998,((Python_timoriensis:0.0838981,Python_reticulatus:0.05669912):0.05609795,((((((Morelia_tracyae:0.03987745,((Morelia_clastolepis:0.007140712,(Morelia_nauta:0.01100948,Morelia_kinghorni:0.007874271):0.002130305):0.00610072,Morelia_amethistina:0.02963331):0.01019719):0.0334844,Morelia_o [...]
+    TREE 18 = ((Loxocemus_bicolor:0.2877769,((Python_regius:0.1397491,((Python_sebae:0.08341701,Python_molurus:0.031548):0.02842307,Python_curtus:0.1021217):0.0152687):0.05564439,((Python_timoriensis:0.06025919,Python_reticulatus:0.06209771):0.06208062,((Morelia_boeleni:0.07254053,((Morelia_spilota:0.0292054,Morelia_bredli:0.02617089):0.04941909,((Morelia_tracyae:0.04522253,(((Morelia_kinghorni:0.01142549,Morelia_clastolepis:0.01216984):0.007288991,Morelia_nauta:0.0008988171):0.01149753, [...]
+    TREE 19 = (Candoia_aspera:0.4315535,(((Python_regius:0.1083479,(Python_curtus:0.1078002,(Python_molurus:0.06315571,Python_sebae:0.07304212):0.03146347):0.02541859):0.05393959,((Python_timoriensis:0.0555151,Python_reticulatus:0.07256859):0.06332415,((((((Antaresia_childreni:0.02090179,Antaresia_stimsoni:0.02001766):0.03533676,Antaresia_perthensis:0.0606145):0.01254612,Antaresia_maculosa:0.06593747):0.007542348,(Morelia_carinata:0.06172531,(Morelia_viridisS:0.04635308,Morelia_viridisN: [...]
+    TREE 20 = ((Loxocemus_bicolor:0.3155324,((Python_regius:0.1228927,(Python_curtus:0.1097628,(Python_molurus:0.03835939,Python_sebae:0.07668466):0.008325477):0.01460641):0.06564612,((Python_timoriensis:0.07856463,Python_reticulatus:0.05958525):0.05230699,(Morelia_boeleni:0.06775803,(((Liasis_albertisii:0.0645956,Bothrochilus_boa:0.06127537):0.02999912,((Antaresia_ramsayi:0.0288452,Antaresia_melanocephalus:0.04084328):0.04550624,(((Liasis_fuscus:0.01444571,Liasis_mackloti:0.01290266):0. [...]
+    TREE 21 = (Candoia_aspera:0.4216067,(Loxocemus_bicolor:0.2956747,(((Python_curtus:0.08918839,(Python_molurus:0.05216853,Python_sebae:0.07150718):0.01801873):0.01726334,Python_regius:0.1668874):0.04387736,((Python_timoriensis:0.07135985,Python_reticulatus:0.05553729):0.06714049,(((Morelia_carinata:0.05402521,(Morelia_viridisS:0.04267584,Morelia_viridisN:0.03901156):0.01898352):0.01712506,(((Antaresia_childreni:0.02684644,Antaresia_stimsoni:0.01368649):0.04100616,Antaresia_perthensis:0 [...]
+    TREE 22 = ((Loxocemus_bicolor:0.2416069,((Python_regius:0.1243658,((Python_molurus:0.02672784,Python_sebae:0.07747051):0.01978814,Python_curtus:0.09906979):0.01446203):0.03923037,((Python_timoriensis:0.07259966,Python_reticulatus:0.05987173):0.06120053,(((Liasis_albertisii:0.0463971,Bothrochilus_boa:0.06180745):0.03566932,((Apodora_papuana:0.0726456,(Liasis_olivaceus:0.04192667,(Liasis_fuscus:0.02242715,Liasis_mackloti:0.01043229):0.04170356):0.01561047):0.01366414,(Antaresia_ramsayi [...]
+    TREE 23 = (((((Python_timoriensis:0.08258052,Python_reticulatus:0.05853735):0.08288238,(((((Morelia_viridisN:0.03066907,Morelia_viridisS:0.05095432):0.01767479,Morelia_carinata:0.07342125):0.01474768,(((Antaresia_stimsoni:0.01833412,Antaresia_childreni:0.02854834):0.0462008,Antaresia_perthensis:0.0652752):0.01291676,Antaresia_maculosa:0.08806668):0.007344264):0.02173363,((Antaresia_ramsayi:0.03082787,Antaresia_melanocephalus:0.04706824):0.05208642,((Liasis_olivaceus:0.04073541,(Liasi [...]
+    TREE 24 = (((Python_regius:0.1417174,(Python_curtus:0.110082,(Python_molurus:0.02637725,Python_sebae:0.07899893):0.01286143):0.01916527):0.06151694,((((((Liasis_fuscus:0.0178239,Liasis_mackloti:0.01868649):0.05704714,(Apodora_papuana:0.05741915,Liasis_olivaceus:0.0337298):0.01104941):0.01689001,(Antaresia_melanocephalus:0.03924603,Antaresia_ramsayi:0.03206718):0.06916624):0.004846253,(Liasis_albertisii:0.0753159,Bothrochilus_boa:0.07176204):0.03006251):0.00922749,(((Antaresia_maculos [...]
+    TREE 25 = (Candoia_aspera:0.3524799,((((Python_timoriensis:0.0740242,Python_reticulatus:0.05492826):0.06151397,(((Antaresia_maculosa:0.05970249,((Antaresia_stimsoni:0.01580222,Antaresia_childreni:0.03054988):0.03668299,Antaresia_perthensis:0.07063788):0.0183855):0.00864595,((Morelia_viridisN:0.03323697,Morelia_viridisS:0.05418915):0.02468022,Morelia_carinata:0.06302397):0.01568301):0.03371229,(((Liasis_albertisii:0.059584,Bothrochilus_boa:0.05887819):0.03012165,((Antaresia_ramsayi:0. [...]
+    TREE 26 = (((((Python_curtus:0.08556007,(Python_sebae:0.06193187,Python_molurus:0.04968326):0.005435923):0.01851413,Python_regius:0.1277156):0.04573327,((((Bothrochilus_boa:0.06112235,Liasis_albertisii:0.0591258):0.03619068,(Morelia_boeleni:0.06807541,((Morelia_oenpelliensis:0.0523391,(Morelia_tracyae:0.03849492,(Morelia_amethistina:0.01932389,(Morelia_clastolepis:0.006375849,(Morelia_nauta:0.005909729,Morelia_kinghorni:0.008158058):0.003842392):0.01774768):0.002145456):0.0269855):0. [...]
+    TREE 27 = ((Loxocemus_bicolor:0.2745395,((((Python_molurus:0.05626972,Python_sebae:0.07311616):0.02361951,Python_curtus:0.07978493):0.01955288,Python_regius:0.119671):0.0317135,(((((((Morelia_viridisN:0.03676345,Morelia_viridisS:0.03966575):0.03629429,Morelia_carinata:0.04791429):0.01092544,((Antaresia_perthensis:0.06241723,(Antaresia_stimsoni:0.01352217,Antaresia_childreni:0.02220556):0.03658676):0.01352575,Antaresia_maculosa:0.06451497):0.01398111):0.01464195,(((Liasis_olivaceus:0. [...]
+    TREE 28 = ((((Python_curtus:0.1009424,(Python_sebae:0.07073727,Python_molurus:0.0458842):0.01611277):0.02137165,Python_regius:0.1032613):0.05541138,(((((Morelia_bredli:0.02912984,Morelia_spilota:0.02397348):0.02903979,(Morelia_oenpelliensis:0.04869417,(Morelia_tracyae:0.02929244,((Morelia_clastolepis:0.006308689,(Morelia_kinghorni:0.007352282,Morelia_nauta:0.003787058):0.004816204):0.01431019,Morelia_amethistina:0.02075164):0.009746208):0.03313488):0.007625648):0.02159451,Morelia_boe [...]
+    TREE 29 = (Candoia_aspera:0.3987203,((((Python_timoriensis:0.07231611,Python_reticulatus:0.06693656):0.07092691,((((Antaresia_ramsayi:0.02601053,Antaresia_melanocephalus:0.03847052):0.04431386,((Liasis_olivaceus:0.03460486,Apodora_papuana:0.05290077):0.009100054,(Liasis_fuscus:0.01768649,Liasis_mackloti:0.01600562):0.04353961):0.01585652):0.01486235,(Liasis_albertisii:0.04911192,Bothrochilus_boa:0.05631675):0.03876376):0.0004149582,((((Morelia_oenpelliensis:0.04973058,(Morelia_tracya [...]
+    TREE 30 = (Candoia_aspera:0.4868755,(((Python_regius:0.1221145,((Python_molurus:0.05019593,Python_sebae:0.07748772):0.01368855,Python_curtus:0.1141123):0.01987758):0.04155347,((Python_timoriensis:0.07494973,Python_reticulatus:0.05862679):0.06559235,(((Morelia_oenpelliensis:0.05175119,(Morelia_tracyae:0.04466695,(Morelia_amethistina:0.0192166,(Morelia_clastolepis:0.005236865,(Morelia_kinghorni:0.009909338,Morelia_nauta:0.01748069):0.002925145):0.01592512):0.01615313):0.03149762):0.005 [...]
+    TREE 31 = ((((Python_regius:0.127571,((Python_sebae:0.08733474,Python_molurus:0.02636416):0.01988984,Python_curtus:0.08227164):0.0179738):0.04350246,((((((Morelia_carinata:0.06880104,(Morelia_viridisN:0.03915972,Morelia_viridisS:0.05202187):0.02713937):0.00535652,(Antaresia_maculosa:0.07387812,(Antaresia_perthensis:0.06075918,(Antaresia_stimsoni:0.01861873,Antaresia_childreni:0.01600216):0.03189661):0.01853196):0.006632877):0.01217124,((Antaresia_melanocephalus:0.0414989,Antaresia_ra [...]
+    TREE 32 = ((Loxocemus_bicolor:0.2606271,((((Python_sebae:0.0818305,Python_molurus:0.02727312):0.01367413,Python_curtus:0.08714736):0.02175664,Python_regius:0.1020426):0.07583032,(((((Morelia_carinata:0.05255298,(Morelia_viridisN:0.02892482,Morelia_viridisS:0.05407292):0.03711387):0.01244001,((Antaresia_perthensis:0.06543078,(Antaresia_stimsoni:0.02282742,Antaresia_childreni:0.02338983):0.0390911):0.008210593,Antaresia_maculosa:0.07095299):0.007290989):0.01250111,((Bothrochilus_boa:0. [...]
+    TREE 33 = (Candoia_aspera:0.3709596,(Loxocemus_bicolor:0.3007952,((Python_regius:0.1199573,(Python_curtus:0.1072403,(Python_molurus:0.03916451,Python_sebae:0.07200862):0.01487179):0.02108596):0.02065243,((Python_timoriensis:0.0774226,Python_reticulatus:0.05885978):0.06575871,((((Liasis_albertisii:0.06312153,Bothrochilus_boa:0.0504203):0.03711298,((((Liasis_fuscus:0.02797756,Liasis_mackloti:0.01218298):0.0538399,Liasis_olivaceus:0.04082057):0.01243284,Apodora_papuana:0.05917395):0.012 [...]
+    TREE 34 = ((Loxocemus_bicolor:0.3328977,(((Python_timoriensis:0.05424232,Python_reticulatus:0.06088091):0.0443408,(((((Morelia_bredli:0.03127133,Morelia_spilota:0.01963772):0.02802323,(Morelia_oenpelliensis:0.06068657,(((Morelia_clastolepis:0.003184588,(Morelia_nauta:0.008600695,Morelia_kinghorni:0.005238384):0.00852199):0.00685194,Morelia_amethistina:0.01906498):0.01556873,Morelia_tracyae:0.01951623):0.03165511):0.007153062):0.0306723,Morelia_boeleni:0.08086277):0.00606816,(((Antare [...]
+    TREE 35 = (((((((((Antaresia_perthensis:0.076839,(Antaresia_stimsoni:0.01678085,Antaresia_childreni:0.02946878):0.03759226):0.01176085,Antaresia_maculosa:0.06572966):0.005717706,(Morelia_carinata:0.05542747,(Morelia_viridisN:0.03474877,Morelia_viridisS:0.04205535):0.03438249):0.01795039):0.02082926,((Apodora_papuana:0.05639444,(Liasis_olivaceus:0.02437619,(Liasis_fuscus:0.02117262,Liasis_mackloti:0.007070453):0.04439346):0.0134516):0.01207259,((Antaresia_ramsayi:0.02854236,Antaresia_ [...]
+    TREE 36 = ((Loxocemus_bicolor:0.2483509,Candoia_aspera:0.4492172):0.06859066,((Python_regius:0.1255389,((Python_molurus:0.03130493,Python_sebae:0.0745412):0.01865452,Python_curtus:0.1036943):0.01279912):0.01985159,((((((Morelia_carinata:0.08370114,(Morelia_viridisS:0.04985579,Morelia_viridisN:0.03327157):0.03830219):0.008672443,(Antaresia_maculosa:0.0623934,((Antaresia_childreni:0.02799178,Antaresia_stimsoni:0.02148532):0.03666041,Antaresia_perthensis:0.07446277):0.01086222):0.020943 [...]
+    TREE 37 = (Candoia_aspera:0.4166711,((((Python_curtus:0.09816914,(Python_sebae:0.08240094,Python_molurus:0.05226293):0.01456777):0.01641748,Python_regius:0.137197):0.05957615,((Morelia_boeleni:0.08765504,((((Morelia_bredli:0.03444947,Morelia_spilota:0.02775664):0.02601056,(Morelia_amethistina:0.02924364,(Morelia_tracyae:0.03584065,((Morelia_nauta:0.01115885,Morelia_kinghorni:0.01021469):0.003210438,Morelia_clastolepis:0.006594632):0.03294197):0.001463494):0.03494781):0.002309134,More [...]
+    TREE 38 = (Candoia_aspera:0.4315115,(Loxocemus_bicolor:0.2596803,((((((Morelia_bredli:0.02230078,Morelia_spilota:0.02205087):0.03955585,(Morelia_oenpelliensis:0.05115782,(((Morelia_clastolepis:0.008149148,(Morelia_nauta:0.002970811,Morelia_kinghorni:0.006429062):0.002749328):0.00566445,Morelia_amethistina:0.01508516):0.01228169,Morelia_tracyae:0.03545761):0.03108932):0.005038192):0.02099316,((((Antaresia_melanocephalus:0.04737015,Antaresia_ramsayi:0.02937167):0.04575993,(Apodora_papu [...]
+    TREE 39 = ((Loxocemus_bicolor:0.2923877,((Python_regius:0.1073602,(Python_curtus:0.08588581,(Python_molurus:0.04365642,Python_sebae:0.07138998):0.01973334):0.03022542):0.04769119,((Python_timoriensis:0.05422595,Python_reticulatus:0.05878151):0.05748568,((Morelia_boeleni:0.07541993,((Morelia_spilota:0.02019428,Morelia_bredli:0.04109098):0.0378947,(Morelia_oenpelliensis:0.06183072,(Morelia_tracyae:0.03545849,(((Morelia_clastolepis:0.004731276,Morelia_kinghorni:0.005092597):0.0005185752 [...]
+    TREE 40 = ((Candoia_aspera:0.4156428,Loxocemus_bicolor:0.2269911):0.04826334,(((((((Antaresia_maculosa:0.07407624,(Antaresia_perthensis:0.05331123,(Antaresia_stimsoni:0.01391683,Antaresia_childreni:0.02353525):0.03288148):0.01385114):0.004249709,(Morelia_carinata:0.06564797,(Morelia_viridisN:0.04468362,Morelia_viridisS:0.0520571):0.02200155):0.008594821):0.01366193,((Antaresia_melanocephalus:0.04277621,Antaresia_ramsayi:0.02779418):0.04118139,((Liasis_olivaceus:0.04348896,Apodora_pap [...]
+    TREE 41 = (Candoia_aspera:0.3566623,(((((Python_sebae:0.07282374,Python_molurus:0.02476629):0.02705769,Python_curtus:0.09737761):0.01517889,Python_regius:0.1214553):0.02036528,((Python_timoriensis:0.06640685,Python_reticulatus:0.04522264):0.07405751,(((Morelia_boeleni:0.08328095,((Morelia_bredli:0.03283461,Morelia_spilota:0.01994478):0.03109293,(((Morelia_tracyae:0.03936545,Morelia_amethistina:0.03400436):0.004093362,((Morelia_kinghorni:0.004637601,Morelia_nauta:0.008610755):0.005974 [...]
+    TREE 42 = ((Loxocemus_bicolor:0.3158634,(((Python_timoriensis:0.0693274,Python_reticulatus:0.04754045):0.05546853,(((Bothrochilus_boa:0.06050538,Liasis_albertisii:0.0566936):0.03382026,((Antaresia_melanocephalus:0.04823754,Antaresia_ramsayi:0.03184894):0.04372982,((Liasis_olivaceus:0.04662664,Apodora_papuana:0.07828025):0.003462852,(Liasis_fuscus:0.02185453,Liasis_mackloti:0.009381108):0.04740364):0.02318003):0.004541615):0.004595739,((((Morelia_viridisN:0.03471745,Morelia_viridisS:0 [...]
+    TREE 43 = (Candoia_aspera:0.4779116,(Loxocemus_bicolor:0.3169837,(((((Liasis_albertisii:0.0474626,Bothrochilus_boa:0.0594194):0.03660165,(((Antaresia_melanocephalus:0.04410004,Antaresia_ramsayi:0.02017723):0.05126644,(Apodora_papuana:0.05914869,(Liasis_olivaceus:0.05195717,(Liasis_fuscus:0.02399651,Liasis_mackloti:0.007262315):0.04950535):0.01042366):0.01214327):0.01340623,(((Morelia_bredli:0.03687476,Morelia_spilota:0.03293356):0.02977614,((Morelia_tracyae:0.04761735,(Morelia_amethi [...]
+    TREE 44 = (Candoia_aspera:0.3818913,(Loxocemus_bicolor:0.2384072,(((((Morelia_carinata:0.05838946,(Morelia_viridisN:0.02674,Morelia_viridisS:0.04088508):0.02908822):0.01411169,(Antaresia_maculosa:0.0580162,((Antaresia_stimsoni:0.01965443,Antaresia_childreni:0.02601986):0.04652883,Antaresia_perthensis:0.07484478):0.01135406):0.008163954):0.01560239,((((Apodora_papuana:0.05828467,(Liasis_olivaceus:0.0320515,(Liasis_fuscus:0.0194024,Liasis_mackloti:0.009507815):0.04213784):0.01011161):0 [...]
+    TREE 45 = (Candoia_aspera:0.4749727,(Loxocemus_bicolor:0.2909081,(((((Liasis_albertisii:0.05014584,Bothrochilus_boa:0.05870229):0.03693661,((Antaresia_melanocephalus:0.04437313,Antaresia_ramsayi:0.02662717):0.04252003,(Apodora_papuana:0.05208564,(Liasis_olivaceus:0.04891133,(Liasis_fuscus:0.02040686,Liasis_mackloti:0.01262133):0.05273751):0.01025606):0.02029362):0.007923826):0.0124865,(((Morelia_carinata:0.0634373,(Morelia_viridisN:0.03721233,Morelia_viridisS:0.0516408):0.02993211):0 [...]
+    TREE 46 = (Candoia_aspera:0.5167322,((((Python_timoriensis:0.06709443,Python_reticulatus:0.06793708):0.07640251,(((Bothrochilus_boa:0.0713485,Liasis_albertisii:0.04180947):0.03760496,(((Antaresia_maculosa:0.08028006,((Antaresia_stimsoni:0.01299851,Antaresia_childreni:0.02807306):0.04246189,Antaresia_perthensis:0.07026985):0.01391976):0.0195585,((Morelia_viridisN:0.03546905,Morelia_viridisS:0.0645878):0.04086846,Morelia_carinata:0.06635054):0.009228577):0.01671697,((Antaresia_melanoce [...]
+    TREE 47 = ((Loxocemus_bicolor:0.243761,((Python_regius:0.1256326,((Python_sebae:0.07759863,Python_molurus:0.03319403):0.02745888,Python_curtus:0.1036193):0.01886196):0.04526608,((Python_timoriensis:0.07885125,Python_reticulatus:0.04078674):0.06705376,(((Morelia_boeleni:0.07244095,((((Morelia_amethistina:0.02612452,((Morelia_nauta:0.005952378,Morelia_kinghorni:0.01197636):0.003538517,Morelia_clastolepis:0.01001071):0.01796709):0.009481718,Morelia_tracyae:0.03859101):0.03004094,Morelia [...]
+    TREE 48 = (Candoia_aspera:0.4623149,(Loxocemus_bicolor:0.2848758,(((Python_timoriensis:0.08529002,Python_reticulatus:0.06117518):0.05230377,((((Liasis_albertisii:0.05560586,Bothrochilus_boa:0.05213018):0.03215789,(Antaresia_ramsayi:0.03804862,Antaresia_melanocephalus:0.04693488):0.04454437):0.007747695,(Apodora_papuana:0.05693809,(Liasis_olivaceus:0.03222194,(Liasis_fuscus:0.0141549,Liasis_mackloti:0.01803922):0.06031693):0.008449162):0.01240927):0.008461036,((Morelia_boeleni:0.07705 [...]
+    TREE 49 = ((((((Python_molurus:0.02966201,Python_sebae:0.07929907):0.02351445,Python_curtus:0.089452):0.02699706,Python_regius:0.1383436):0.04680606,((((((Bothrochilus_boa:0.05036822,Liasis_albertisii:0.05982621):0.03438025,(((Liasis_olivaceus:0.03713293,(Liasis_fuscus:0.01918723,Liasis_mackloti:0.01381359):0.04989393):0.01383069,Apodora_papuana:0.06764718):0.01789452,(Antaresia_melanocephalus:0.03643972,Antaresia_ramsayi:0.02577372):0.04709483):0.006821191):0.00657582,Morelia_boelen [...]
+    TREE 50 = (Candoia_aspera:0.4379332,(((((Python_sebae:0.07359605,Python_molurus:0.04254693):0.00898198,Python_curtus:0.1110208):0.02649152,Python_regius:0.09660345):0.05497974,(((Morelia_boeleni:0.07553051,((((Morelia_tracyae:0.04456018,Morelia_amethistina:0.02108639):0.002624646,(Morelia_nauta:0.008275892,(Morelia_kinghorni:0.01202487,Morelia_clastolepis:0.006690178):0.0003475322):0.0154572):0.03874607,Morelia_oenpelliensis:0.05392181):0.002551398,(Morelia_bredli:0.02786552,Morelia_ [...]
+    TREE 51 = ((Loxocemus_bicolor:0.3498798,(((Python_curtus:0.1199984,(Python_molurus:0.03343227,Python_sebae:0.06214762):0.01505318):0.01582637,Python_regius:0.1361952):0.06415399,(((Morelia_boeleni:0.07756513,((Morelia_oenpelliensis:0.07415,(Morelia_tracyae:0.04061802,(Morelia_amethistina:0.04029614,(Morelia_clastolepis:0.008050705,(Morelia_kinghorni:0.008623384,Morelia_nauta:0.01475328):0.003627272):0.01071592):0.01336837):0.02026353):0.002976652,(Morelia_bredli:0.02779951,Morelia_sp [...]
+    TREE 52 = (Candoia_aspera:0.4393582,(Loxocemus_bicolor:0.3036332,(((Python_timoriensis:0.07685404,Python_reticulatus:0.0588312):0.06606706,(((((Morelia_bredli:0.02885476,Morelia_spilota:0.02530417):0.03299627,((((Morelia_clastolepis:0.002007727,(Morelia_nauta:0.01156896,Morelia_kinghorni:0.002030209):0.007676394):0.01607767,Morelia_tracyae:0.04849488):0.01065517,Morelia_amethistina:0.02323289):0.04031674,Morelia_oenpelliensis:0.07842425):0.006012383):0.01645917,Morelia_boeleni:0.0821 [...]
+    TREE 53 = (Candoia_aspera:0.4354567,((((((((((Morelia_amethistina:0.01880946,Morelia_tracyae:0.03228031):0.007355948,(Morelia_nauta:0.002509787,(Morelia_kinghorni:0.008547859,Morelia_clastolepis:0.006177976):0.01163141):0.006092898):0.0437007,Morelia_oenpelliensis:0.05868744):0.01058369,(Morelia_bredli:0.02021243,Morelia_spilota:0.0236931):0.04113045):0.02420083,Morelia_boeleni:0.08697624):0.001693959,(((Apodora_papuana:0.05754786,((Liasis_fuscus:0.01519766,Liasis_mackloti:0.00888373 [...]
+    TREE 54 = ((((((((((Morelia_kinghorni:0.008665215,Morelia_nauta:0.0109783):0.001026878,Morelia_clastolepis:0.003541599):0.02206708,Morelia_amethistina:0.01969782):0.004866081,Morelia_tracyae:0.03401599):0.04312542,Morelia_oenpelliensis:0.05542273):0.004424566,(Morelia_bredli:0.02587295,Morelia_spilota:0.02686584):0.03220125):0.02335586,(Morelia_boeleni:0.07466805,((((Morelia_viridisN:0.03284982,Morelia_viridisS:0.05396462):0.02402509,Morelia_carinata:0.0733903):0.00859578,(Antaresia_ [...]
+    TREE 55 = (Candoia_aspera:0.4096436,(Loxocemus_bicolor:0.2825922,((Python_regius:0.143649,((Python_molurus:0.03263311,Python_sebae:0.09044284):0.03325118,Python_curtus:0.09589007):0.01728509):0.04095609,((Python_timoriensis:0.07044698,Python_reticulatus:0.06046401):0.0612632,((((Morelia_bredli:0.0283581,Morelia_spilota:0.01987454):0.02565555,(((Morelia_tracyae:0.04287551,(Morelia_clastolepis:0.006354092,(Morelia_kinghorni:0.007341021,Morelia_nauta:0.01217569):1.422462e-05):0.02275691 [...]
+    TREE 56 = ((((Python_timoriensis:0.0821696,Python_reticulatus:0.0374911):0.06419834,(((Morelia_boeleni:0.08052849,((Morelia_bredli:0.03064723,Morelia_spilota:0.02530753):0.03036284,(((Morelia_amethistina:0.02839346,((Morelia_kinghorni:0.005676871,Morelia_nauta:0.006679796):0.006206046,Morelia_clastolepis:0.002117769):0.02479271):0.007808779,Morelia_tracyae:0.056348):0.03795807,Morelia_oenpelliensis:0.06488113):0.002177624):0.01503186):0.006123575,((Bothrochilus_boa:0.07207754,Liasis_ [...]
+    TREE 57 = ((Loxocemus_bicolor:0.2325396,((((Python_molurus:0.03786264,Python_sebae:0.06502635):0.007936275,Python_curtus:0.09277477):0.01503368,Python_regius:0.1075102):0.03876495,((Python_timoriensis:0.06244723,Python_reticulatus:0.06023936):0.05824688,(((((Morelia_carinata:0.06145063,(Morelia_viridisN:0.03435088,Morelia_viridisS:0.04947603):0.02912258):0.01761857,(Antaresia_maculosa:0.07065326,(Antaresia_perthensis:0.07828664,(Antaresia_stimsoni:0.0170134,Antaresia_childreni:0.0266 [...]
+    TREE 58 = ((Loxocemus_bicolor:0.2468242,(((Python_curtus:0.1170279,(Python_sebae:0.09284164,Python_molurus:0.03687876):0.02243126):0.01334561,Python_regius:0.1148749):0.04587945,((Python_timoriensis:0.07785041,Python_reticulatus:0.06751723):0.08746517,(((Morelia_boeleni:0.08939881,(Bothrochilus_boa:0.07597912,Liasis_albertisii:0.05399311):0.0351783):0.0007660284,((Morelia_bredli:0.02840847,Morelia_spilota:0.02214527):0.03226572,(Morelia_oenpelliensis:0.07709114,(((Morelia_nauta:0.006 [...]
+    TREE 59 = (((Python_regius:0.1522807,(Python_curtus:0.1117174,(Python_sebae:0.1053864,Python_molurus:0.04630373):0.02259866):0.01967949):0.0319183,((Python_timoriensis:0.08010659,Python_reticulatus:0.03796243):0.06844831,(((Morelia_boeleni:0.09501911,((Morelia_oenpelliensis:0.06511799,(((Morelia_clastolepis:0.006418486,(Morelia_nauta:0.01409049,Morelia_kinghorni:0.01242694):0.009480745):0.01750396,Morelia_amethistina:0.02468036):0.005713716,Morelia_tracyae:0.05530899):0.02743302):0.0 [...]
+    TREE 60 = (Candoia_aspera:0.4340778,(((((Python_molurus:0.05527572,Python_sebae:0.08815507):0.01067865,Python_curtus:0.09143945):0.006439727,Python_regius:0.1452298):0.03931163,((Python_timoriensis:0.07775332,Python_reticulatus:0.056339):0.06948392,((((Antaresia_melanocephalus:0.0428833,Antaresia_ramsayi:0.02298154):0.05497927,(((Liasis_fuscus:0.0206774,Liasis_mackloti:0.01631532):0.04937458,Liasis_olivaceus:0.05034072):0.0149876,Apodora_papuana:0.06038298):0.02319475):0.008116068,(L [...]
+    TREE 61 = ((((((((Antaresia_perthensis:0.07638852,(Antaresia_stimsoni:0.02121778,Antaresia_childreni:0.02889215):0.0354173):0.01981606,Antaresia_maculosa:0.0693481):0.01032929,(Morelia_carinata:0.06492437,(Morelia_viridisN:0.02927527,Morelia_viridisS:0.04859942):0.01695186):0.01177804):0.01795348,(((Liasis_albertisii:0.0494817,Bothrochilus_boa:0.0678623):0.03585962,((Antaresia_ramsayi:0.02585155,Antaresia_melanocephalus:0.04108934):0.04877222,(Apodora_papuana:0.06419168,(Liasis_oliva [...]
+    TREE 62 = (((((Python_curtus:0.1007437,(Python_sebae:0.07847175,Python_molurus:0.03661869):0.01177114):0.01913868,Python_regius:0.1148672):0.044093,((Morelia_boeleni:0.08594061,(((((((Morelia_kinghorni:0.004810639,Morelia_nauta:0.01061287):0.01048466,Morelia_clastolepis:0.009231243):0.00945426,Morelia_amethistina:0.02563356):0.004799697,Morelia_tracyae:0.04114297):0.03006124,Morelia_oenpelliensis:0.07771829):0.009323358,(Morelia_bredli:0.03632089,Morelia_spilota:0.02523437):0.0420995 [...]
+    TREE 63 = (Candoia_aspera:0.4533356,((((((Liasis_albertisii:0.04457579,Bothrochilus_boa:0.05094706):0.0327761,(Morelia_boeleni:0.07913116,((Morelia_oenpelliensis:0.07029218,((Morelia_clastolepis:0.0019791,(Morelia_kinghorni:0.0075455,Morelia_nauta:0.009120248):0.009370934):0.01231059,(Morelia_amethistina:0.02332951,Morelia_tracyae:0.03878409):0.003046213):0.02605981):0.002390341,(Morelia_bredli:0.03384283,Morelia_spilota:0.02554936):0.03919047):0.02307381):0.001715514):0.0006266203,( [...]
+    TREE 64 = (Candoia_aspera:0.4760166,(Loxocemus_bicolor:0.2757868,(((Python_timoriensis:0.07564501,Python_reticulatus:0.05995008):0.08176805,(((Liasis_albertisii:0.05854234,Bothrochilus_boa:0.06265786):0.03611681,(((Morelia_carinata:0.06077572,(Morelia_viridisN:0.03375266,Morelia_viridisS:0.05060965):0.01678711):0.01210923,(Antaresia_maculosa:0.06233509,(Antaresia_perthensis:0.0815234,(Antaresia_stimsoni:0.01417981,Antaresia_childreni:0.02374216):0.02519215):0.01449892):0.00686185):0. [...]
+    TREE 65 = (Candoia_aspera:0.4138643,(((((Python_molurus:0.03606197,Python_sebae:0.08431371):0.0151031,Python_curtus:0.09509211):0.009259102,Python_regius:0.1134973):0.06223495,(((((Liasis_albertisii:0.06055545,Bothrochilus_boa:0.07139759):0.01471734,((Antaresia_ramsayi:0.03518096,Antaresia_melanocephalus:0.02931779):0.04659165,(Apodora_papuana:0.06222481,(Liasis_olivaceus:0.04596883,(Liasis_fuscus:0.02048555,Liasis_mackloti:0.01178189):0.0473872):0.005137335):0.01289113):0.01185418): [...]
+    TREE 66 = (Candoia_aspera:0.4089105,(Loxocemus_bicolor:0.2593586,(((Python_timoriensis:0.07988133,Python_reticulatus:0.05016129):0.07450671,(((((Morelia_carinata:0.07236462,(Morelia_viridisN:0.02473521,Morelia_viridisS:0.05530351):0.02301765):0.009019592,(Antaresia_maculosa:0.08353675,(Antaresia_perthensis:0.09658293,(Antaresia_stimsoni:0.013754,Antaresia_childreni:0.03040858):0.0405517):0.01371936):0.01787184):0.02750648,(Morelia_boeleni:0.08164632,((Morelia_bredli:0.03820559,Moreli [...]
+    TREE 67 = (Candoia_aspera:0.4449339,(((((((Antaresia_melanocephalus:0.0453505,Antaresia_ramsayi:0.03185906):0.04791288,(Apodora_papuana:0.06816093,((Liasis_fuscus:0.013558,Liasis_mackloti:0.0222779):0.04901131,Liasis_olivaceus:0.04632228):0.008134278):0.01378831):0.009530067,((((Morelia_bredli:0.03261452,Morelia_spilota:0.02358782):0.02347192,(((((Morelia_clastolepis:0.008472611,Morelia_kinghorni:0.005468857):0.003942563,Morelia_nauta:0.006005992):0.01842269,Morelia_tracyae:0.0368650 [...]
+    TREE 68 = (Candoia_aspera:0.5362472,((((Python_curtus:0.1037307,(Python_sebae:0.1005292,Python_molurus:0.05540015):0.02078274):0.01943558,Python_regius:0.1120188):0.03340796,((Python_timoriensis:0.07649895,Python_reticulatus:0.06999589):0.05984266,((((Bothrochilus_boa:0.07809498,Liasis_albertisii:0.04177523):0.03933876,((Antaresia_melanocephalus:0.04510809,Antaresia_ramsayi:0.0235075):0.05109955,(Apodora_papuana:0.07140937,((Liasis_fuscus:0.01855687,Liasis_mackloti:0.01844191):0.0446 [...]
+    TREE 69 = ((Loxocemus_bicolor:0.342135,((((((Liasis_albertisii:0.04830736,Bothrochilus_boa:0.07607338):0.03296,(((Liasis_fuscus:0.01679569,Liasis_mackloti:0.01195317):0.04018365,(Apodora_papuana:0.06570416,Liasis_olivaceus:0.04346911):0.01052556):0.01745802,(Antaresia_melanocephalus:0.03843291,Antaresia_ramsayi:0.03509593):0.0400436):0.01547011):0.002890318,(((((Morelia_tracyae:0.0552147,(Morelia_nauta:0.01088679,(Morelia_kinghorni:0.007242071,Morelia_clastolepis:0.01214149):0.004459 [...]
+    TREE 70 = ((Loxocemus_bicolor:0.2690822,(((Python_curtus:0.0925561,(Python_sebae:0.0880517,Python_molurus:0.04310565):0.03291147):0.02703655,Python_regius:0.1015576):0.03443013,((Python_timoriensis:0.07286464,Python_reticulatus:0.0484841):0.05235894,(((((Morelia_viridisN:0.04018959,Morelia_viridisS:0.04779184):0.01664751,Morelia_carinata:0.05738444):0.01658997,(Antaresia_maculosa:0.05536493,(Antaresia_perthensis:0.07150627,(Antaresia_stimsoni:0.01522095,Antaresia_childreni:0.02336449 [...]
+    TREE 71 = ((((((Morelia_carinata:0.06155951,(Morelia_viridisS:0.04464672,Morelia_viridisN:0.02762765):0.02463339):0.01333783,(((Antaresia_childreni:0.01969403,Antaresia_stimsoni:0.01686892):0.02815768,Antaresia_perthensis:0.07071792):0.01990291,Antaresia_maculosa:0.08918901):0.01022217):0.01366177,(((Liasis_albertisii:0.05270908,Bothrochilus_boa:0.064011):0.03191483,((Antaresia_ramsayi:0.02613638,Antaresia_melanocephalus:0.04321827):0.04677747,(Apodora_papuana:0.05451725,((Liasis_fus [...]
+    TREE 72 = (Candoia_aspera:0.453306,(Loxocemus_bicolor:0.3887322,((((Python_sebae:0.08365485,Python_molurus:0.03665528):0.0154688,Python_curtus:0.1050893):0.007793195,Python_regius:0.1534954):0.0613676,((Python_timoriensis:0.0680481,Python_reticulatus:0.0621895):0.06921195,(((Morelia_boeleni:0.07784198,((Morelia_bredli:0.02851329,Morelia_spilota:0.02151493):0.03156458,(Morelia_oenpelliensis:0.06409916,((Morelia_tracyae:0.03399367,Morelia_amethistina:0.03818573):0.008414893,(Morelia_na [...]
+    TREE 73 = (Candoia_aspera:0.5520749,(Loxocemus_bicolor:0.2377251,(((((((Antaresia_stimsoni:0.0125949,Antaresia_childreni:0.02313392):0.02206282,Antaresia_perthensis:0.07820299):0.01191254,Antaresia_maculosa:0.06207283):0.01709952,((Morelia_viridisN:0.02766603,Morelia_viridisS:0.05194837):0.02040493,Morelia_carinata:0.06265164):0.005846406):0.03017284,(((((((Morelia_clastolepis:0.002789684,(Morelia_nauta:0.008007311,Morelia_kinghorni:0.004015526):0.004611768):0.007731586,Morelia_ameth [...]
+    TREE 74 = (((((Python_curtus:0.1006856,(Python_sebae:0.08316927,Python_molurus:0.03516886):0.01796021):0.02440497,Python_regius:0.1332322):0.02834445,((Python_timoriensis:0.06957957,Python_reticulatus:0.05636223):0.05746936,((((Morelia_bredli:0.02128809,Morelia_spilota:0.02450162):0.03612316,((((Morelia_clastolepis:0.005198126,(Morelia_nauta:0.01442816,Morelia_kinghorni:0.01178255):0.002347382):0.02317242,Morelia_amethistina:0.01246185):0.01386049,Morelia_tracyae:0.03155355):0.024922 [...]
+    TREE 75 = (((((Python_timoriensis:0.07658318,Python_reticulatus:0.05110824):0.05329578,((((Morelia_bredli:0.03524291,Morelia_spilota:0.0308012):0.02868422,(Morelia_oenpelliensis:0.0754284,((Morelia_amethistina:0.02502096,(Morelia_clastolepis:0.007619496,(Morelia_kinghorni:0.00782635,Morelia_nauta:0.006335583):0.002912933):0.02746534):0.01734855,Morelia_tracyae:0.02524737):0.02578698):0.003275429):0.02849694,Morelia_boeleni:0.07492803):0.005232946,((((Morelia_viridisN:0.03856291,Morel [...]
+    TREE 76 = (Candoia_aspera:0.4393047,(Loxocemus_bicolor:0.2542101,(((((((Antaresia_maculosa:0.06741466,((Antaresia_childreni:0.0330332,Antaresia_stimsoni:0.01454498):0.05188499,Antaresia_perthensis:0.0586203):0.01222515):0.008918908,(Morelia_carinata:0.06125037,(Morelia_viridisS:0.06262673,Morelia_viridisN:0.03160418):0.01678727):0.01154299):0.01559785,((Antaresia_ramsayi:0.02670876,Antaresia_melanocephalus:0.0420795):0.05122368,((Liasis_fuscus:0.02065444,Liasis_mackloti:0.009335938): [...]
+    TREE 77 = (((((Python_timoriensis:0.08782096,Python_reticulatus:0.07040678):0.05833417,(((((Morelia_viridisN:0.03720383,Morelia_viridisS:0.05117086):0.02595,Morelia_carinata:0.07158621):0.01271671,((Antaresia_perthensis:0.07707314,(Antaresia_stimsoni:0.01530133,Antaresia_childreni:0.02824286):0.03871629):0.008834431,Antaresia_maculosa:0.08010591):0.009361026):0.01831402,((Morelia_bredli:0.03099292,Morelia_spilota:0.02404462):0.03387775,((((Morelia_nauta:0.005260173,(Morelia_kinghorni [...]
+    TREE 78 = (Candoia_aspera:0.4009131,((((Python_curtus:0.0869089,(Python_sebae:0.06311244,Python_molurus:0.04755661):0.01261219):0.02842917,Python_regius:0.09876378):0.03739969,(((((Liasis_albertisii:0.05324242,Bothrochilus_boa:0.06904211):0.02380102,(Antaresia_ramsayi:0.01610698,Antaresia_melanocephalus:0.04963503):0.06369576):0.001914608,(Apodora_papuana:0.05942414,((Liasis_fuscus:0.02549272,Liasis_mackloti:0.01196099):0.04490971,Liasis_olivaceus:0.03668288):0.01814909):0.0184324):0 [...]
+    TREE 79 = (((((Python_timoriensis:0.07553944,Python_reticulatus:0.07115621):0.07381626,(((((Morelia_oenpelliensis:0.06551793,(Morelia_amethistina:0.03619238,(Morelia_tracyae:0.05018406,(Morelia_clastolepis:0.002661674,(Morelia_kinghorni:0.01704692,Morelia_nauta:0.009792482):0.004593366):0.01308924):0.00478429):0.02824213):0.006003933,(Morelia_bredli:0.02129453,Morelia_spilota:0.03007546):0.03782753):0.02679029,Morelia_boeleni:0.06715453):0.0118746,(((Antaresia_melanocephalus:0.034965 [...]
+    TREE 80 = (((((Python_timoriensis:0.05253829,Python_reticulatus:0.08561018):0.06208115,(((Morelia_boeleni:0.09228959,((((Morelia_amethistina:0.02455933,(Morelia_nauta:0.002681559,(Morelia_kinghorni:0.008246165,Morelia_clastolepis:0.009838868):0.004629327):0.00972198):0.001301648,Morelia_tracyae:0.03946081):0.03981529,Morelia_oenpelliensis:0.05633571):0.0032127,(Morelia_bredli:0.02654698,Morelia_spilota:0.02108595):0.03328877):0.01067315):0.01415516,((((Liasis_olivaceus:0.04525111,Apo [...]
+    TREE 81 = (Loxocemus_bicolor:0.2566853,(Candoia_aspera:0.4693582,(((((((Morelia_viridisN:0.03146441,Morelia_viridisS:0.05593448):0.0249236,Morelia_carinata:0.06224873):0.01552312,(Antaresia_maculosa:0.05616948,((Antaresia_stimsoni:0.01007809,Antaresia_childreni:0.03079328):0.03638529,Antaresia_perthensis:0.06970695):0.01042068):0.008149887):0.009665657,((Morelia_bredli:0.02544855,Morelia_spilota:0.02456336):0.02973036,(((Morelia_tracyae:0.03685517,(Morelia_clastolepis:0.002197272,(Mo [...]
+    TREE 82 = ((((Python_regius:0.09520516,(Python_curtus:0.09522774,(Python_molurus:0.03950332,Python_sebae:0.05703805):0.02247388):0.02119926):0.03441932,((Python_timoriensis:0.06061827,Python_reticulatus:0.07481421):0.04666084,(((Morelia_boeleni:0.07246276,(((((Morelia_clastolepis:0.002509624,(Morelia_nauta:0.01384186,Morelia_kinghorni:0.009644053):0.007542425):0.009695616,Morelia_tracyae:0.02873899):0.008218219,Morelia_amethistina:0.02877):0.03316924,Morelia_oenpelliensis:0.05804569) [...]
+    TREE 83 = ((Loxocemus_bicolor:0.2952234,(((Python_curtus:0.09552038,(Python_molurus:0.03862383,Python_sebae:0.07267771):0.01718111):0.008183769,Python_regius:0.1364789):0.04738912,((Python_timoriensis:0.05761065,Python_reticulatus:0.0516746):0.06871581,((((((Liasis_fuscus:0.01480832,Liasis_mackloti:0.01183352):0.04883964,Liasis_olivaceus:0.0401242):0.0145517,Apodora_papuana:0.05118669):0.0195619,((Antaresia_melanocephalus:0.03374798,Antaresia_ramsayi:0.0376499):0.07148785,(Bothrochil [...]
+    TREE 84 = (Candoia_aspera:0.5016068,((((Python_curtus:0.09427624,(Python_molurus:0.0227545,Python_sebae:0.09191553):0.01452091):0.0283426,Python_regius:0.107775):0.0194763,((((Morelia_carinata:0.07375593,(Morelia_viridisS:0.0482057,Morelia_viridisN:0.03770947):0.01584058):0.01271865,(Antaresia_maculosa:0.0710502,((Antaresia_childreni:0.01593182,Antaresia_stimsoni:0.01507543):0.03282155,Antaresia_perthensis:0.06500106):0.01194462):0.002767007):0.01885032,((((((Liasis_fuscus:0.02328425 [...]
+    TREE 85 = ((Loxocemus_bicolor:0.2794339,((((Python_molurus:0.03312409,Python_sebae:0.07822667):0.02901692,Python_curtus:0.1018022):0.022905,Python_regius:0.1415919):0.05044797,((Python_timoriensis:0.06076838,Python_reticulatus:0.06819505):0.05838198,((((Morelia_bredli:0.03212745,Morelia_spilota:0.0331548):0.02415561,(((((Morelia_kinghorni:0.0102482,Morelia_nauta:0.008844634):0.002827055,Morelia_clastolepis:0.005156116):0.01969235,Morelia_amethistina:0.02046229):0.01034341,Morelia_tra [...]
+    TREE 86 = ((Loxocemus_bicolor:0.2683567,((Python_regius:0.1091685,((Python_molurus:0.03421631,Python_sebae:0.07909729):0.02669281,Python_curtus:0.1242822):0.01552606):0.03635674,(((((Antaresia_maculosa:0.05295493,(Antaresia_perthensis:0.07248801,(Antaresia_childreni:0.02284777,Antaresia_stimsoni:0.01413853):0.0379826):0.01342258):0.01068672,(Morelia_carinata:0.06262578,(Morelia_viridisS:0.04071067,Morelia_viridisN:0.03447906):0.01312935):0.01015363):0.01178044,((Antaresia_ramsayi:0.0 [...]
+    TREE 87 = (Candoia_aspera:0.4835051,((((((Antaresia_maculosa:0.0667961,(Antaresia_perthensis:0.07352979,(Antaresia_childreni:0.01595269,Antaresia_stimsoni:0.01602515):0.0377149):0.01316477):0.007044655,(Morelia_carinata:0.06471191,(Morelia_viridisS:0.05813323,Morelia_viridisN:0.02882801):0.01819743):0.01237297):0.01222415,((Morelia_boeleni:0.1043827,((Antaresia_ramsayi:0.03185868,Antaresia_melanocephalus:0.05093386):0.06070819,((Liasis_albertisii:0.04981429,Bothrochilus_boa:0.0710130 [...]
+    TREE 88 = ((Candoia_aspera:0.3563804,Loxocemus_bicolor:0.2285549):0.05516094,((Python_regius:0.1174086,((Python_sebae:0.0852972,Python_molurus:0.04013307):0.02585051,Python_curtus:0.1126192):0.01456538):0.03825918,((((((Morelia_bredli:0.02760348,Morelia_spilota:0.02851919):0.03478335,((Morelia_amethistina:0.02163415,((Morelia_clastolepis:0.003876799,(Morelia_nauta:0.01234636,Morelia_kinghorni:0.00726198):0.00929985):0.02923753,Morelia_tracyae:0.04439396):0.004379334):0.0374095,Moreli [...]
+    TREE 89 = (Candoia_aspera:0.4520071,(((Python_regius:0.1009007,(Python_curtus:0.09834139,(Python_sebae:0.0801233,Python_molurus:0.0414197):0.01531084):0.02265769):0.06058729,(((((Liasis_albertisii:0.04496162,Bothrochilus_boa:0.07014187):0.0258081,((Antaresia_melanocephalus:0.03625373,Antaresia_ramsayi:0.0349284):0.04496869,((Liasis_olivaceus:0.03381452,Apodora_papuana:0.06317617):0.01193837,(Liasis_fuscus:0.01998534,Liasis_mackloti:0.01399873):0.04608922):0.02671637):0.00873879):0.00 [...]
+    TREE 90 = (Candoia_aspera:0.4645928,((((Python_curtus:0.1195643,(Python_molurus:0.03740045,Python_sebae:0.08232443):0.01678619):0.02534988,Python_regius:0.1532762):0.02461821,(((((((Antaresia_childreni:0.02600772,Antaresia_stimsoni:0.01458769):0.02653477,Antaresia_perthensis:0.06571443):0.01466257,Antaresia_maculosa:0.05789742):0.01238719,((Morelia_viridisS:0.04478665,Morelia_viridisN:0.03251355):0.02382011,Morelia_carinata:0.08176551):0.01515166):0.01923722,(((Antaresia_ramsayi:0.01 [...]
+    TREE 91 = ((Candoia_aspera:0.4253582,Loxocemus_bicolor:0.287425):0.06440288,(((Python_curtus:0.09233986,(Python_molurus:0.03912399,Python_sebae:0.07176809):0.01470935):0.01220318,Python_regius:0.1215375):0.04595127,((Python_timoriensis:0.07923207,Python_reticulatus:0.07217266):0.06028461,(((((Morelia_oenpelliensis:0.05400092,(Morelia_tracyae:0.03579002,(Morelia_amethistina:0.02089989,((Morelia_clastolepis:0.004037098,Morelia_kinghorni:0.006879557):0.00439477,Morelia_nauta:0.009191494 [...]
+    TREE 92 = (Loxocemus_bicolor:0.3061242,((((((((Morelia_viridisS:0.0488484,Morelia_viridisN:0.02718428):0.03040546,Morelia_carinata:0.05363968):0.01366625,(((Antaresia_childreni:0.02372216,Antaresia_stimsoni:0.01250969):0.03749508,Antaresia_perthensis:0.05944827):0.009214165,Antaresia_maculosa:0.07487972):0.005791328):0.01428844,((((Liasis_olivaceus:0.03355395,Apodora_papuana:0.08493765):0.01773312,(Liasis_fuscus:0.01973323,Liasis_mackloti:0.01224162):0.04450705):0.02705612,(Antaresia [...]
+    TREE 93 = (((((Python_curtus:0.1052608,(Python_molurus:0.03656474,Python_sebae:0.0804119):0.01736489):0.01981846,Python_regius:0.118569):0.04305073,((((((((Antaresia_stimsoni:0.01799286,Antaresia_childreni:0.02440719):0.03664232,Antaresia_perthensis:0.06835721):0.01030149,Antaresia_maculosa:0.07669343):0.008762603,(Morelia_carinata:0.05903126,(Morelia_viridisN:0.02314959,Morelia_viridisS:0.05217384):0.01633361):0.01580508):0.02360512,((Antaresia_melanocephalus:0.04446293,Antaresia_ra [...]
+    TREE 94 = ((((((((Antaresia_stimsoni:0.01394605,Antaresia_childreni:0.02307618):0.03469821,Antaresia_perthensis:0.07348681):0.006527661,Antaresia_maculosa:0.07308458):0.009157082,((Morelia_viridisN:0.03283968,Morelia_viridisS:0.04931701):0.03098315,Morelia_carinata:0.06247037):0.01365183):0.01322039,((((Apodora_papuana:0.06792187,((Liasis_fuscus:0.02949712,Liasis_mackloti:0.006633742):0.04612603,Liasis_olivaceus:0.04939289):0.0173581):0.01385641,(Antaresia_melanocephalus:0.0383219,An [...]
+    TREE 95 = ((((Python_regius:0.1106696,((Python_molurus:0.03080011,Python_sebae:0.08768923):0.01799399,Python_curtus:0.09582502):0.0237615):0.0327879,((Python_timoriensis:0.05730162,Python_reticulatus:0.05610117):0.05178376,(((Morelia_carinata:0.05950242,(Morelia_viridisN:0.03009795,Morelia_viridisS:0.04999991):0.02362724):0.02350348,(((Antaresia_stimsoni:0.01536701,Antaresia_childreni:0.02915634):0.02933886,Antaresia_perthensis:0.08195631):0.01110259,Antaresia_maculosa:0.05806133):0. [...]
+    TREE 96 = (Candoia_aspera:0.4501354,(((Python_regius:0.1048399,(Python_curtus:0.106706,(Python_molurus:0.03471225,Python_sebae:0.07358986):0.02425296):0.02633883):0.04694233,(((((Morelia_bredli:0.03593233,Morelia_spilota:0.02438969):0.03173767,(((Morelia_tracyae:0.04654885,Morelia_amethistina:0.03659602):0.008259002,(Morelia_clastolepis:0.004036055,(Morelia_kinghorni:0.006595114,Morelia_nauta:0.006000134):0.007697233):0.007601145):0.03532755,Morelia_oenpelliensis:0.05754269):0.007221 [...]
+    TREE 97 = (Candoia_aspera:0.4334181,(Loxocemus_bicolor:0.2483078,(((Python_curtus:0.09120633,(Python_sebae:0.06811592,Python_molurus:0.02124031):0.01523931):0.01271599,Python_regius:0.1235802):0.04444922,(((((Morelia_bredli:0.02647463,Morelia_spilota:0.02569939):0.03189336,((Morelia_tracyae:0.02969979,(Morelia_amethistina:0.03375546,(Morelia_clastolepis:0.00246764,(Morelia_nauta:0.004177599,Morelia_kinghorni:0.01005074):0.006947956):0.01479339):0.009535147):0.01780938,Morelia_oenpell [...]
+    TREE 98 = ((Candoia_aspera:0.5016151,Loxocemus_bicolor:0.2444724):0.03221846,((((((Morelia_viridisS:0.04729769,Morelia_viridisN:0.03434786):0.01883497,Morelia_carinata:0.08120721):0.01275832,(Antaresia_maculosa:0.07250472,((Antaresia_childreni:0.0364534,Antaresia_stimsoni:0.01815363):0.04449293,Antaresia_perthensis:0.06613409):0.01531379):0.007934725):0.01554437,(((Liasis_albertisii:0.05705392,Bothrochilus_boa:0.07101109):0.03851546,((Antaresia_ramsayi:0.03344109,Antaresia_melanoceph [...]
+    TREE 99 = (((((Python_timoriensis:0.08109342,Python_reticulatus:0.0630196):0.0759962,(((((Morelia_viridisS:0.05289974,Morelia_viridisN:0.03362567):0.01717123,Morelia_carinata:0.06720496):0.01746322,(Antaresia_maculosa:0.07133522,(Antaresia_perthensis:0.0738161,(Antaresia_childreni:0.02466137,Antaresia_stimsoni:0.01100988):0.03619607):0.0128205):0.005775329):0.01917408,((Morelia_oenpelliensis:0.0642247,(Morelia_tracyae:0.03121744,(Morelia_amethistina:0.02118763,((Morelia_clastolepis:0 [...]
+    TREE 100 = ((Loxocemus_bicolor:0.3021923,(((((((((Morelia_tracyae:0.04026419,((Morelia_clastolepis:0.003650239,Morelia_kinghorni:0.008478112):0.007771618,Morelia_nauta:0.008646885):0.01688588):0.006949875,Morelia_amethistina:0.02684855):0.03323158,Morelia_oenpelliensis:0.06536625):0.004677463,(Morelia_bredli:0.03398565,Morelia_spilota:0.02089452):0.02877487):0.02051626,Morelia_boeleni:0.08799992):0.001944661,((Antaresia_maculosa:0.07478773,(Antaresia_perthensis:0.08661767,(Antaresia_ [...]
+    TREE 101 = (Candoia_aspera:0.5519415,(((Python_regius:0.1273714,(Python_curtus:0.1089187,(Python_molurus:0.03695303,Python_sebae:0.07848258):0.02253384):0.02297084):0.06241567,((((((Liasis_fuscus:0.01357072,Liasis_mackloti:0.01388168):0.04841025,Liasis_olivaceus:0.04581289):0.003137857,Apodora_papuana:0.06201379):0.0147125,(Antaresia_ramsayi:0.02819799,Antaresia_melanocephalus:0.03866448):0.05030802):0.006126414,(((((Morelia_clastolepis:0.01366347,(Morelia_kinghorni:0.004410157,Morel [...]
+END;
+
diff --git a/doc/source/examples/pythonidae_continuous.chars.nexml b/doc/source/examples/pythonidae_continuous.chars.nexml
new file mode 100644
index 0000000..f9a3f26
--- /dev/null
+++ b/doc/source/examples/pythonidae_continuous.chars.nexml
@@ -0,0 +1,1339 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<nex:nexml
+    version="0.9"
+    xsi:schemaLocation="http://www.nexml.org/2009"
+    xmlns="http://www.nexml.org/2009"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xmlns:xml="http://www.w3.org/XML/1998/namespace"
+    xmlns:nex="http://www.nexml.org/2009"
+>
+    <otus id="x4320375832" label="Taxa">
+        <otu id="x4320381520" label="Python regius" />
+        <otu id="x4320381584" label="Python sebae" />
+        <otu id="x4320381648" label="Python molurus" />
+        <otu id="x4320381712" label="Python curtus" />
+        <otu id="x4320381776" label="Morelia bredli" />
+        <otu id="x4320381840" label="Morelia spilota" />
+        <otu id="x4320381904" label="Morelia tracyae" />
+        <otu id="x4320381968" label="Morelia clastolepis" />
+        <otu id="x4320382032" label="Morelia kinghorni" />
+        <otu id="x4320382096" label="Morelia nauta" />
+        <otu id="x4320382160" label="Morelia amethistina" />
+        <otu id="x4320382288" label="Morelia oenpelliensis" />
+        <otu id="x4320382352" label="Antaresia maculosa" />
+        <otu id="x4320382480" label="Antaresia perthensis" />
+        <otu id="x4320382544" label="Antaresia stimsoni" />
+        <otu id="x4320382608" label="Antaresia childreni" />
+        <otu id="x4320382672" label="Morelia carinata" />
+        <otu id="x4320382736" label="Morelia viridisN" />
+        <otu id="x4320382800" label="Morelia viridisS" />
+        <otu id="x4320382864" label="Apodora papuana" />
+        <otu id="x4320382928" label="Liasis olivaceus" />
+        <otu id="x4320395344" label="Liasis fuscus" />
+        <otu id="x4320395408" label="Liasis mackloti" />
+        <otu id="x4320395536" label="Antaresia melanocephalus" />
+        <otu id="x4320395600" label="Antaresia ramsayi" />
+        <otu id="x4320395664" label="Liasis albertisii" />
+        <otu id="x4320395728" label="Bothrochilus boa" />
+        <otu id="x4320395792" label="Morelia boeleni" />
+        <otu id="x4320395856" label="Python timoriensis" />
+        <otu id="x4320395920" label="Python reticulatus" />
+        <otu id="x4320395984" label="Xenopeltis unicolor" />
+        <otu id="x4320396048" label="Candoia aspera" />
+        <otu id="x4320396112" label="Loxocemus bicolor" />
+    </otus>
+    <characters id="x4320396240" label="Matrix #1 simulated by Evolve Continuous Characters" otus="x4320375832" xsi:type="nex:ContinuousSeqs">
+        <format>
+        <char id="c0" />
+        <char id="c1" />
+        <char id="c2" />
+        <char id="c3" />
+        <char id="c4" />
+        <char id="c5" />
+        <char id="c6" />
+        <char id="c7" />
+        <char id="c8" />
+        <char id="c9" />
+        <char id="c10" />
+        <char id="c11" />
+        <char id="c12" />
+        <char id="c13" />
+        <char id="c14" />
+        <char id="c15" />
+        <char id="c16" />
+        <char id="c17" />
+        <char id="c18" />
+        <char id="c19" />
+        <char id="c20" />
+        <char id="c21" />
+        <char id="c22" />
+        <char id="c23" />
+        <char id="c24" />
+        <char id="c25" />
+        <char id="c26" />
+        <char id="c27" />
+        <char id="c28" />
+        <char id="c29" />
+        <char id="c30" />
+        <char id="c31" />
+        <char id="c32" />
+        <char id="c33" />
+        <char id="c34" />
+        <char id="c35" />
+        <char id="c36" />
+        <char id="c37" />
+        <char id="c38" />
+        <char id="c39" />
+        <char id="c40" />
+        <char id="c41" />
+        <char id="c42" />
+        <char id="c43" />
+        <char id="c44" />
+        <char id="c45" />
+        <char id="c46" />
+        <char id="c47" />
+        <char id="c48" />
+        <char id="c49" />
+        <char id="c50" />
+        <char id="c51" />
+        <char id="c52" />
+        <char id="c53" />
+        <char id="c54" />
+        <char id="c55" />
+        <char id="c56" />
+        <char id="c57" />
+        <char id="c58" />
+        <char id="c59" />
+        <char id="c60" />
+        <char id="c61" />
+        <char id="c62" />
+        <char id="c63" />
+        <char id="c64" />
+        <char id="c65" />
+        <char id="c66" />
+        <char id="c67" />
+        <char id="c68" />
+        <char id="c69" />
+        <char id="c70" />
+        <char id="c71" />
+        <char id="c72" />
+        <char id="c73" />
+        <char id="c74" />
+        <char id="c75" />
+        <char id="c76" />
+        <char id="c77" />
+        <char id="c78" />
+        <char id="c79" />
+        <char id="c80" />
+        <char id="c81" />
+        <char id="c82" />
+        <char id="c83" />
+        <char id="c84" />
+        <char id="c85" />
+        <char id="c86" />
+        <char id="c87" />
+        <char id="c88" />
+        <char id="c89" />
+        <char id="c90" />
+        <char id="c91" />
+        <char id="c92" />
+        <char id="c93" />
+        <char id="c94" />
+        <char id="c95" />
+        <char id="c96" />
+        <char id="c97" />
+        <char id="c98" />
+        <char id="c99" />
+        </format>
+        <matrix>
+            <row id="x4320806968" otu="x4320395344">
+                <seq>0.242981633989 0.0472157130976 0.905551187551
+                    0.391699939224 0.185238889166 0.344442425993
+                    -0.260159586706 0.686826609379 -0.0200572756785
+                    0.36684562115 -0.676126520465 0.10207496811
+                    0.197461576738 0.20252017203 -0.0321989884048
+                    -0.244616698157 -0.766876815452 -0.698465896474
+                    0.224356852611 -0.619804571655 -0.0460969802405
+                    0.731598651036 0.0906407779256 -0.0818034656847
+                    1.2640362514 -0.432056781025 0.015574560958
+                    -0.0421148951578 0.083808557727 0.994688954184
+                    -0.0784918743661 -0.597869337909 -0.0590102369353
+                    -0.780326662752 0.0388824078753 -0.182207969998
+                    0.000246468532045 0.12681390477 -0.475756936542
+                    0.840273219206 0.0317187551165 0.433491534771
+                    -0.0638405448002 0.105977212484 -0.927756937264
+                    -0.188992613914 0.246326757319 0.595707315162
+                    0.175518500154 0.00226194338503 -0.505465706212
+                    -0.307978704422 0.310098209856 0.718080543541
+                    -0.299043586969 0.346566444117 -0.363189415224
+                    -0.174744576336 -0.232711875227 0.491962874911
+                    0.123945376534 -0.117345202563 -0.308206796511
+                    0.545503324175 0.487521833793 -0.0569793143135
+                    0.526640764865 0.554493752202 0.345453483602
+                    -0.128741184869 0.0862166266005 -0.1672818573
+                    -0.507711475981 -0.516100379196 -0.0667307005447
+                    -0.266291611483 -0.527700551944 -0.00864777709728
+                    0.0513049647853 0.802493663177 0.0870875764371
+                    0.0965701129163 -0.0876275951395 -0.00420083332248
+                    0.133430178713 0.0580246634689 -0.508365499991
+                    0.186837623126 0.00397934623737 0.596796347205
+                    0.251846669145 -0.504123356541 -0.192678344283
+                    0.275848789649 0.0324528532044 0.0560037350239
+                    -0.725592602564 -0.103372181867 0.475865168591
+                    0.415883411166</seq>
+            </row>
+            <row id="x4320828328" otu="x4320395408">
+                <seq>0.249117286352 0.0856451597352 0.486142647272
+                    0.481953532819 0.0641171007223 0.342446614232
+                    -0.23888997119 0.458757612209 0.0938748138078
+                    0.340243939018 -0.381868503507 0.0830713390337
+                    0.142484029084 0.236690239261 -0.190269565619
+                    -0.470388046759 -0.86260332096 -0.600974237013
+                    0.138208259655 -0.618704376745 -0.0380972843235
+                    0.866207356606 -0.247254558139 -0.0542569793513
+                    1.08485656688 -0.93694673386 -0.430985282206
+                    -0.0715974665723 0.205258327885 0.798889974184
+                    0.121150497274 -0.486471954121 0.0401356944685
+                    -0.445537914886 0.0625676028946 -0.126046735169
+                    0.347672498122 -0.205609289466 -0.379891871289
+                    0.880532925213 -0.225089718571 0.388163998465
+                    -0.0512576144552 -0.174945566915 -0.847832516712
+                    -0.477746931792 0.237359705962 0.362478298088
+                    0.241953293046 -0.0881913816043 -0.459795483739
+                    -0.0949777689462 0.450125241787 0.662669441917
+                    -0.414938175799 0.580519726794 -0.238429360723
+                    -0.333744155554 -0.215206854573 0.478566088479
+                    -0.0109490929798 0.00466187505346 -0.557874535958
+                    0.131791375248 0.404056958866 -0.10534847792
+                    0.405955177573 0.399699275517 0.661676433944
+                    -0.0451256572948 0.0147688291586 0.0982904656211
+                    -0.346535628464 -0.792396862593 -0.0407422048278
+                    -0.326545134298 -0.571965493027 -0.0470829876806
+                    0.0411636442285 0.688449364326 0.269458067743
+                    0.0181252653319 -0.0785327919534 -0.0769060108022
+                    0.228648381204 -0.102415099464 -0.376992362839
+                    0.162538389924 0.269263817445 0.394679594438
+                    0.185139753066 -0.34880042466 -0.333655297851
+                    0.451191218647 -0.195292396067 0.232409095622
+                    -0.538918490359 -0.130280721261 0.490314364813
+                    0.445607257944</seq>
+            </row>
+            <row id="x4320991112" otu="x4320395984">
+                <seq>-0.126641060053 0.0329032921194 -0.297564209964
+                    -0.0948584108806 1.01409724129 -0.083973913478
+                    -0.861228000029 0.194999932448 0.0360768872035
+                    -0.405247543712 0.556861729062 0.0578161101477
+                    0.310759286832 0.101674221659 0.608253104079
+                    0.474390970857 0.436615109805 1.19307392108
+                    -0.406366047819 0.212446246478 -0.669188881
+                    0.240289231836 0.388013979546 -0.00317867615556
+                    0.218411325717 0.357316608719 0.00800800008631
+                    -0.232905907849 0.315514495114 -1.44904794532
+                    0.572752633334 -0.195449128233 -0.433945938379
+                    -0.0651554568784 0.533538088024 -0.165723337866
+                    -0.588699799945 0.12768068441 0.250595159586
+                    0.276446473629 -0.0899685472401 -0.358247152897
+                    0.0549623386267 0.437257572421 0.461972660583
+                    0.353282778563 -0.246421843522 -0.348917766584
+                    0.937183023892 -0.684875050749 0.423956217324
+                    0.539184026312 0.113581225795 0.627061976463
+                    -0.558649195708 -0.0236116712407 -0.231703164758
+                    0.0739434204556 0.260862093107 0.323506727347
+                    0.197508295932 0.124938119562 0.120089588515
+                    -0.220740683263 0.813814995219 0.302333516726
+                    -0.775928437728 -0.767660183033 -0.699908218574
+                    0.173514167941 0.361714724955 0.038912012091
+                    -0.181275288507 -0.18475518114 0.328418662959
+                    -0.480344400478 -0.257231185933 0.0651799921332
+                    0.341921273996 0.318682621432 -0.23065528758
+                    0.195295220262 -0.0620021779739 0.218909577901
+                    -0.358074915229 -0.0654420211288 0.244799123739
+                    0.0146788728321 -0.221422420129 -0.308712589564
+                    -0.221068887119 0.0488407776954 0.17918805308
+                    -0.0590069152826 -0.067964684809 -0.306970303058
+                    0.113186801516 -0.428054056294 -0.234125580866
+                    -0.433084033718</seq>
+            </row>
+            <row id="x4320845592" otu="x4320395536">
+                <seq>-0.18287632663 0.398817646735 0.219985701336
+                    0.772183288891 -1.05485561795 0.564955260321
+                    0.0371122151975 -0.092356469943 0.852667942011
+                    0.377158658725 -0.164533433928 0.632027327136
+                    0.496654773732 -0.247986654 -0.0339136155941
+                    -1.06997899623 -0.641194714747 -0.862070100587
+                    -0.349287153354 -0.911989771469 0.196862436694
+                    0.374161071908 -0.301901287655 0.414168279934
+                    0.124734191488 -0.528551638415 -0.721225139308
+                    -0.0112838047756 -0.663398779403 0.349491084953
+                    0.211331074873 -0.641144527824 0.361095752979
+                    -0.570609472572 0.195481198549 -0.0779157949284
+                    0.53734987533 -0.393122738159 -0.385394212895
+                    0.959872063081 -0.142054088805 0.534123969359
+                    -0.578028865551 -0.441931794061 -0.724281383702
+                    0.00306010885877 0.0357982224079 0.23048124579
+                    0.195406243752 -0.737851855137 -0.443045400494
+                    -0.0974662681538 0.942852251615 1.17902711923
+                    -0.306380658522 0.875583704336 0.18565314553
+                    -0.86270623447 0.332939627703 0.907483435773
+                    -0.0373635628918 0.598139795367 -0.256968076872
+                    0.216281997987 0.622344693887 -0.322185250986
+                    0.656044334055 0.0677447519465 0.7188110267
+                    0.753606309018 -0.301214919223 0.101208873415
+                    -0.030318439813 -0.0180235540079 0.0678258718115
+                    -0.168616468396 -0.406602898824 0.539771576757
+                    0.389704487327 -0.31531649706 0.212815184153
+                    0.788829935876 0.301699169473 0.563249447443
+                    0.0541635494759 0.441776271175 -0.359025725513
+                    0.45707671478 -0.0217523433581 0.648936908951
+                    -0.17444558903 -0.276858736308 0.450317270051
+                    0.112754879561 0.288068015489 0.119703929729
+                    -0.354718971396 -0.0647518931085 0.638295110394
+                    0.580535522592</seq>
+            </row>
+            <row id="x4320871096" otu="x4320395600">
+                <seq>0.0550136437894 0.348772141171 0.426501098891
+                    0.866068736284 -0.698150909342 0.573455380389
+                    0.00547027558409 0.0754329893726 1.00039716528
+                    0.351630194344 -0.228629161133 0.634782911399
+                    0.380302011848 -0.0898120702884 -0.270991721001
+                    -0.618257109253 -0.246906486563 -0.316873974208
+                    -0.204593744481 -0.898375845658 0.11048567978
+                    0.443233600346 -0.344630076531 0.150197510043
+                    0.348287671202 -0.984007450221 -0.419978624398
+                    0.0584162222561 -0.692282998557 0.4123064094
+                    0.215365196383 -0.30444418106 0.39861268398
+                    -0.385967620091 0.0963607602375 -0.635321816175
+                    0.726019250438 -0.359672459665 -0.517909283434
+                    0.992055941902 -0.595102281527 0.21664980147
+                    -0.460922569079 -0.559810084155 -0.738052434371
+                    0.233398225505 0.10635969533 -0.184330859806
+                    0.135464793262 -0.2636720663 -0.480170687834
+                    -0.239784226979 0.618046675721 0.702238062907
+                    -0.512614179423 0.459144674783 -0.0473243652944
+                    -0.971517804554 0.470736094058 0.4248959606
+                    0.0869600586478 -0.228007000382 -0.174847752513
+                    -0.0884759568883 -0.244586507494 -0.0252979320668
+                    0.334744193106 0.0926504430325 0.578123482895
+                    0.734063513874 -0.324372302215 -0.00842668859385
+                    -0.253885370456 0.116073273493 0.384118558421
+                    0.139716758884 -0.190635991693 0.255697205522
+                    0.257869466865 -0.0108800408269 -0.00219649202346
+                    0.39402450991 0.444138582874 0.20235903662
+                    -0.201673138142 0.179169273223 -0.397174278276
+                    0.242555853735 0.208978956371 0.55655026081
+                    -0.261565529702 -0.457562446596 0.214363413408
+                    -0.0661989452547 0.503350663464 -0.192789566392
+                    0.0542116022212 -0.0255081958925 0.650293115546
+                    0.341587370224</seq>
+            </row>
+            <row id="x4320892456" otu="x4320395664">
+                <seq>0.133856372056 1.29091068955 0.222527195449
+                    0.284144018477 -0.482148022363 0.787929355849
+                    0.258313741344 0.0626399737157 0.54837760944
+                    0.809387431542 -0.929497747954 0.00845461848048
+                    0.501182230532 0.24728021579 0.327474611529
+                    -0.51705175822 -0.12699067136 -0.867433325496
+                    0.233465645931 -0.319696120558 0.289788708478
+                    1.05804026956 -0.334167113471 0.442539915068
+                    0.567455908607 -0.929824969815 -0.484664543719
+                    -0.0116458417208 -0.21013734721 0.263763505087
+                    0.221375611396 -0.260521275452 -0.0164103437392
+                    -0.924957561024 -0.0515122985209 -0.656733139994
+                    0.201683221587 0.33699323282 -0.399856786842
+                    1.00291298251 -0.885934888601 0.145711033628
+                    -0.116052738624 -0.27305523902 -0.517956721144
+                    0.0799272259199 0.185014936643 0.462745488322
+                    -0.229552698529 0.174035201087 -0.662694182308
+                    -0.270998362578 0.538723579982 0.705486054997
+                    0.0683098190675 0.410479586822 0.547708733083
+                    -0.702281344436 -0.114384658112 0.390081912474
+                    -0.459747973714 0.49046898683 -0.43801867028
+                    -0.0457469121154 -0.206655947312 0.29374247278
+                    0.837565657572 0.15743994112 0.0194425153104
+                    0.0758831123041 -0.38654134488 0.255387553291
+                    -0.361038008055 -0.183563089576 -0.193156942578
+                    -0.254002887902 -0.447148050046 0.160428576858
+                    0.111293114039 0.219558109796 -0.130599866551
+                    -0.285237351393 0.292499344387 0.492726415026
+                    -0.360751949292 0.567019321583 0.109541715787
+                    -0.168460024784 0.245329552674 1.30334035213
+                    0.16157374095 -0.210587504037 0.550586929906
+                    -0.0998374771632 0.224254838774 0.102535106367
+                    -0.485985251308 -0.284776872715 0.352979288194
+                    0.346471263223</seq>
+            </row>
+            <row id="x4320909720" otu="x4320395728">
+                <seq>-0.465848984227 1.02111986444 0.0622961845304
+                    0.358971291159 -0.783286122425 0.420194090695
+                    0.239482998938 -0.105870346011 0.423933306567
+                    -0.151597789296 -0.685510639719 -0.00916855418089
+                    0.404016711389 -0.170083482456 0.00452887649087
+                    -0.649224875525 0.323367060204 -0.595730400781
+                    -0.373447816282 -0.272959068915 -0.316212407062
+                    0.758761871592 -1.15765090944 0.907467331589
+                    0.410155690585 -0.599098026546 -0.526918594412
+                    -0.429513572625 0.368604653333 0.453530656498
+                    -0.229346500117 -0.0208083900103 0.117254640517
+                    -0.561766025277 0.0479225100424 -0.749639259696
+                    0.47341805303 -0.299303607654 -0.98097250996
+                    0.980249731212 -0.479269785698 0.445075637786
+                    -0.150031061144 -0.227930426895 -0.0919306150854
+                    0.435329876682 0.537318340383 0.912026336748
+                    0.196785253795 -0.575722497535 -0.0932285264072
+                    -0.337037327278 0.770665958271 0.552268241962
+                    0.563677814844 0.396409315885 0.344063948557
+                    -0.299805202432 0.337225601085 0.5119540722
+                    -0.75318065227 0.282488273052 0.0862508289689
+                    -0.564722206807 -0.076121585706 -0.0459311123604
+                    0.518628907736 0.232904074384 0.274510302642
+                    0.737125728641 -0.299925594379 -0.184716959842
+                    -0.269830079505 0.519232294225 -0.129358433207
+                    0.00914652671005 -0.215409252081 0.551940646335
+                    -0.160865494893 0.642705812305 0.0227492101239
+                    -0.0712702305919 0.40681052292 0.335629196568
+                    -0.451579313908 0.289626116979 -0.341292625911
+                    0.0129400358139 -0.00728671921023 0.468697003715
+                    -0.313195647633 0.235357711524 -0.0167756040475
+                    -0.14287033405 -0.33067787872 0.788334598926
+                    -0.431577604322 -0.489983191072 0.215764622505
+                    0.166965837398</seq>
+            </row>
+            <row id="x4320926984" otu="x4320395792">
+                <seq>0.449562490486 0.381596491477 0.216372711795
+                    0.154446175561 -0.451354129252 0.954507054359
+                    -0.662374321754 0.155635750746 0.777433971723
+                    0.866929746557 -0.347844233785 0.604855081835
+                    0.14202231755 0.324734725097 -0.371387895242
+                    -1.31482395706 0.219006842038 -0.709602972448
+                    -0.276456431101 -1.24747233112 -0.966046116961
+                    0.111497696677 -0.551330402693 0.312636189897
+                    0.546101147343 -0.288954333688 -0.398872565232
+                    0.0524985801228 -0.189493119923 1.08477119933
+                    0.419385865746 0.0226414187131 -0.419662668504
+                    -0.300675783933 0.601572764419 -0.616492623954
+                    0.619807509068 0.20894078552 -0.736819469026
+                    0.123889518982 -0.488354485626 0.32463016262
+                    -0.235034371175 -0.138463406065 -0.0953330227345
+                    0.108664376248 0.00429011536701 0.127922145893
+                    -0.674972589379 -0.248079165157 -0.839639444268
+                    -0.481854099424 -0.394664586758 0.761033369258
+                    -0.435328245856 0.02742042124 -0.0951025319049
+                    -0.740531747002 0.632805834235 0.545357249657
+                    0.0957502881653 0.559003688943 -0.27045435551
+                    0.0435459821692 0.286142597046 0.328251329055
+                    0.366144379897 0.253709610336 0.559020295128
+                    0.429184631809 -0.165765154605 0.442331123309
+                    -0.398979636816 -0.688247432227 -0.583414023902
+                    -0.716867121499 0.0251022310369 0.685235870558
+                    -0.07874036135 -0.273749143177 -0.232868632104
+                    -0.213696677327 0.355523836613 0.486276045881
+                    0.0606153507024 0.591915226752 -0.180514256679
+                    0.0102125702708 0.137240864754 0.513225548645
+                    -0.288511390844 -0.221268852942 0.362591535251
+                    0.349314765952 -0.205155859278 0.260854791592
+                    -0.453555480624 0.640294119021 0.084183246792
+                    -0.0690490371654</seq>
+            </row>
+            <row id="x4320399792" otu="x4320381520">
+                <seq>-0.0230088801573 -0.327376261257 -0.483676644025
+                    0.0868649474847 -0.39427773481 -0.0752755707409
+                    -0.0652403458492 -0.360410658303 0.735681565475
+                    -0.479457753889 0.514275136235 -0.109355926281
+                    -0.0825743552229 -0.769993476826 -0.301809357028
+                    0.25699649732 0.208457078609 -1.00393772691
+                    0.0114270244044 -0.561103339449 0.081149803943
+                    -0.303112018598 0.0258442530587 0.146611927973
+                    -0.404026072244 0.028324650335 -0.128137638453
+                    -0.1589901339 0.347623644932 -0.537262419465
+                    -0.377377086233 0.29428680516 -0.318612171757
+                    -0.736559500763 -0.104997287152 -0.49092061851
+                    0.114567674255 0.166678695226 -0.970847110578
+                    0.921435106471 0.203017484435 -0.867428434601
+                    0.0339392749477 0.184899279238 0.518282719494
+                    -0.305345547245 0.591462049174 0.140321368764
+                    0.441778771105 -0.872985996508 -0.0293926720593
+                    -0.112075692473 -0.241879159883 -0.248126934877
+                    0.059823213554 -0.21703091241 0.43760551852
+                    -0.593717574 -0.0753211476574 0.0997434871114
+                    -0.701892667558 0.741629862978 -0.487022668519
+                    0.0278985944495 -0.316982957601 -0.320282552427
+                    0.400143352117 0.757726409165 -0.826820122128
+                    -0.396950409922 0.260606588652 0.176054841918
+                    -0.307140391907 0.369849030736 0.124699303594
+                    -0.086021720897 -0.642826038846 0.349366236852
+                    0.128660978324 -0.780088987096 0.683891941667
+                    -0.0809206475853 0.0124309536 0.331121372464
+                    0.157260636459 0.0545978987766 -0.279324026029
+                    0.709573414535 0.0327981490521 -0.183362995785
+                    0.254044015669 -0.14530463113 -0.329553724377
+                    -0.452094835096 0.0477256815095 -0.513517041159
+                    0.0220502643444 -0.147923640814 0.645850490499
+                    0.0770204254234</seq>
+            </row>
+            <row id="x4321012472" otu="x4320396048">
+                <seq>0.541507343915 -0.826185175218 0.280374228362
+                    -1.04581194757 -0.587947618718 0.57001434966
+                    -0.829264144485 -0.670269462255 -0.948090120102
+                    0.335020216172 0.680500970861 0.155868568636
+                    0.789248964681 -1.14535573698 0.364520624552
+                    1.1660725609 0.395352621512 1.66844938066
+                    0.00799674355898 -0.481903622555 0.0265595914606
+                    0.735257254476 0.590651238008 0.330788789611
+                    0.477895881073 -0.218229022826 -0.531956616579
+                    -0.124862535744 0.101780180095 -1.03690317265
+                    1.02842046108 -1.27229755145 0.862008447907
+                    -0.115696249026 -0.719107808272 -0.899343756933
+                    0.44130152278 -0.155728452602 -0.31908732877
+                    0.360162364726 0.20818887862 -0.179018840535
+                    0.199132077764 -0.285858119791 0.710000348558
+                    0.159040648349 0.441816453927 -1.33526794867
+                    0.110023969264 -0.0805706175257 0.72784916289
+                    0.496143335953 0.264485062794 -0.375087923616
+                    -0.171581111073 0.716449338043 -0.0466347517427
+                    -1.06136704759 0.516401535375 -0.328456810996
+                    0.321008322566 -0.232731121207 -0.845434679119
+                    -0.118436614827 0.259586142609 0.316847535139
+                    -0.573795479228 0.733034362551 0.200659839747
+                    0.440043475146 0.135446295545 0.0526790338054
+                    -0.617105814412 0.934668797557 0.814992092395
+                    -0.101955449661 0.106285996369 0.513322825546
+                    -0.00286901186748 -1.38000047739 0.321688333174
+                    0.0539230377606 -0.227210036104 0.0496832305558
+                    -0.0464845925838 1.29869175243 1.01138267282
+                    -0.481939075466 0.517698551084 -0.691651912493
+                    -1.66195619575 0.763433659001 -0.0234586565576
+                    1.65015221593 -1.04216900962 0.210200535434
+                    -1.91420143773 -0.12223995949 0.0252745020761
+                    0.675221037504</seq>
+            </row>
+            <row id="x4320400056" otu="x4320381584">
+                <seq>-0.315394388158 0.254247232199 0.427795791043
+                    1.12049173673 -0.104808494296 -0.0473782024001
+                    -0.0548432672995 0.150919748498 0.682143733909
+                    0.171062633188 0.0857431349916 0.106914367985
+                    0.682992477047 -0.257479035767 0.071997408135
+                    -0.216919678577 0.439438991552 -0.242982867632
+                    0.560312742515 -0.773243135621 0.122559185645
+                    -0.167328286846 -0.188242222796 -0.238667327488
+                    0.235096686333 0.02379083354 -0.21930063283
+                    0.271882242152 0.648896900602 -0.434644303393
+                    -0.303139335555 -0.126073244304 -0.159314915022
+                    -0.431213140659 -0.218996168207 0.0859796873359
+                    0.138422545844 0.434551244255 -0.651916455275
+                    0.542956079765 -0.528702372787 -0.41976550419
+                    -0.726549362307 -0.0100537448024 -0.031984419141
+                    -0.545719130374 0.872420486032 0.157948095833
+                    0.0314827788621 -0.268597281021 0.239192202135
+                    0.124779310896 -0.414406682553 -0.00706844240416
+                    0.503631422812 -0.158223966795 0.520410510382
+                    -0.679193654893 -0.0724624024546 0.457238680847
+                    -0.197225278356 0.0253855656103 -0.691212034274
+                    0.437806713548 -0.560826812982 -0.432353049042
+                    0.155013615133 0.586420847789 0.00790567335545
+                    -0.274117399116 0.184246546131 0.910320239826
+                    0.211430693783 0.511940915633 0.105243616669
+                    0.61274623902 -0.652908465502 0.66878050273
+                    0.429457245963 -0.476991391712 0.148545941031
+                    -0.61579890703 0.11100075234 0.449244902296
+                    -0.417192819619 -0.296685238118 0.0522410956738
+                    0.26109514299 0.254444824865 0.0345576586529
+                    -0.0321916518173 -0.0558848970556 -0.0544608116569
+                    -0.534051713008 0.427767919054 -0.287323753221
+                    0.168645170303 0.0129702633875 0.195733700479
+                    0.896251045794</seq>
+            </row>
+            <row id="x4320417320" otu="x4320381648">
+                <seq>-0.187076216655 0.00933990923267 0.481018283396
+                    0.687294134399 -0.129335538019 0.324660143262
+                    -0.279831460481 -0.0519168498226 1.15437566973
+                    0.114247556551 0.31714055598 0.483016788796
+                    0.0225565113038 -0.643178704285 -0.214736741095
+                    -0.532011360387 0.0194789288099 -0.515840196692
+                    0.0791908156819 -0.510929188327 -0.496737458404
+                    0.11747149215 0.0377590958657 -0.13329543976
+                    0.246252056522 -0.374343677871 -0.278974825175
+                    -0.167903261896 0.66504407437 -0.540892675919
+                    0.196061249566 0.0980890263462 -0.320422984845
+                    -0.436610835987 0.103569543859 0.243657065214
+                    -0.158073887563 0.278409817354 -0.820888855695
+                    0.395229350398 -0.110593324114 -0.269473841753
+                    -0.21841315035 0.0309911674966 0.0351577882624
+                    -0.255083355481 -0.064574339118 -0.296710235855
+                    0.108554175671 0.081971360998 0.408246773313
+                    -0.501337902381 -0.408874219739 -0.0519178623843
+                    0.369353527989 0.0767241230566 0.620872509018
+                    -0.748186771181 -0.548687846931 0.09199474913
+                    -0.117451006424 0.087159848622 -0.220592161316
+                    0.0723081422948 -0.205429937385 -0.606999462941
+                    -0.108578027356 0.49791895213 -0.310434566739
+                    -0.219020560035 0.110955153268 1.10847743492
+                    -0.173441013997 0.730881034095 0.170849276289
+                    0.32000358604 -0.568124547827 0.16494112641
+                    0.36521973368 -0.636509183615 0.0397531658263
+                    -0.0580975166773 0.352531598441 0.0470822521663
+                    -0.539650492943 0.164192792627 -0.147217104537
+                    0.412385550191 0.362412752389 0.0569034323429
+                    0.349112733145 -0.185003386086 -0.217197468083
+                    -0.444198505959 0.27456020722 -0.27437707956
+                    -0.398117354096 0.201257464242 0.041076267284
+                    0.342637295347</seq>
+            </row>
+            <row id="x4320434584" otu="x4320381712">
+                <seq>0.109765904686 0.14431928383 0.274662825253
+                    1.02202598019 -0.0353372925152 1.00859376361
+                    -0.166620335117 -0.904859681052 1.13097049264
+                    -0.0343109512236 -0.462301864985 0.448228098321
+                    -0.0148440966745 -0.168689421965 -0.0625336368523
+                    -0.168689113036 0.277660807244 -0.888984029326
+                    -0.435928104447 -0.606339886206 -0.0516997827323
+                    0.538740754376 0.562769052953 -0.860300318207
+                    0.291359101289 -0.134568625089 -0.220098031854
+                    -0.0670719935538 0.706617655026 -0.129422739418
+                    -0.253800469823 0.217746137272 -0.252405918025
+                    -1.14859508917 0.344708466323 -0.056148499003
+                    0.711626943982 -0.218743884134 -0.661769932518
+                    0.442027950287 -0.389683301084 -1.33667598698
+                    -0.283754159656 -0.0239473962462 0.0677024954289
+                    -0.65912036709 0.495156279228 -0.425393633976
+                    -0.220743760287 0.0386572286798 0.64333239653
+                    0.0848667195013 -0.361991436534 0.240357305894
+                    0.565396266754 0.291101695342 0.600480458611
+                    -0.339215587935 -0.502484835548 -0.986231417997
+                    -0.543732509414 0.115749541376 -0.13189636579
+                    0.0884139839152 -0.34368285535 -0.387351269655
+                    -0.0182793353659 0.414009565869 0.449712462371
+                    -0.320044187425 0.0637811312831 0.398344858987
+                    0.192631612232 0.184266669102 0.403497446498
+                    0.13823293029 0.127925179691 0.281907550281
+                    0.838804009753 -0.266307209225 -0.0565049643178
+                    0.289481509631 -0.174169785875 0.0913870326892
+                    0.182910478387 0.275623529188 0.517246239874
+                    0.380254892837 0.552430815723 0.89130158172
+                    -0.506949762236 -0.307854956615 -0.161542346346
+                    -0.440459291326 -0.437285230705 0.867093388146
+                    0.000276983225189 0.0510307452616 0.786730743609
+                    0.157804098211</seq>
+            </row>
+            <row id="x4320455944" otu="x4320381776">
+                <seq>0.358747788632 0.603433228556 -0.257436633014
+                    0.857635048712 -0.863443811284 0.5453676692
+                    -0.0347798892082 0.255123954455 0.0871078093824
+                    0.824403880461 -0.606917785253 0.193968545407
+                    -0.0447742847681 0.41909149946 0.784933323479
+                    -0.115664309952 0.319572117005 -0.686953061974
+                    -0.313903284104 -0.312287952155 -0.622154203762
+                    0.804251297752 -0.070968138103 0.295627411687
+                    1.05423915546 -0.585368939655 -0.768480552537
+                    0.0395350893917 0.0927123763712 0.660400635023
+                    -0.137777549949 0.322512469091 0.271444928122
+                    0.154208326384 0.434777136791 -0.467488699686
+                    0.40603546524 0.250988650863 -0.329657506834
+                    0.710073070595 -0.39836139716 0.00173272467068
+                    -0.319371722178 0.0797072845188 -0.329383331745
+                    -0.0787713919788 0.140926658335 -0.342485233378
+                    -0.0594347091643 -0.471422555153 -0.595693311983
+                    0.384728014939 1.23971471247 0.300052556022
+                    -0.0416380475729 1.2716137839 -0.349389178562
+                    -0.756521484886 0.325812360728 0.0165646731478
+                    -0.445523048017 0.384416171741 -0.617081114719
+                    -0.447904911684 -0.046503689938 -0.474477749526
+                    0.514216696429 0.899818282509 0.545286788625
+                    0.725286634287 0.245511146152 4.62809150647e-05
+                    0.016484944174 0.275280685648 0.0731524244189
+                    -0.434501110556 -0.212784557502 0.553466238592
+                    0.247517937542 0.229266365215 -0.0656727175287
+                    0.189364297697 0.03803490827 -0.114742208786
+                    -0.0331016876079 0.141273108402 -0.41908104418
+                    -0.02174926636 0.414312509335 0.152404452044
+                    -0.471061758047 0.323510414218 -0.185164044589
+                    0.270533212223 0.647045567749 0.8720250828
+                    -0.631703749084 -0.174049287388 -0.2851751919
+                    0.72458733353</seq>
+            </row>
+            <row id="x4320481448" otu="x4320381840">
+                <seq>0.427423206468 0.412716658322 -0.17213747632
+                    0.720721336497 -1.21341529229 0.486898671175
+                    -0.148077056108 0.277952355728 0.45352011525
+                    1.020550792 -0.528155611602 0.278622146173
+                    0.0482807867115 0.664775545996 0.297571336033
+                    -0.0611111855455 0.272712114488 -0.811292239433
+                    -0.0631826131327 -0.492726043299 -0.177605491435
+                    0.768009312488 0.0134906727459 0.568484286914
+                    0.896397001209 -0.390121630282 -0.590475968582
+                    0.148252257576 0.0174254880112 0.577658481274
+                    -0.081030697639 -0.0872202473003 0.0635897089924
+                    0.208220100048 0.444446040904 -0.495697741669
+                    0.531467241234 0.200162019288 -0.726775154843
+                    0.714404025241 -0.291100441469 0.0263255916995
+                    -0.143007913268 -0.104183272415 -0.593510224196
+                    -0.136957837423 -0.00230189297017 -0.291066033289
+                    -0.0642374210889 -0.74443709418 -0.902186580307
+                    0.501258194679 0.844705779661 0.174199133306
+                    0.141732307882 1.0683997827 -0.511799208825
+                    -0.977372463808 0.441575690845 0.0711090260278
+                    -0.129423322831 0.0642279382356 -0.798840084774
+                    -0.468589689003 -0.019696632592 -0.365112219578
+                    0.286481537995 0.639468657742 0.441420896271
+                    0.402485293132 0.0745049723199 -0.0217838430145
+                    -0.147891573214 0.104843727208 -0.105872783865
+                    -0.374808612957 -0.258003915933 0.342153745442
+                    0.224372357402 0.131590100382 -0.0376806671775
+                    0.134691614051 -0.0345211380568 -0.187151985457
+                    -0.677680720349 -0.231639291558 -0.125147587049
+                    -0.227109565134 0.223001699545 0.325097837173
+                    -0.341789954364 0.277399682089 -0.247955959494
+                    0.269782031025 0.595529410125 0.718565239009
+                    -0.754157375385 -0.0339382934899 0.215080404109
+                    0.905141545699</seq>
+            </row>
+            <row id="x4320498712" otu="x4320381904">
+                <seq>-0.169964427473 0.0885724277268 -0.312573568394
+                    0.66826717136 -0.90332721358 -0.0505127287469
+                    -0.277868605655 0.528321898492 0.332912986754
+                    -0.0497765005162 -0.545821713515 -0.0927924786512
+                    -0.0768583095748 0.546054096918 0.548411792718
+                    -0.80110997568 0.00339883944457 -1.09876502274
+                    -0.301350827645 -1.02367570413 -0.375393915551
+                    0.441341297801 0.134479300377 0.203976587618
+                    0.534126115297 -0.271971233087 -0.985271629264
+                    -0.532824755862 0.0816208532126 0.424400865758
+                    0.107689387251 -0.476141964812 0.0493877044238
+                    -0.268755992683 0.59897844131 -0.306757622406
+                    0.378433285057 -0.138313224637 -0.811483696648
+                    0.328873583513 -0.184059392236 0.387922138292
+                    -0.212602999602 0.0430872786432 -0.386030556263
+                    -0.0694691219841 -0.226787446836 0.0608802574224
+                    0.171138552884 -0.335139887473 -0.56864712597
+                    0.352390316141 0.777897269285 0.0696728617229
+                    -0.205038314017 1.08201826534 -0.364525236299
+                    -0.540378688469 0.17374278548 0.923206219507
+                    0.0688485706398 0.0140348343238 -0.538945790985
+                    -0.0936864080359 0.226475644188 -0.606147240743
+                    0.282690796043 1.0197318988 0.152364837045
+                    0.447096519141 -0.175712499994 0.0986102415509
+                    -0.31198194428 0.503821857774 -0.240021986274
+                    -0.807862772365 -0.58790876363 0.467252264585
+                    -0.0640253553958 0.165363450742 -0.179534160564
+                    -0.0520230385894 -0.294550607761 -0.00571211072278
+                    -0.206966028113 0.1415192691 -0.433004868463
+                    -0.000508897447865 -0.197815899713 -0.133799255882
+                    -0.151244843519 0.771956235968 0.0945813659668
+                    0.326745257618 1.13511134425 1.00435683561
+                    -0.3175002061 0.00948302529515 0.220606332267
+                    0.924303343197</seq>
+            </row>
+            <row id="x4321029736" otu="x4320396112">
+                <seq>-0.485828679635 0.556167776384 0.17458074714
+                    0.385053440787 -0.011367840214 0.142344446447
+                    0.559648021714 -0.187048842993 -0.347275378831
+                    0.628045137487 -0.49004203146 -0.25601854986
+                    0.0131253211793 -0.0452531087163 -1.04307115161
+                    0.237508216947 -1.46142678493 -0.736358786714
+                    -0.895676128563 -0.318687513066 -0.365186396948
+                    0.430970903697 -0.787488119359 -0.307199186643
+                    0.265658119507 -0.652382901755 -0.505026169799
+                    0.0289211668008 0.478973221573 -0.386614093524
+                    0.403759173952 -0.841294375594 -0.337465718132
+                    -0.886167660691 0.385471659812 -0.263540860898
+                    -0.22409483174 0.511615336602 -0.294669624365
+                    -0.0604496281659 -1.35309621364 0.66635786078
+                    -0.016438743414 0.344884208817 -0.294841674107
+                    0.309354813508 0.217979447509 -0.212462319465
+                    0.0445749556146 0.248634195192 -0.168380054176
+                    -0.304472065847 1.06413204879 -0.17146894748
+                    0.461518895828 0.45243106567 0.392127255169
+                    0.209595245298 0.424160319449 0.804053156025
+                    -0.248962388723 -0.346525734059 -0.71191062899
+                    0.350239115248 -0.398965745697 0.0231361811483
+                    -0.433309937857 -0.093073123059 0.0802902282696
+                    -0.53953732859 0.974092981479 -0.029423015088
+                    -0.42668136907 -1.19697950933 -0.116929056155
+                    0.208465055685 0.320764739385 -0.421462513758
+                    0.087655482278 0.316767110244 0.460543338014
+                    -0.172300217602 -0.650299875803 0.181819510811
+                    -1.24245794729 0.815226594747 0.453294192037
+                    -0.506467224288 0.392555820133 -0.20021681064
+                    0.130600897187 0.413149826765 -0.136353755644
+                    1.12369312641 0.345946181788 0.259612826303
+                    0.530198507465 0.590091570148 0.670247130925
+                    0.0812239164837</seq>
+            </row>
+            <row id="x4320520072" otu="x4320381968">
+                <seq>-0.0370771373507 0.357942605312 -0.343634310177
+                    0.308769284753 -0.714453634965 0.192224453943
+                    0.260473082939 0.510320792999 0.0333868066547
+                    0.612338057631 -0.698816488698 0.295347800989
+                    0.374342048554 0.619886312768 0.313385796587
+                    -0.175637207883 -0.0850137620422 -1.05595068284
+                    -0.459830950399 -0.708209912557 -0.467632332089
+                    0.723733810128 0.536797386098 0.117471770611
+                    1.07358244873 -0.342871267084 -0.712833249901
+                    -0.152516902425 0.0695013343478 0.498771053871
+                    0.551120077301 0.082333343437 0.259269014419
+                    -0.524377551145 0.335869328301 -0.280428419006
+                    0.463265005627 -0.189571909462 -0.740115496432
+                    0.503704933956 -0.286856161745 0.509476647628
+                    -0.241150854116 -0.124790939143 -0.723851371671
+                    0.0223303155252 -0.145882354043 0.0498964616058
+                    0.193015258053 -0.315107498078 -0.388072404373
+                    0.440069132791 1.07660727722 0.136319685468
+                    -0.253995156656 1.22896389823 -0.313498089854
+                    -1.06350670529 0.424898445319 0.842500150418
+                    -0.416637846237 -0.0834511184085 -0.650355879384
+                    -0.282527553208 0.132443847752 -0.045286699138
+                    0.247367368002 0.966279352343 0.300226242549
+                    0.475447562798 0.0551760969574 0.134636207985
+                    -0.0649106168981 0.6629935592 -0.15269817835
+                    -0.209923061568 -0.425663979479 0.465257740102
+                    -0.245356662133 0.112365359575 -0.0980980108904
+                    0.287756761963 0.170698134229 0.305143020906
+                    0.0779542914275 0.22788588041 -0.405779273372
+                    -0.233382175255 -0.306197062198 -0.157148122775
+                    -0.506906536527 0.624722989806 -0.358371523753
+                    0.477679965995 0.906049210101 0.911611508735
+                    -0.635829530856 0.200580801051 0.034102999221
+                    0.605007160202</seq>
+            </row>
+            <row id="x4320537336" otu="x4320382032">
+                <seq>0.0335018973583 0.248432126521 -0.542815434295
+                    0.279271827645 -0.66524768507 0.236666835166
+                    0.366151091015 0.445638635779 -0.103117232127
+                    0.673556975794 -0.726947788299 0.280651181988
+                    0.342170657701 0.772790407315 0.31645178049
+                    -0.131820255349 0.0383008624016 -0.988364892489
+                    -0.505850232801 -0.503638811952 -0.371396434014
+                    0.75705759643 0.320758853159 0.314731282628
+                    1.12218818066 -0.649265610787 -0.82454933378
+                    -0.241103268725 -0.173930700278 0.486674534111
+                    0.384853038917 0.000609174526144 0.290588549064
+                    -0.563441804584 0.245054608587 -0.332952166865
+                    0.414354346291 -0.0303834473893 -0.596530119779
+                    0.748594616853 -0.211692763613 0.463923255563
+                    -0.342717639361 -0.0770545677407 -0.845930656457
+                    0.0545326195297 -0.174486722325 0.101407098494
+                    0.4519013305 -0.242889971653 -0.45157237224
+                    0.574546049593 1.19168873965 0.14713085497
+                    -0.480625735538 1.26934974078 -0.123473776504
+                    -1.02028890616 0.322860652163 0.830485576813
+                    -0.319738880674 0.04389790258 -0.60140668425
+                    -0.234794270343 0.100990556661 -0.0878112961639
+                    0.474363399123 0.925009956658 0.303610034068
+                    0.450947491973 0.0897309504061 0.349243396601
+                    -0.0949359202294 0.766542347706 -0.263450651591
+                    -0.228037366882 -0.516405270017 0.346325779582
+                    -0.139510110014 0.0636112447726 -0.223750504081
+                    0.358210459673 0.0346117358219 0.271681304894
+                    -0.068116632924 0.204554709403 -0.553945545798
+                    -0.115743378068 -0.174282031361 -0.39033779019
+                    -0.460400580846 0.477461609659 -0.203714884206
+                    0.698703277274 0.838358775588 0.977095325113
+                    -0.694530032051 0.212768293792 0.127362669668
+                    0.731018710256</seq>
+            </row>
+            <row id="x4320558696" otu="x4320382096">
+                <seq>0.00515578276523 0.389825530228 -0.523957059846
+                    0.272154521156 -0.66553192399 0.279306545237
+                    0.3510996808 0.400220168705 -0.0428537434794
+                    0.467923838165 -0.711307098045 0.17300053582
+                    0.338418389359 0.628447502463 0.301783973264
+                    -0.125930503048 0.0279218947564 -1.08243644893
+                    -0.758176864504 -0.601593183988 -0.341730660571
+                    0.847678118188 0.448166488349 0.111496063097
+                    1.07158735003 -0.590336062301 -0.922229198967
+                    -0.0706431080717 -0.0180341393501 0.551252621339
+                    0.760570615852 0.137136743346 0.356561460156
+                    -0.642443923941 0.377775748139 -0.167886558352
+                    0.552018461315 -0.242652203492 -0.652873854012
+                    0.70122436187 -0.285014750434 0.413517770338
+                    -0.336867382612 -0.197342347264 -0.754257949211
+                    -0.166159711972 -0.113918642718 0.0109596436649
+                    0.465256274866 -0.219475896162 -0.539691519945
+                    0.430890057612 1.06930589576 0.152195345918
+                    -0.50518347413 1.35546948861 -0.239494307984
+                    -1.0670281869 0.452947543957 0.868550894259
+                    -0.25770503268 -0.048560419681 -0.563689805563
+                    -0.375164262137 0.0705738369062 -0.103335452417
+                    0.247892933175 1.01031368546 0.26337835791
+                    0.42251287302 0.0876796869192 0.230638628946
+                    -0.158270288712 0.88413654812 -0.304790127485
+                    -0.256920605738 -0.521523982949 0.350968488767
+                    -0.196850472921 0.0857223916226 -0.0955366031392
+                    0.0384774808237 0.0339900371172 0.269953731943
+                    -0.00840298027825 0.111343110148 -0.478516829303
+                    -0.194897245834 0.10267289845 -0.466775718203
+                    -0.562716016463 0.555130824352 -0.189718138272
+                    0.527769032384 0.96902989054 0.971635226216
+                    -0.643591699032 0.237697115786 -0.088635076146
+                    0.730397711012</seq>
+            </row>
+            <row id="x4320584200" otu="x4320382160">
+                <seq>-0.0118368178975 0.225230677955 -0.347353229988
+                    0.456471027769 -0.848614844575 0.102604815869
+                    0.231309866463 0.613425888997 0.377891690763
+                    0.361723420326 -0.725696759409 0.0883025845172
+                    0.30997931667 0.337925167389 0.503496982204
+                    -0.240233990821 0.0116944281047 -1.01090941116
+                    -0.0853478393847 -0.615040593349 -0.208689696648
+                    0.484226818917 0.304303116853 -0.0820675956013
+                    0.917665281722 -0.337934506387 -0.789159060851
+                    -0.402680285447 0.0022496589233 0.383689182598
+                    0.420100018812 0.088292079371 0.133139375777
+                    -0.0691407253705 0.228315950253 -0.275735119404
+                    0.450240629434 -0.279575184063 -0.571257782142
+                    0.465639040811 -0.310883867769 0.456112638168
+                    -0.292964625292 0.0697211842851 -0.341232255109
+                    -0.0407012232878 -0.33426512328 0.313888661173
+                    -0.100515006735 -0.316223039892 -0.152757151355
+                    0.420560125377 0.692097704841 -0.216097896602
+                    -0.0412314000934 1.03421148878 -0.182248518303
+                    -0.914463229098 0.618091506047 0.803504596428
+                    -0.281312183327 -0.237168812725 -0.573010569625
+                    -0.112861059187 0.412074360584 -0.254001627382
+                    0.704524203297 0.68487359865 0.633722782338
+                    0.535006762794 -0.0427650074877 0.124392735796
+                    0.0563801080792 0.505927853415 -0.466557578906
+                    -0.565007394601 -0.508060395826 0.406002235214
+                    0.0879492831357 0.267080289007 -0.0767714277003
+                    0.514872641556 -0.016773983983 0.0488175182565
+                    -0.0308972648332 0.390998344354 -0.0177388139559
+                    -0.347051436981 -0.315593156045 -0.0456248612471
+                    -0.438685196737 0.51701220505 -0.349385064011
+                    0.101083581988 0.939233794672 1.18721532364
+                    -0.703897468839 0.230080840699 0.354754267331
+                    0.538874667488</seq>
+            </row>
+            <row id="x4320601464" otu="x4320382288">
+                <seq>0.185946506264 0.00642927440737 -0.127033261269
+                    0.540097670176 0.0337647774682 0.86606239485
+                    -0.105849114139 0.436699884017 -0.0817449380827
+                    0.973439429416 -0.596599692872 0.622830547558
+                    0.188706094171 0.659037082934 0.375572006656
+                    -0.225905144469 0.381922076429 -0.899976818488
+                    -0.592230000124 -1.0277939958 -0.466090452623
+                    1.08160984758 -0.407098612382 0.115373240668
+                    1.54285935318 -0.52678126617 -0.949321799621
+                    -0.0223121147886 0.277400185836 0.0140393541692
+                    0.390051595937 -0.0142075631641 -0.220196555285
+                    -0.192163782525 0.014805736095 -0.0103572278559
+                    0.692180631732 -0.475608841062 -0.860452436307
+                    1.00066369936 -0.276228498804 0.391629118412
+                    0.289921206266 0.0147190842736 -0.475879712993
+                    0.018726201059 -0.115953062969 0.293283030827
+                    0.150171937301 -0.417507683193 -0.678494496711
+                    0.39392539065 0.830142950913 0.895916115491
+                    0.14758405946 1.0899140657 -0.635354942677
+                    -0.945105840719 0.0884102702698 0.342363007181
+                    -0.0541768894194 -0.157335817645 -0.292165963918
+                    -0.696924877105 -0.0237747213563 -0.718707015995
+                    0.417144828812 0.483484691031 0.319895693763
+                    0.909705766445 -0.00278604177619 -0.21537265214
+                    -0.532221645805 0.419993894475 0.254528900322
+                    0.0944614070297 0.151039997738 0.243187846031
+                    -0.0427598431972 0.256089553962 0.395774348856
+                    0.627271782451 0.358308706755 0.395445276805
+                    -0.262974514624 -0.114777698135 0.124399438174
+                    0.0671197699281 0.00910296063067 0.136393386704
+                    -0.132240214673 0.326810264935 -0.243653774443
+                    0.129669694958 0.495289651928 0.380785930818
+                    -0.898984150368 -0.107958112608 0.175447164908
+                    1.23375357882</seq>
+            </row>
+            <row id="x4320622824" otu="x4320382352">
+                <seq>0.002839626096 -0.0328263599949 0.0905575850176
+                    0.938464323306 -0.588390068275 0.385837065655
+                    -0.117653164811 -0.30396992808 0.357221668991
+                    0.786070435176 -0.270028557837 0.517816175998
+                    0.391270956437 0.4581233086 -0.0776540063388
+                    -0.724580767277 0.134715444786 -0.728235075284
+                    -0.566230412462 -0.496551383005 -0.690782795655
+                    0.894558386875 -0.251637379203 0.80525912829
+                    0.338572741284 -0.71273078403 -0.910108289265
+                    -0.0676235294382 -0.510212084999 0.237139467524
+                    0.696984374136 0.275105570789 0.285754803216
+                    -0.441433201052 0.717113905584 -0.267193325301
+                    0.619142924842 -0.119242894085 -0.0770944935507
+                    0.618700102221 -0.0510658685731 0.0969628363322
+                    -0.185331422567 -0.083833517007 -0.197748662513
+                    -0.00442353884829 0.293991068453 0.0269033298924
+                    0.702444276249 0.136973910745 -0.730168239761
+                    -0.213862860674 0.504416572107 0.62520968089
+                    -0.780617845292 0.727491479457 -0.11999009269
+                    -0.479722568899 0.599483450116 0.4905435323
+                    -0.790861448615 0.385881977133 -0.738565051353
+                    0.570382970968 0.427334490404 -0.254355386963
+                    -0.0493328484502 0.76698829656 0.0557804045429
+                    0.921779268231 -0.672310743407 0.158268324824
+                    0.0996078665347 -0.422951548922 0.388246645951
+                    -0.35731766019 -0.486340123371 0.572851109129
+                    0.421441518192 0.542126047519 -0.145743286304
+                    -0.04188000321 0.270214139403 0.0337421945555
+                    0.628793366363 0.713611514292 -0.164887232621
+                    0.639771115406 0.0685767134889 0.880613363327
+                    -0.141588491903 -0.16397844831 -0.294362719097
+                    0.228099769721 0.818813889723 0.711154047779
+                    -0.368221110475 0.0167525037737 0.218216437097
+                    0.761216654399</seq>
+            </row>
+            <row id="x4320956584" otu="x4320395856">
+                <seq>-0.299320195268 -0.358303438788 0.511640822248
+                    0.530536964266 -0.0309734351761 0.15835056145
+                    -0.355109947942 -0.0835141607595 0.392928271761
+                    0.467622907582 -0.323824570071 0.188976557953
+                    -0.39195095503 0.412392289314 -0.1659140723
+                    -0.838345816352 0.209742732007 -1.03969211435
+                    0.011387753388 0.0364960818969 -0.730000382987
+                    0.78475541008 0.169154464792 -0.188015407769
+                    -0.770962674757 0.0330306294713 -0.741356388425
+                    -0.207571611872 0.316005715349 0.199908170269
+                    -0.162916997986 -0.190856654707 -0.484220442422
+                    -0.496430342968 -0.399485570033 -0.929774667605
+                    0.846870761565 0.34952205906 -1.23185519198
+                    0.138806205071 -0.061284945202 0.248910085933
+                    -0.227214723075 0.394532144243 -0.148552367373
+                    0.234975198754 -0.47939165717 -0.42893277515
+                    -0.194286079903 0.13714739209 -0.457071560877
+                    -0.262502770001 0.521116078108 0.0956334871956
+                    0.654242620815 0.51450042626 0.18789858702
+                    0.221381246908 0.632180711502 0.624679555762
+                    0.27467235892 -0.72050800995 0.236956708786
+                    -0.168563527634 -0.200864958663 -0.568040851103
+                    0.507273918732 0.645520560672 0.0786999897149
+                    0.162509421898 0.255530159271 0.260078850788
+                    0.177419953615 -0.0141026324552 -0.0636720476503
+                    0.0254338743964 0.235100719616 -0.311213582559
+                    0.73168830757 0.210713932984 0.681706845867
+                    0.0598293270463 -0.0616968436911 0.410157316899
+                    -0.0103146402922 0.677200566547 -0.281765269035
+                    0.11507502909 0.265209913983 0.390383963678
+                    -0.154737824965 -0.102318752524 -0.271719146005
+                    -0.11049465485 0.0405646667745 0.461733308208
+                    0.335642227885 0.351908234462 0.560669951535
+                    0.270452615091</seq>
+            </row>
+            <row id="x4320640088" otu="x4320382480">
+                <seq>-0.0891360076536 0.4772211193 0.285796191482
+                    0.762346581487 -0.678493513199 0.069435740707
+                    -0.511716174576 -0.156369352754 1.38052606173
+                    -0.253712007287 -0.653794225491 -0.082514530813
+                    -0.0623851897937 0.609710646239 -0.485067931506
+                    -0.743964215281 0.32695513798 -0.905720707349
+                    -0.377600184308 -0.973518114615 -0.377944048981
+                    0.679591080565 -0.35670190453 0.177452817659
+                    0.0505357653972 -0.831850378381 -0.646387754094
+                    -0.368752321775 0.084287978497 0.5552718043
+                    0.733136747225 0.242491809191 -0.0674265936471
+                    -0.0367442890343 0.635109207275 -0.568073405915
+                    0.295570306335 0.370209611888 -0.710172699521
+                    0.661269612635 -0.251748260718 -0.187489180526
+                    -0.529719046314 -0.127629413876 0.157733806493
+                    -0.224539088697 0.0960441200802 0.0652831796293
+                    0.172024146433 -0.320588520354 -0.468343844276
+                    0.181772874347 0.635491220645 1.16541204168
+                    -0.517718545746 0.770289991658 0.123782602561
+                    -0.286454167662 0.201238894399 1.25370326528
+                    -0.284844137127 0.180354817146 -0.629635062359
+                    0.180574436779 0.580434359916 -0.543006471187
+                    0.638368291721 0.714077701547 0.275855368786
+                    0.300673452095 -0.044238649058 0.290992833197
+                    -0.281471526729 -0.268242600107 -0.551997640677
+                    -0.445995981222 -0.184391134146 0.429685090744
+                    0.549540493912 0.280424211975 0.704797125905
+                    -0.165636299315 0.836011334797 0.0859330696468
+                    -0.0981730764467 0.63644879965 -0.336044369001
+                    0.244370764781 -0.000215723270168 0.663265691432
+                    -0.195221890578 -0.0939075798281 -0.292198602952
+                    0.37880435685 0.734743000033 0.189112136805
+                    -0.258324235632 -0.269874443945 0.00799953866973
+                    0.351686394562</seq>
+            </row>
+            <row id="x4320661448" otu="x4320382544">
+                <seq>-0.406487195211 0.236755350917 0.0861938792995
+                    1.20988488109 -0.509820421375 -0.112814912464
+                    -0.340033911792 -0.308982496289 0.152822285153
+                    0.524103905227 -0.856669473628 0.29026441263
+                    0.516655868243 0.709046720532 -0.0531052995917
+                    -0.613753361079 0.0509792740828 -0.740256525137
+                    -0.555055588795 -0.373188872464 -0.750125336118
+                    0.819807369496 0.35655587741 0.881306190583
+                    0.944596613891 -1.01153089916 -0.842140817531
+                    -0.378863354278 -0.278241210598 0.504907845778
+                    0.25614867785 0.238630605257 0.239364690368
+                    -0.236191287951 0.472311158906 -0.86502162047
+                    0.21029997921 -0.141912832052 -0.428798187228
+                    0.0369937879665 -0.385428875463 -0.0639900209381
+                    0.171417327564 0.0856962080659 -0.127282039294
+                    -0.617864217262 0.411167745221 0.324490540631
+                    0.312986936757 0.152393356607 -0.436936968257
+                    -0.0745746578534 0.431162900207 0.923262602062
+                    -0.469095640713 0.787683258727 -0.121107423328
+                    -0.171210011174 0.402629764154 0.391693933419
+                    -0.396709717407 0.397854467385 -0.850838680759
+                    0.112108814498 0.225963111696 -0.418938388809
+                    0.635453774124 0.923422543074 0.257486925352
+                    0.89901588653 -0.204306816687 0.0474290776877
+                    -0.0276887205971 -0.403746453856 0.0702165900037
+                    -0.172961448009 -0.0464045757577 0.274577259423
+                    -0.299074775793 0.295613146714 0.299843812148
+                    0.109838047058 0.373550133626 -0.217258739263
+                    0.325757292527 0.188872667685 -0.334390880188
+                    0.306664146363 -0.31516124799 0.675813595306
+                    -0.285496653732 -0.479962884152 -0.273281320804
+                    0.593093440465 0.52545771495 0.553451765076
+                    -0.114391966733 -0.418941706914 0.678524737439
+                    0.868452036667</seq>
+            </row>
+            <row id="x4320686952" otu="x4320382608">
+                <seq>-0.310625400627 0.0848496299315 0.227877771497
+                    0.942954976636 -0.923267844392 -0.0473324563073
+                    -0.48709756555 -0.0998301549562 0.1525278735
+                    0.623575542879 -0.940279990829 0.328988062893
+                    0.558615831493 0.572999931496 0.0191416964326
+                    -0.550283273367 -0.0906410346764 -0.881230510588
+                    -0.375908361046 -0.610681005944 -0.741664452767
+                    0.827219915526 0.052994307702 0.736928638301
+                    0.94032403495 -0.902546715435 -0.844585652044
+                    -0.810346553477 0.233771373744 0.799356202575
+                    0.295169553541 0.575147308037 0.120697958737
+                    -0.284581682654 0.352888622401 -0.686108766853
+                    0.606098812048 0.00627428530955 -0.535609413969
+                    0.273030783077 -0.206142970858 0.149578613279
+                    0.0187212095597 0.186176851239 -0.385624416286
+                    -0.400569868614 0.241748902178 0.0763706141994
+                    0.0365692081247 0.200148163116 -0.320531468594
+                    -0.0118421059573 0.298050742552 0.844598268033
+                    -0.157636974793 0.723906988422 0.0747525831758
+                    -0.0103959137225 0.304822445407 0.631129070343
+                    -0.488612022914 0.520031596034 -0.984286833685
+                    0.19483231896 0.374496392455 -0.343805237894
+                    0.452610945567 0.762566576221 0.247356604128
+                    0.937527598051 -0.264727623938 -0.0883993040012
+                    -0.156932464456 -0.492141434652 -0.275927168811
+                    -0.505306580801 0.17586533959 0.495759169989
+                    -0.456278094911 0.294009995671 0.539741879821
+                    -0.0993213213017 0.158848589605 -0.124402256496
+                    0.198417857997 0.150181880046 0.0424946856462
+                    0.138077386201 -0.337543071596 0.621347849732
+                    -0.130240811915 -0.200438021262 -0.211913886973
+                    0.259187587455 0.455040897435 0.356763993248
+                    0.00502234326952 -0.147742970823 0.800545064377
+                    0.608414267971</seq>
+            </row>
+            <row id="x4320704216" otu="x4320382672">
+                <seq>0.37859004754 0.215674776836 0.138829612654
+                    -0.055765747572 -0.411400349544 0.339656053058
+                    -0.563607482645 0.190018972693 0.921141590664
+                    0.294394995601 0.0364938461257 0.236868464888
+                    0.0830375631168 0.694877244946 -0.3606070318
+                    -0.688934473231 -0.00485553411322 -0.549319606284
+                    -0.42794767503 -0.946535348987 -0.399130643933
+                    0.88532173285 -0.505379143384 0.633124119395
+                    0.632188042932 -0.153993321707 -0.986327475006
+                    0.349826325193 -0.10609477246 0.359128821361
+                    0.0293203030347 -0.0678019875437 -0.285636695589
+                    -0.515079369442 0.124661302631 -0.430372417229
+                    0.452025613214 0.496979092448 -1.35868502199
+                    0.824441641023 -0.041235792691 0.00697870870537
+                    0.0942135291973 -0.0935020271213 -0.057978163071
+                    0.101419091885 0.317431940036 0.280800496383
+                    0.0606407350644 0.0115767872651 -0.378465601211
+                    0.102209733127 0.74501175453 0.698636979819
+                    -0.404050952093 0.330402685391 -0.14559118084
+                    -0.360934732186 1.18045538927 0.691416438894
+                    -0.146838798121 0.724093390558 -1.06270283722
+                    -0.519876338216 -0.327496989429 -0.797830861929
+                    0.635468089959 0.0693070383116 0.112775075177
+                    0.423638669025 -0.350671760221 0.561134921723
+                    -0.079984137623 -0.54788251851 -0.212555879953
+                    0.0486493967007 -0.47093702296 1.06221342907
+                    0.644215035096 0.549454797242 0.561847545072
+                    0.0130081398468 0.377349557561 -0.164809170541
+                    0.32865527267 0.518951110801 -0.620156940724
+                    -0.543739244756 0.185947994631 0.839411587573
+                    -0.73525257817 0.257381629407 -0.320763210661
+                    -0.0194722750024 0.740698656938 0.0718824931227
+                    -0.201513000636 0.0796775139523 0.0932165990511
+                    0.285931294416</seq>
+            </row>
+            <row id="x4320725576" otu="x4320382736">
+                <seq>0.111171434472 -0.14480593458 0.349768236741
+                    0.423293994934 -0.310033183749 0.731547505464
+                    -0.599506577769 0.0674869198094 0.554917946027
+                    0.496870051175 -0.53697040357 0.155101127932
+                    0.115213467207 0.2773715851 -0.00609790646294
+                    -0.636265076836 0.0916060079509 -0.981290211546
+                    -0.578021680991 -0.200978145724 -0.0919589067032
+                    1.05567650482 -0.366800455965 0.188638635598
+                    0.361518340721 -0.238802116475 -0.796303362591
+                    0.28541224101 0.0760226927502 0.636519573183
+                    0.306418074531 0.227707523795 -0.0846340246832
+                    -0.502723040285 0.200143509123 -0.267223445221
+                    0.718403010184 -0.265356402358 -0.698631136844
+                    1.00054992982 -0.293453679609 0.532256400118
+                    0.20699237603 -0.0365454581451 -0.512861207702
+                    0.479850117167 0.188831498824 0.515772273604
+                    -0.0248157711932 0.00746748760196 -0.229580295453
+                    -0.0698137552034 0.19258124657 0.668784848151
+                    -0.277421347065 0.690377356365 0.0232796995408
+                    -0.630559573173 0.77593116485 0.743718858268
+                    0.0880845057812 0.741707374047 -0.596143981141
+                    -0.0553575416064 0.243855795789 -0.62110647407
+                    0.307630306062 0.0410002536015 0.479513619965
+                    0.277630129482 -0.208148541863 0.250109927582
+                    -0.244278777025 -0.606033733596 0.414923701389
+                    -0.596632042554 -0.608246292655 0.361780546256
+                    0.321971025776 0.208388158786 0.212131041972
+                    0.276666300328 0.167032195138 0.209131594992
+                    0.37433926773 0.588946891469 -0.118623925206
+                    0.275772357498 -0.0183334045606 0.147931668138
+                    -0.0182160097664 -0.15758177157 -0.537332695036
+                    0.0954114645455 0.544467322609 0.0702562528174
+                    -0.1983244673 0.0256630311517 0.595313450167
+                    0.694698385529</seq>
+            </row>
+            <row id="x4320742840" otu="x4320382800">
+                <seq>-0.195641843528 0.180860627838 0.419261068791
+                    0.716413666136 -0.305549984522 0.684372912299
+                    -0.665271669478 -0.210273726367 0.794888494136
+                    0.502050656056 -0.458429905219 0.221051491841
+                    0.256237231582 0.257814808128 0.296549089526
+                    -0.737641564182 0.454276520057 -1.06017210204
+                    -1.20667570598 -0.647822932029 0.0568030251477
+                    0.813877960728 -0.336552457745 0.313077685177
+                    0.247694156157 -0.352868286788 -1.12659084581
+                    0.15591795923 0.0107558898858 0.00392608255147
+                    0.32121357248 -0.0760586831881 -0.346658497556
+                    -0.753261253525 0.16144778804 -0.308588573422
+                    0.814200395852 0.406216467851 -0.722754767646
+                    0.57181805649 -0.311309604832 0.115528355918
+                    0.173238863572 0.117743950523 -0.439431316248
+                    -0.226545486531 0.62334757461 0.537725746718
+                    -0.0898018477248 -0.211774539749 -0.528912282613
+                    -0.114502837557 0.294517481962 0.214429920003
+                    -0.444166468842 0.766985923534 -0.373167173695
+                    -0.95505899905 0.755200726503 0.912104537176
+                    0.154063301343 0.704856090546 -0.152716539517
+                    -0.0562039453002 0.947084552326 -0.660806679643
+                    0.818345329238 0.182224038444 0.0612617258315
+                    0.929494773135 -0.143168708456 0.664153449367
+                    -0.165345986924 -0.383301896758 -0.105099583359
+                    -0.440427478715 -0.00155839040015 0.988525572494
+                    0.161159200897 0.535665513335 0.3741846922
+                    0.307791506237 -0.129081964351 0.0165436428408
+                    0.0947979770259 0.994960044471 0.0587626058205
+                    0.369296547289 -0.47154542017 0.0944428561711
+                    0.140670395755 -0.25742629624 -0.277802313972
+                    0.188778095102 0.419947404598 -0.213897484569
+                    0.222014848223 0.147699412816 0.372419965221
+                    0.843474410618</seq>
+            </row>
+            <row id="x4320973848" otu="x4320395920">
+                <seq>0.360538255562 -0.399655759393 0.527640899537
+                    0.445308614141 -0.173118337943 0.353202702799
+                    0.141078549293 -0.366550103526 0.666718915016
+                    0.766334717445 -0.645222587758 0.230938562878
+                    0.146757686253 0.430725843692 -0.315565295532
+                    -0.667182511662 -0.378508576682 -0.475670805344
+                    -0.308660910636 -0.193991545341 -0.669667257128
+                    0.675068555945 0.239162031497 0.183790138032
+                    -0.484318823791 -0.832559180796 -0.506566635857
+                    -0.055834541236 0.357971941795 0.305675557653
+                    0.405714049209 -0.70761121178 -0.260812903706
+                    -0.276454846746 -0.383901116457 -0.890148616943
+                    0.289652420085 0.146137294938 -0.703063290615
+                    -0.084815172653 0.149437579353 0.0582842269178
+                    -0.478541846881 -0.417266378081 -0.0209530260007
+                    3.93222907014e-09 -0.773081037803 0.244610367493
+                    0.439652342738 0.0153723415468 -0.720875774562
+                    -0.0431366237333 0.325662669357 0.25495165813
+                    0.430815792647 0.526796183294 -0.176700914143
+                    0.194850160538 0.333138926482 0.452924128967
+                    -0.281384004378 -0.368780324 -0.0507018334708
+                    0.0218815036213 0.374410591537 -0.804372900387
+                    0.0378004606798 0.60165879748 0.332456004874
+                    0.297630226543 0.433107036184 0.90742475244
+                    0.0281428489702 -0.0680352873675 0.318782168052
+                    0.0106051681819 -0.238598287131 0.157194494465
+                    1.07703538156 0.465955523866 1.33980842313
+                    0.00865534304106 0.254968338224 0.207751777316
+                    0.371757077645 0.402517716718 -0.103332718248
+                    0.261844668518 0.0854768121246 0.193375951681
+                    0.0729174265161 0.266964189605 0.133530201989
+                    -0.178386631158 0.236059806698 0.0553815581514
+                    -0.230535241938 0.891577915955 0.313194449126
+                    -0.054601310321</seq>
+            </row>
+            <row id="x4320768344" otu="x4320382864">
+                <seq>-0.333349760316 0.937999681209 0.190416841942
+                    0.633837526505 0.290311325173 0.341723812225
+                    0.45252819042 0.425907653485 -0.222188023458
+                    0.72341162552 -0.198115059243 0.0486237497875
+                    0.392923524257 0.953772343485 0.241427801062
+                    -0.758469925674 0.0186965927653 -0.0853828282173
+                    0.250860100669 -0.0512916615583 -0.334195832886
+                    0.613714374891 -0.933545403872 0.651373521572
+                    0.409776039662 -0.659606444172 0.26172193964
+                    -0.0917732596063 -0.33267483085 0.412940652279
+                    0.309429964886 0.557907367203 0.145576783843
+                    -0.305849788314 0.454780767616 -0.416187803246
+                    0.0751229573712 0.197157127139 -0.964492347627
+                    0.413876106787 -0.465214852098 0.277552914671
+                    0.320882648144 -0.0375747149239 -0.253494684642
+                    -0.200623853352 -0.26186605583 0.619014740548
+                    0.311089342833 -0.208554320153 -0.607873455512
+                    0.42404199286 0.609935166738 -0.151018610788
+                    -0.203616629588 0.886014721768 -0.465249247312
+                    -0.716717522615 0.373079559369 0.749834104327
+                    -0.688533050329 -0.0432921582281 -0.622593417169
+                    0.0890962584083 -0.13474707326 -0.501894840413
+                    -0.0357460357622 -0.113716625785 0.557279941864
+                    0.255985368953 -0.0791952002678 0.129456471255
+                    -0.209250591871 0.192257478518 -0.149748180833
+                    -0.0310389923151 -0.149252528097 0.868699360752
+                    -0.198516933799 0.342389606078 0.525031585957
+                    -0.173721312389 0.106448164786 0.112931518974
+                    -0.136646104963 0.521081701104 -0.540620884262
+                    0.659762629122 0.15869520249 0.272776914354
+                    0.171029934146 0.102787985203 0.258366078606
+                    0.331066382208 0.122834013115 0.183847466985
+                    -0.00118580075695 -0.376858162282 0.30070882239
+                    0.183057034665</seq>
+            </row>
+            <row id="x4320789704" otu="x4320382928">
+                <seq>0.11615122835 0.457857813617 0.674789492308
+                    0.437907751253 -0.0889010327804 0.573852357781
+                    -0.336788309082 0.595286711623 0.429187110814
+                    0.539235151514 0.0285985078064 0.569747461907
+                    0.297323675873 0.056330768672 0.686024235858
+                    -0.669459607587 -0.499477990246 -0.184948903876
+                    -0.166401854301 -0.797706719421 -0.297720767212
+                    0.555376944191 -0.578254090407 0.961245337047
+                    0.540629167825 -0.614568965938 -0.380923431907
+                    0.0960559356897 0.410306141421 0.86223535738
+                    -0.119692123228 -0.0628686568976 -0.363864747399
+                    -0.215274585288 -0.100237206075 -0.456641298039
+                    0.849049249444 -0.384919610574 -0.820864292219
+                    0.763927997888 -0.645403429483 0.251939293164
+                    -0.115243480515 -0.190227614941 -0.50923898634
+                    0.180972516996 0.346905530495 0.235701058854
+                    0.410071963046 -0.120962546754 -0.585064292567
+                    -0.371385848654 0.341012147976 0.764209154841
+                    0.0408825775264 0.440090282763 -0.314246791982
+                    -0.533085258673 0.548059434658 0.194729068304
+                    -0.59679730126 0.399034616025 -0.342222816753
+                    0.0650985539801 0.204807216093 -0.0356643610494
+                    0.415398084903 0.113169662184 0.630033559301
+                    0.343073911215 -0.149100926089 0.112398484425
+                    0.262782208415 0.153241591554 0.0901231653197
+                    -0.202133059848 -0.341056538803 0.191445614577
+                    -0.162802994956 0.671686805868 0.23669471253
+                    -0.200052099773 0.368376809743 0.116352215707
+                    -0.0860409806397 0.593296065966 -0.344354977998
+                    0.866641102459 0.193096154639 0.104064707399
+                    -0.279560207285 -0.0301239713628 0.613158315193
+                    0.232356963241 -0.182325029587 -0.158361390186
+                    -0.465341768276 0.0851215900622 0.689299700081
+                    -0.0648517325651</seq>
+            </row>
+        </matrix>
+    </characters>
+</nex:nexml>
diff --git a/doc/source/examples/pythonidae_cytb.fasta b/doc/source/examples/pythonidae_cytb.fasta
new file mode 100644
index 0000000..a0c5414
--- /dev/null
+++ b/doc/source/examples/pythonidae_cytb.fasta
@@ -0,0 +1,99 @@
+>Xenopeltis unicolor
+???????????????????????????????????????????????????????AAACT--AAAATTCCCATTT-CCCACATATAT----GGATATT--ACGAAG--AAAAAA---GGACTAAAAAAAG---TCCTCTCTCGACCCCCCCCCTACCCCCCCCC-ACAGTT---AGTACGGGT------TTTCC-ATATATGTAACTCTTATAGATTTGCCTATCAAGGC-ATACTATGTATAATCATACATTAATGGCTTGCCCCATGAATATTAAACAGGAATTTCCCTTTAAATATTTTAGCCTAAAAAAGCCTTCGTACAGAACTTTAATA----CCACATTTCT-CAGTCGTTCAATGAAGCACGGAT-ATAGTA--TTGTT-GATAACCATGACTATCC--ACATCCAACTTGTCTTACAGGATCTTGCTA-TTCACGTGAAATCCTCTATCCTTTCATAGCAGGCATACCATTCGACTTCTCACG [...]
+
+>Loxocemus bicolor
+???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+
+>Morelia spilota
+?GCCACACCCCTCACTTCCTCC-------------------CAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAGCCAAAAATCCATATAATTTACCACAAAATAAAG-----CTCTCTC-TCGGCCCCCCCCCTACCCCCCCCC---AARAA-CATTGGGGAR------ACCGGCACACAAAACCA--TTARAAAACTCTTAACAAACCT--CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAATATTCTG----TCCTCATTCTCTTGGTCGTTCTATGCAGCACGAGTT--AACTA-ATCTT-ATTAATCATGGATATTC-TCAAC-CTAAGGGTGTCTCTTAGTCTAGCG-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Morelia bredli
+?GCCAC-CCCCTCACTTCCT---------------------TAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAACCAAAAATCCATATAATTTACCACAAAATAAAG-----YTYTYTY-TYGGCCCCCCCCCTACMCCCCCCC--AAAGAA-CATTGGGAAA------ACCGGCACACAAAACTA--TTAGAAAACTCTTAACAAACCC--CTCTATGTATAATCTTACATTAATGGTTTGCCTCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAACATTCCG----TCCTCATTCTCCTGGTCGTTCTATGCAGCATGAGTT--AACCA-ATCTT-ATTGATCATGGATATTC-TTAAC-CTAAGGGTGTCTCTTATCCTAGCA-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Morelia carinata
+?GCCACAACCCTCACTTCCT--------------------AAAACCATAGTCTGTAAA--TACAGACTATGGT--TCTTACCTCAATATA-AAGCCAAAAACCCATATAAAACGC-ACACAATAAAACG---CTCTC-C-TCGGCCCCCCCCCTACCCCCCCC--ATAATAAACATAGGAGAA------ATCAGCACACAAAACTA--CTGAAGATACCCCCTCATCTCT--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGTCTAAATAAGCCTTCGTACAGAATATTTAG----TCCTCATTTTC-TGGTCGTTCAATGCAACACGGATT--AATGG-ATCTT-ACTAACCATGGCTATCC-TTGAT-CAAGKGGKGTCTYTTAATCTAGTA-CTTCCCGTGAAACCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTYTCACG [...]
+
+>Morelia amethistina
+?ACCACACCCCTCACTTCCTC--------------------CAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAGCCAAAAATCCATATAATTTACCACAAAATAAAG-----CTCTCTC-TCGGCCCCCCCCCTACCCCCCCCC--AAAAAAACATTGGGGAA------ACCGGCACACAAAACCA--TTAAAAAACTCTTAACAAACCT--CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTTTATTATTTTAGTCTAAAATGGCCTTTGTACAAAATATTCTG----TCCTCATTTTCTTGGTCGTTCTATGCAGCATGAGCT--AACTA-ATCTT-ATTAATCATGGATATTC-TTAAC-CTARGGGTGTCTCTTAGTCTAGCG-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Morelia oenpelliensis
+?GCCAC-MCCCTCACTTCCT---------------------TAACCATAGTCTGTAA-TTTACAGACTATGGT--CCATGCCTTAATATA-AAACCAGAAATCCATATAATTTACCACCAAATAAAG-----YTYTYTY-TYGGCCCCCCCCCTACCCCCCCCC--AAAGAA-CATTGGGAGA------ACCGGCACACAAAATTA--TTAGAAGACTTTTAACATACCC--CTCTATGGATAATTTTACATTAATGGTTTGCCTCATGRATATTAAGCAGGGAWTTCCCTTTTATTATTTTAGTCTAAAACGGCCCTTGTACCAGACATTCCG----TCCTCATTCTCCTGGTCGTTCTATGCAGCATGAGTT--AACCA-ATCTT-ATTGATCATGGATATTCCTTGAC-CTAAGGGTGTSTCTTATCCTAGCA-CTTCCCGTGAAATCCT-TATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Morelia boeleni
+??AAACAACCCTCACTTCCTT--------------------CAACCATAGTCTGGA---TTCCAGACTATGGT--TGTTACCTAAAAAACTAAAGAAAAAATCCATATAAAC----------TAAAAA----CTCTCTC-TCGGCCCCCCCCCTACMCCCCCC---GGGTCAGCACAAAAAC-------ATCAC---------------CCAAAAATCCCCCTTTTT-CC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATAATAAGCAGGAATTTCCCTTTAAATATTTTAGTCTAAAATAGCCTTT-TACATAAAATTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAT-ATCTT-ATTGATCATGGATATCC-TTGGT-CTAATGGTGTCTCTTAGTCTAACA-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Morelia viridisS
+?A-ATCAACCCTCACTTCCTCC-------------------TAGCCATAGTCTGTAAG-TTACAGACTATGGCT--CATGCCTTAATATATAAACCAAAAACCCATATAAT-CACTGAACAATAAAA-----CTYTYTYCTCGGCCCCCCCCCTACCCCCCCC---GGAAAACCATAAAA---------ATCAGCACATAAATAAA--CCTACTAATCCCATTGCTTCCT---CCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAATTAGCTTCCGTACAAAATATCTAG----CCCTCATTTTC-TGGTCGTTCAATGCAATCGGGGTT--AATAA-ATCTT-ACTAACCATGGATATCC-TTGAT-CAGGTGGTGTCTCTTAATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Morelia viridisN
+?A-CTCAACCCTCACTTCCTTC-------------------CAGCCATAGTCTGTAAA-TTACAGGCTATGGCT--CATACCTTGATATATAAACCAAAAACCCATATAATTCACCACACAACAAAA-----CTCTCTCCTCGGCCCCCCCC-TACCCCCCCCC--GGAAAAACATAGAAGAA------GTCAGCACAATTAAACT--TACTGATAACCCCTTGCTTCCT--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAAATAGCCTTTGTACAAAATACCTTG----TCCTCATTTTC-TGGTCGTYCAATGCAATCGGGTCT--AACAA-ATCTT-ACTAACCATGGATATCC-TTGAT-CAAGTTGTGTCTCTTAATCTAGTAACTTCCCGTGAAATCCTCTATCCT-CCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Liasis olivaceus
+?GCCACAACCCTCACTTCCCC-----------------ACCTAACCATAGTCTGTAAA-TTACAGACTATGGT--TGATACCTTAATACA-AAGCCGAAACCCCATATAAACAGCACCACAACAAAA----CTCTACTC-TCGGCCCCCCCCCTACMCCCCCCC--ACAAAAACATAGGARAA------ATCAGCACAAACAATC---MCCTAAAATCCCCCCTTAACCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGTCTGAATTAGCCCTTGTACAAAAAATCTTG----TCCTCATTTTC-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Liasis mackloti
+?ACCACAACCCTCACTTCCTT--------------------CAGCCATAGTCTGTAA-TTTACAGGCTATGGC--TGATACCTTAATATA-AAACCAAAATCCCATATAAATACCACCACAACAAAG-----CTCTCTC-TCGGCCCCCCCCCTACMCCCCCCC--ACCAAAACATAGAARAA------ATCAGCACAATAAATA---CTARAAGTATTTGCTTCCTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTTAGCAGGAATTTCCCTTTAAATATTTTAGCCTAAAATAGCCTTTGTACACAAAACTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATTTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCATGATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Liasis fuscus
+?ACCACAACCCTCACTTCCTC--------------------CAGCCATAGTCTGTAA-TTTACAGGCTATGGC--TGATACCTTAATATA-AAACCAAAATCCCATATAAATACCACCACAACAAAG-----CTCTCTY-TCGGCCCCCCCCCTACCCCCCCCC--ACCAAAACATAGAAGAA------ATCAGCACA-AAATAACA-CTAGAAGTATTACTTCCTTGCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTTAGCAGGAATTTCCCTTCAAATATTTTAGCCTAAAATAGCCTTCATACATAAAATTATG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATGG-ATTTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCATGATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Liasis albertisii
+????GCTCCTCTCACTTCCTC--------------------AGACCACAGTCTGCAA---TGCAGACTGTGGTTTTGTGCCCAGAATATA--AACCAAAAAACCATATAAACAACACCRCGACAAAAAAGA--TCTCTC-TCGGCCCCCCCCMTACMCCCCCCC--AAAAAAACATARAGGAA------ATCAG--------------------TTCATARACT--------CTCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAATCTAAATTAGCCTTCGTACACAAAATTCAG----TCCTCATTCTC-TGGTCGTTCAATGCAGCACGGATT--AATCA-GTCTT-ACTAACCATGGATATCC-TTGAT-CTAGTCGTCTCTCTTAGTCTAACA-CTTCCCGTGAAACCCTCTATCCTTCCACTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Apodora papuana
+?GCCACAACCCTCAMTTCCTT--------------------CAGCCACAGTYTGTAA-TTTACAGACTGTGGC--CCATGCCTCAATATA-AAGCCGAAAATCCATATAAATAACACCAAAACAAAG-----CTYTCCC-TYGGCCCCCCCCCTACCCCCCCCC--AAAAAAATATAGAGAA------CTATAGAACAAATAACCA---CCAAGAAGTTCACTATCCCCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTTCCCTTTAAATATTTTAGTCTAAATATGCCCTTGTACACAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCCAGGAAT--AATCA-ATCTT-ATTAACCATGGATATCC-TTGAT-CTAGTGGTGTCTCTTGGTCTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Bothrochilus boa
+??GCACCGCCCTCACTTCCTC--------------------CGACCGCAGTCTGCC----AGCAGGCTGCGGTC-GCATGCCCAAAAACACAAACCAAAAAACCATATAAACAACGCCGCAACAAAAGG----YCYCYC-TCGGCCCCCCCCCTAC-CCCCCCC-ACAAAAAACATAGAGAAA------ATCAG--------------------TTTTCACAC---------CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTTCCCTTCAAATATTTTAATCTAAAATAGCCTTTGTACATAAAATTTGC----CCCTCATTTCT-TGGTCGTTCAATGCAGCATGGATT--AATCA-GTCTT-ATTAACCATGGATATTC-TCAGT-CTAGTTGTGTCTCTTAGCCTAACA-CTTCCCGTGAAATCCTCTATCCTTCCACTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Antaresia maculosa
+?ACCACAACCCTCACTTCCTCC--------------------AGCCATAGTCTGTAAATTTACAGACTATGGC--TGATACCTCAACATA-CAGCCAAAATTCCATATAATAT-CCCCACAACAA-----CTYTYTYTCYTYGGCCCCCCCCCTACCCCCCCC---ATCCAAATATATAAGAA------ATCAGCACAATAAACCT--ACTAGGAATTGCCAATAACTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTCAAATATTTTAGTCTAAAATAGCCTTTGTACAGAATATTTAG----TCCTCATTTCT-TGGTCGTTCAATGCAACACGGATT---ATCAGTTCTT-ACTAACCATGGATATCC-TTGAT-CTAGTGGTGTCTCTTAATCTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Antaresia stimsoni
+?ACCACAACCCTCAMTTCCTTTCAGCCATAGTCTGTAAATACAGCCATAGTCTGTAAA--TACAGACTATGGC--TGATACCGCCATATA-GAGCCGAAAACCCATATAATATGCCACACAATAAA------CTYTYTCCTYGGCCCCCCCCCTACCCCCCCCC--ATTAAAACATATGGGAA------AACAGCACAAATACATA--TTAAAGAATGTCCAATTAATCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGCCTAAAATGTCCTTCGTACAGAATATTAAG----TCCTCATTTTC-TGGTCGTTCAATGCAATCAGGATT--AATCA-TTCTT-ACTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCTTAATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Antaresia childreni
+?GCCACAACCCTCACTTCCTTCCAGCCATAGTCTGTAAATACAGCCATAGTCTGTAAA--TACAGACTATGGC--TGATGCCGCCATATA-GAGCCGAAAAACCATATAATATACCACACAATAAA------CTCTCTCCTCGGCCCCCCCCCTACCCCCCCCC--ATTAAAACATATGGGAA------AGCAGCACAAATACATA--TTAAAGAATGTCCAATTAATCC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCAGGAATTCCCCTTTAAATATTTTAGTCTAAAATGTCCTTTGTACAGAATATTTAG----CCCTCATTCTC-TGGTCGTTCAATGCAATCGGGATT--AATCA-TTCTT-AATAACCATGACTATCC-TTGAT-CTAGTGGTGTCCCTTGATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTTCATAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Antaresia perthensis
+?ATCA-AACCC------------------------AAAACTAAGCCACAGCCTGTTT--AAACAGGCTGTGGC--TGATGCCGCCATACA-AAGCCGAAATTCCATATAACACACCACAATATAAA------CTYTYTCCTYGGCCCCCCCCCTACCCCCCCCC--AACCAAACATATAAGAA------AACAGAACAGTGAACAA--TTAGAGATTCTCCAATTAACTC--TCCTATGTATAATCTTACATTAATGGTTTGCCCCATGGATATTAAGCAGGAATTTCCCTTCAAATATTTTAGTCTAAAATAGCCTTTGTACAGTCTATTTAG----TCCTCATTTTC-TGGTCGTTCAATGCAGCATGGATT--AATCA-TTCTT-ACCGATCATGACTATCC-TTGAT-CTAGTGGTGTCTCTTAATTTAGTA-CTTCCCGTGAAATCCTCTATCCTTCCCTAGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Antaresia melanocephalus
+?ACCACA----------CCTTCC-----------------CCAACCATAGTCTGTAACC--ACAGACTATGGT--CGATGTCTCAATATA-AAGCCAAAAATCTATATAAATAAA-ACACAATAAAG-----CTCTCTCCTCGGCCCCCCCC-TACMCCCCCC--ACAAGAAATATAGAAGAA------ACCAGCACATAAGACTA--TAAGGATTCCCCCCTTCTTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTCCCATTTAAATATTTTAATCTAAATTTGCCTTTGTACTTAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGCT-CTTCCCGTGAAATCCTCTATCCTTCCTCTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Antaresia ramsayi
+?ACCACG----------CCTTCC-----------------CCA-CCATAGTCTGTAAA-TTACAGACTATGGT--CGTTGCCTCAACATA-AAGCCAAAAACCCATATAAACAAAAC--ATATAAA----CTCTCTCTCCTCGACCCCCCCC-TACCCCCCCC--ACAAGAAATATAGAAGAA------ACCAGCACATAAGACTA--TAAGGATTTCCCCCTCCTTTCC--CCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAGCCGGAATTCCCATTTAAATATTTTAATCTAAATT-GCCTTCGTACCTAAAATTCAG----TCCTCATTTCT-TGGTCGTTCAATGCAGCACGGATT--AATAG-ATCTT-ATTAACCATGGCTATCC-TTGAT-CTAGTGGTGTCCCATGATCTAGCT-CTTCCCGTGAAATCCTCTATCCTTCCTCTGAATGCTAACCATTCGACTTCTCACG [...]
+
+>Python reticulatus
+??CCATCACCCTCACTTCCTCC--------------------AACCATAGCCAAATA-TTT---GGCTATGGTT-TCATGCCAAAATATATCAACCAAAAACCCATATTAATATAATGCTATAAAATGG-------TCCCTCGACCCCCCCCCTACCCCCCCC--AAAAAA--CATAAGGAAA------GTCCG-CACATCATAAACCTCGTACTTTTCCCTATTTTTT-GCTCCTATGTATAATCTTACATTAATGGCTTGCCCCATGGATAATAAGCAGGAATTTCCCTTTTAATATTTTAGTCTAAATTAGCCTTCGTACAGGTAATTCAGT----CCTCATTTTC-TGGTCGTTCAATGCAGCATGGATT--AATAA-TTGTT-GATAACCATGGATATCC-TTGAT-CTAGTTGTGTCCCTTGATTTAACA-CTTCCCGTGAAATCCTCTATCCTTCCGCGTAATGCTAACCATTCGACTTCTCACG [...]
+
+>Python timoriensis
+TA-CACCACCA------------------------------AGACCATAGTCGGTAAATC----GACTATGGTCTTTTTACGCCAAAAATACAACCAAAAATCCATATTAATATAGCAATATAAAATAG-------CCCCTCGACCCCCCCCCTACCCCCCCCC-ACAAAAA-TATAAAGAAA------ACCCG-TATGTCATAAACTCCGAATTTTTCCCTATTTTT--GCCCCTATGTATAATCATACATTATTGGCTTGCCCCATGGATAATAAGCAAGAATTCCCTTTTTAATATTTTAGTCTAAAATTGCCTTT-TACAAAAAACTCAGT----CCTCATTTCT-TGGTCGTTCAATGCAGCATGGGCT--AATAA-TTATT-AATAACCATGACTATCC-TTGAT-CTAGTTGTGTCTCTTAGTTTGGTA-CTTCCCGTGAAATCCTCTATCCTTCCGCGTAATGCTAACCATTCGACTTCTCACG [...]
+
+>Python sebae
+?????????????????????????????????????????????????????????????????????????????CTTCCTCAGACAC-AAACTCA-ACCTCAAATAAAAATAAAAATAAT-----------CCTACCTCGGCCCCCCCCCTACCCCCCCC--ACTATTT-CATATGGAA-------TACAGGATATATAC-TTTGTTAGAAAAATCCATATTTTTTCTACCCTATGTATAATCTTACATTAATGGCTTGCCCCATGAATAATAAGCGGGAATTCCTAATAAAATATTTTAGCCTAAAATTGCCTTCGTACATAAAATT-AGC---TCCACATTTCTTTGGTCGTTCAATGCTGCANGGATTATAGTAC-TTCTT-AATACACATGACTATCC-TTGAT-CTAGTCGTCTCTCTTAACTTAACA-CTTCCCGTGAAATCCTCTATCCTTTCATA-CATGCTAACCATTCGACTTCTCACG [...]
+
+>Python molurus
+?????????????????????????????????????????????????????????????????????????????CTTCCTCAGACAC-AAACTCA-ACCTCAAATAAAAATAAAAACAAT-----------CCTACCTCGGCCCCCCCCCTACCCCCCCCC-ACTATTT-CATATGGAA-------TACAGGATATATACATTTGTTAGAAAAATCCATATTTTTTCTACCCTATGTATAATCTTACATTAATGGCTTGCCCCATGAATAATAAGCGGGAATTCCTAATAAAATATTTTAGCCTAAAATTGCCTTCGTACATAAAATT-AGC---TCCACATTTCTTTGGTCGTTCAATGCTGCACGGATTATAGTAC-TTCTT-AATACACATGACTATCC-TTGAT-CTAGTCGTCTCTCTTAACTTAACA-CTTCCCGTGAAATCCTCTATCCTTTCATA-CATGCTAACCATTCGACTTCTCACG [...]
+
+>Python curtus
+?CCACAAAA-----------------------------------CCAT-----------------ATTAATYTT--CCCACCTATAAYTA-AACCCGAAATTCCCTATAAA--CACAACAAAAAATA-----CTCCTTCYTCGCCCCCCCCC-TACCCCCCCCCCAC-ATTT-AATATAAGAT------TCTGG--AATATACACACATCGTTAATTTCCATATTTTTT--ATGCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATAATAAGCCGGAATTCCATATTAAATATTTTAGCCTAAAATTGCCTTAGTACCTAAAACT-AGTCCTTCCTCATTTTC-TGGTCGTTCAATGCTGCATGGATT--AATCA-TTCTTTAACAGATATGTCTATCC-TTGAT-CTAGTCGTCTCTCTTAACCTGGCG-CTTCCCGTGAAATCCTCTATCCTTTCATA-CATGCTAACCATTCGACTTCTCACG [...]
+
+>Python regius
+?????????????????????????????????????????????????????????????????????????????TTACCTCAAT----AAACCCAAACCCACTATAAAAATATAA-----------------CCCCCTCGGCCCCCCCCCTTCCCCCCCCC-ACTTACA---TAGGAGGA------TTTAG-ATATATACACATATTAGGATTTTCCCTATCTTTTC-ACCCTATGTATAATCTTACATTAATGGTTTGCCCCATGAATATTAAACCAGAATTTCCAATTAAATATTTTAACCTAAAATTGCCTTCGTACACTACACC-AGT---CCCTCATTTCT-TGGTCGTTCAATGCTGCACGGATTATAGTAC-TTATT-AATGCTCATGTCTATCC-TTGGT-CTAGTGGTGTCTCTTAGTTTAACA-CTTCCCGTGAAATCCTCTATCCTTTCATA-CATGCTAACCATTCGACTTCTCACG [...]
+
+>Candoia aspera
+?????????????????????????????????????????????????????????????????????????????????????????????????AAA----CTA-------------------------CT-CTCTGG-GACCCCCCCC-TACCCCCCCC--AGATAAACTATACTAAAATTTACCTGAGTACACTATGTAAATATTGTACATTAGTCTATATTTC--ATGCTATGTATAATCATACATTAATGATCTGCCCCATGGATAATAAGCAGGAATTTCCCTATTAATATTTCAGCCTATTAATGCCTTAGTACAGTCAGTGTGTC---ACCACATCAT--GGGTCGTTTTATGCAGCAAGGATTA-ACTA--TTATT-GGTAATCATGCCTATCC--TGATCCAAGTTGTC-CTCTTAATCTACCTA-CTCACGTGAAATCCTCTATCCTTCCAAGAATGGCTAACAGTCCTGCTTTTCACG [...]
+
+>Morelia nauta
+???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+
+>Morelia clastolepis
+???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+
+>Morelia tracyae
+???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+
+>Morelia kinghorni
+???????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????? [...]
+
diff --git a/doc/source/examples/raxml_estimate_tree.py b/doc/source/examples/raxml_estimate_tree.py
new file mode 100644
index 0000000..1dd6b2d
--- /dev/null
+++ b/doc/source/examples/raxml_estimate_tree.py
@@ -0,0 +1,14 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.interop import raxml
+
+data = dendropy.DnaCharacterMatrix.get(
+    path="pythonidae.nex",
+    schema="nexus")
+rx = raxml.RaxmlRunner()
+tree = rx.estimate_tree(
+        char_matrix=data,
+        raxml_args=["--no-bfgs"])
+print(tree.as_string(schema="newick"))
+
diff --git a/doc/source/examples/readroot.py b/doc/source/examples/readroot.py
new file mode 100644
index 0000000..a747e07
--- /dev/null
+++ b/doc/source/examples/readroot.py
@@ -0,0 +1,20 @@
+import dendropy
+
+tree_strs = [
+    "     (A, (B, (C, (D, E))));",
+    "[&U] (A, (B, (C, (D, E))));",
+    "[&R] (A, (B, (C, (D, E))));",
+]
+
+rootings = (None, "force-unrooted", "force-rooted", "default-unrooted", "default-rooted",)
+
+for tree_str in tree_strs:
+    for rooting in rootings:
+        tree = dendropy.Tree.get(
+                data=tree_str,
+                schema="newick",
+                rooting=rooting)
+        rooting_token = tree_str[:4]
+        rooting_keyword_value = "'{}'".format(rooting) if rooting is not None else "None"
+        print("rooting={:20}, token='{}', tree.is_rooted: {}".format(
+            rooting_keyword_value, rooting_token, tree.is_rooted))
diff --git a/doc/source/examples/readroot2.py b/doc/source/examples/readroot2.py
new file mode 100644
index 0000000..ae1fdf5
--- /dev/null
+++ b/doc/source/examples/readroot2.py
@@ -0,0 +1,14 @@
+import dendropy
+
+# force tree to be read as rooted
+mle = dendropy.Tree.get(
+        path='pythonidae.mle.nex',
+        schema='nexus',
+        rooting='force-rooted')
+
+# force tree to be read as unrooted
+mle = dendropy.Tree.get(
+        path='pythonidae.mle.nex',
+        schema='nexus',
+        rooting='force-unrooted')
+
diff --git a/doc/source/examples/remove_branch_lengths.py b/doc/source/examples/remove_branch_lengths.py
new file mode 100644
index 0000000..1af074b
--- /dev/null
+++ b/doc/source/examples/remove_branch_lengths.py
@@ -0,0 +1,11 @@
+#! /usr/bin/env python
+
+import dendropy
+
+mle = dendropy.Tree.get(
+    path='pythonidae.mle.nex',
+    schema='nexus')
+mle_len = mle.length()
+for edge in mle.postorder_edge_iter():
+    edge.length = None
+print(mle.as_string(schema="newick"))
diff --git a/doc/source/examples/reroot_at_external_edge.py b/doc/source/examples/reroot_at_external_edge.py
new file mode 100644
index 0000000..7c4b39b
--- /dev/null
+++ b/doc/source/examples/reroot_at_external_edge.py
@@ -0,0 +1,19 @@
+#! /usr/bin/env python
+
+import dendropy
+
+tree_str = "[&R] (A, (B, (C, (D, E))));"
+
+tree = dendropy.Tree.get(
+        data=tree_str,
+        schema="newick")
+
+print("Before:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+node_D = tree.find_node_with_taxon_label("D")
+tree.reroot_at_edge(node_D.edge, update_bipartitions=False)
+print("After:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+
diff --git a/doc/source/examples/reroot_at_internal_edge.py b/doc/source/examples/reroot_at_internal_edge.py
new file mode 100644
index 0000000..b637f4d
--- /dev/null
+++ b/doc/source/examples/reroot_at_internal_edge.py
@@ -0,0 +1,19 @@
+#! /usr/bin/env python
+
+import dendropy
+
+tree_str = "[&R] (A, (B, (C, (D, E))));"
+
+tree = dendropy.Tree.get(
+        data=tree_str,
+        schema="newick")
+
+print("Before:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+mrca = tree.mrca(taxon_labels=["D", "E"])
+tree.reroot_at_edge(mrca.edge, update_bipartitions=False)
+print("After:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+
diff --git a/doc/source/examples/reroot_at_midpoint.py b/doc/source/examples/reroot_at_midpoint.py
new file mode 100644
index 0000000..9775f84
--- /dev/null
+++ b/doc/source/examples/reroot_at_midpoint.py
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+
+import dendropy
+
+tree_str = "[&R] (A:0.55, (B:0.82, (C:0.74, (D:0.42, E:0.64):0.24):0.15):0.20):0.3;"
+
+tree = dendropy.Tree.get(
+        data=tree_str,
+        schema="newick")
+
+print("Before:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot(plot_metric='length'))
+tree.reroot_at_midpoint(update_bipartitions=False)
+print("After:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot(plot_metric='length'))
+
diff --git a/doc/source/examples/reroot_at_node.py b/doc/source/examples/reroot_at_node.py
new file mode 100644
index 0000000..2e02879
--- /dev/null
+++ b/doc/source/examples/reroot_at_node.py
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+
+import dendropy
+
+tree_str = "[&R] (A, (B, (C, (D, E))));"
+
+tree = dendropy.Tree.get(
+        data=tree_str,
+        schema="newick")
+
+print("Before:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+mrca = tree.mrca(taxon_labels=["D", "E"])
+tree.reroot_at_node(mrca, update_bipartitions=False)
+print("After:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
diff --git a/doc/source/examples/rescale_tree_length.py b/doc/source/examples/rescale_tree_length.py
new file mode 100644
index 0000000..5b5b4a4
--- /dev/null
+++ b/doc/source/examples/rescale_tree_length.py
@@ -0,0 +1,12 @@
+#! /usr/bin/env python
+
+import dendropy
+
+mle = dendropy.Tree.get(path='pythonidae.mle.nex', schema='nexus')
+mle_len = mle.length()
+for edge in mle.postorder_edge_iter():
+    if edge.length is None:
+        edge.length = 0
+    else:
+        edge.length = float(edge.length)/mle_len
+print(mle.as_string(schema="newick"))
diff --git a/doc/source/examples/retain_taxa_with_labels.py b/doc/source/examples/retain_taxa_with_labels.py
new file mode 100644
index 0000000..641d79c
--- /dev/null
+++ b/doc/source/examples/retain_taxa_with_labels.py
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+
+import dendropy
+
+tree_str = "[&R] ((A, (B, (C, (D, E)))),(F, (G, H)));"
+
+tree = dendropy.Tree.get(
+        data=tree_str,
+        schema="newick")
+
+print("Before:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+tree.retain_taxa_with_labels(["A", "C", "G"])
+print("After:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+
diff --git a/doc/source/examples/seqgen.py b/doc/source/examples/seqgen.py
new file mode 100644
index 0000000..ac8b2b3
--- /dev/null
+++ b/doc/source/examples/seqgen.py
@@ -0,0 +1,28 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.interop import seqgen
+
+trees = dendropy.TreeList.get(
+        path="pythonidae.mcmc.nex",
+        schema="nexus")
+s = seqgen.SeqGen()
+
+# generate one alignment per tree
+# as substitution model is not specified, defaults to a JC model
+# will result in a DataSet object with one DnaCharacterMatrix per input tree
+d0 = s.generate(trees)
+print(len(d0.char_matrices))
+print(d0.char_matrices[0].as_string("nexus"))
+
+# instruct Seq-Gen to scale branch lengths by factor of 0.1
+# note that this does not modify the input trees
+s.scale_branch_lens = 0.1
+
+# more complex model
+s.char_model = seqgen.SeqGen.GTR
+s.state_freqs = [0.4, 0.4, 0.1, 0.1]
+s.general_rates = [0.8, 0.4, 0.4, 0.2, 0.2, 0.1]
+d1 = s.generate(trees)
+print(len(d1.char_matrices))
+print(d1.char_matrices[0].as_string("nexus"))
diff --git a/doc/source/examples/setroot1.py b/doc/source/examples/setroot1.py
new file mode 100644
index 0000000..d90542e
--- /dev/null
+++ b/doc/source/examples/setroot1.py
@@ -0,0 +1,26 @@
+import dendropy
+
+tree_str = "[&R] (A, (B, (C, (D, E))));"
+
+tree = dendropy.Tree.get_from_string(
+        tree_str,
+        "newick")
+
+print("Original:")
+print(tree.as_ascii_plot())
+
+tree.is_rooted = False
+print("After `is_rooted=False`:")
+print(tree.as_ascii_plot())
+
+tree.update_bipartitions()
+print("After `update_bipartitions()`:")
+print(tree.as_ascii_plot())
+
+tree2 = dendropy.Tree.get_from_string(
+        tree_str,
+        "newick")
+tree2.is_rooted = False
+tree2.update_bipartitions(suppress_unifurcations=False)
+print("After `update_bipartitions(suppress_unifurcations=False)`:")
+print(tree2.as_ascii_plot())
diff --git a/doc/source/examples/sim_and_count_deepcoal1.py b/doc/source/examples/sim_and_count_deepcoal1.py
new file mode 100644
index 0000000..69f4279
--- /dev/null
+++ b/doc/source/examples/sim_and_count_deepcoal1.py
@@ -0,0 +1,71 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.simulate import treesim
+from dendropy.model import reconcile
+
+# simulation parameters and output
+num_reps = 10
+
+# population tree descriptions
+stepwise_tree_str = "[&R](A:120000,(B:80000,(C:40000,D:40000):40000):40000):100000;"
+frag_tree_str = "[&R](A:120000,B:120000,C:120000,D:120000):100000;"
+
+# taxa and trees
+containing_taxa = dendropy.TaxonNamespace()
+stepwise_tree = dendropy.Tree.get(
+        data=stepwise_tree_str,
+        schema="newick",
+        taxon_namespace=containing_taxa)
+frag_tree = dendropy.Tree.get(
+        data=frag_tree_str,
+        schema="newick",
+        taxon_namespace=containing_taxa)
+
+# taxon set association
+genes_to_species = dendropy.TaxonNamespaceMapping.create_contained_taxon_mapping(
+        containing_taxon_namespace=containing_taxa,
+        num_contained=8)
+
+# convert to containing tree
+stepwise_tree = reconcile.ContainingTree(stepwise_tree,
+            contained_taxon_namespace=genes_to_species.domain_taxon_namespace,
+            contained_to_containing_taxon_map=genes_to_species)
+frag_tree = reconcile.ContainingTree(frag_tree,
+            contained_taxon_namespace=genes_to_species.domain_taxon_namespace,
+            contained_to_containing_taxon_map=genes_to_species)
+
+# for each rep
+for rep in range(num_reps):
+    gene_tree1 = treesim.contained_coalescent_tree(containing_tree=stepwise_tree,
+        gene_to_containing_taxon_map=genes_to_species,
+        default_pop_size=40000)
+    stepwise_tree.embed_tree(gene_tree1)
+    gene_tree2 = treesim.contained_coalescent_tree(containing_tree=frag_tree,
+        gene_to_containing_taxon_map=genes_to_species,
+        default_pop_size=40000)
+    frag_tree.embed_tree(gene_tree2)
+
+# write results
+
+# returns dictionary with contained trees as keys
+# and number of deep coalescences as values
+stepwise_deep_coals = stepwise_tree.deep_coalescences()
+stepwise_out = open("stepwise.txt", "w")
+for tree in stepwise_deep_coals:
+    stepwise_out.write("%d\n" % stepwise_deep_coals[tree])
+
+# returns dictionary with contained trees as keys
+# and number of deep coalescences as values
+frag_deep_coals = frag_tree.deep_coalescences()
+frag_out = open("frag.txt", "w")
+for tree in frag_deep_coals:
+    frag_out.write("%d\n" % frag_deep_coals[tree])
+
+
+
+
+
+
+
+
diff --git a/doc/source/examples/sim_and_count_deepcoal2.py b/doc/source/examples/sim_and_count_deepcoal2.py
new file mode 100644
index 0000000..66e2dd5
--- /dev/null
+++ b/doc/source/examples/sim_and_count_deepcoal2.py
@@ -0,0 +1,65 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.simulate import treesim
+from dendropy.model import reconcile
+
+# simulation parameters and output
+num_reps = 10
+
+# population tree descriptions
+stepwise_tree_str = "[&R](A:120000,(B:80000,(C:40000,D:40000):40000):40000):100000;"
+frag_tree_str = "[&R](A:120000,B:120000,C:120000,D:120000):100000;"
+
+# taxa and trees
+containing_taxa = dendropy.TaxonNamespace()
+stepwise_tree = dendropy.Tree.get(
+        data=stepwise_tree_str,
+        schema="newick",
+        taxon_namespace=containing_taxa)
+frag_tree = dendropy.Tree.get(
+        data=frag_tree_str,
+        schema="newick",
+        taxon_namespace=containing_taxa)
+
+# taxon set association
+genes_to_species = dendropy.TaxonNamespaceMapping.create_contained_taxon_mapping(
+        containing_taxon_namespace=containing_taxa,
+        num_contained=8)
+
+# convert to containing tree
+stepwise_tree = reconcile.ContainingTree(stepwise_tree,
+            contained_taxon_namespace=genes_to_species.domain_taxon_namespace,
+            contained_to_containing_taxon_map=genes_to_species)
+frag_tree = reconcile.ContainingTree(frag_tree,
+            contained_taxon_namespace=genes_to_species.domain_taxon_namespace,
+            contained_to_containing_taxon_map=genes_to_species)
+
+# for each rep
+for rep in range(num_reps):
+    stepwise_tree.embed_contained_kingman(default_pop_size=40000)
+    frag_tree.embed_contained_kingman(default_pop_size=40000)
+
+# write results
+
+# returns dictionary with contained trees as keys
+# and number of deep coalescences as values
+stepwise_deep_coals = stepwise_tree.deep_coalescences()
+stepwise_out = open("stepwise.txt", "w")
+for tree in stepwise_deep_coals:
+    stepwise_out.write("%d\n" % stepwise_deep_coals[tree])
+
+# returns dictionary with contained trees as keys
+# and number of deep coalescences as values
+frag_deep_coals = frag_tree.deep_coalescences()
+frag_out = open("frag.txt", "w")
+for tree in frag_deep_coals:
+    frag_out.write("%d\n" % frag_deep_coals[tree])
+
+
+
+
+
+
+
+
diff --git a/doc/source/examples/splitfreq.py b/doc/source/examples/splitfreq.py
new file mode 100644
index 0000000..aa8fa68
--- /dev/null
+++ b/doc/source/examples/splitfreq.py
@@ -0,0 +1,16 @@
+#! /usr/bin/env python
+
+import dendropy
+
+trees = dendropy.TreeList()
+for tree_file in ['pythonidae.mb.run1.t',
+        'pythonidae.mb.run2.t',
+        'pythonidae.mb.run3.t',
+        'pythonidae.mb.run4.t']:
+    trees.read(
+            path=tree_file,
+            schema='nexus',
+            tree_offset=20)
+split_leaves = ['Morelia amethistina', 'Morelia tracyae']
+f = trees.frequency_of_bipartition(labels=split_leaves)
+print('Frequency of split %s: %s' % (split_leaves, f))
diff --git a/doc/source/examples/splits_on_trees.py b/doc/source/examples/splits_on_trees.py
new file mode 100644
index 0000000..0214361
--- /dev/null
+++ b/doc/source/examples/splits_on_trees.py
@@ -0,0 +1,16 @@
+#! /usr/bin/env python
+
+import dendropy
+
+labels = ["A","B","C","D","E","F","G","H"]
+taxa = dendropy.TaxonNamespace(labels)
+tree = dendropy.Tree.get(
+        data="((A,(B,(C,D))),((E,F),(G,H)));",
+        schema="newick",
+        taxon_namespace=taxa)
+tree.is_rooted = False
+tree.encode_bipartitions()
+for node in tree:
+    node.label = taxa.bitmask_as_bitstring(node.edge.split_bitmask)
+print(tree.as_ascii_plot(show_internal_node_labels=True,
+    width=40))
diff --git a/doc/source/examples/symdiff1.py b/doc/source/examples/symdiff1.py
new file mode 100644
index 0000000..c676d1e
--- /dev/null
+++ b/doc/source/examples/symdiff1.py
@@ -0,0 +1,21 @@
+import dendropy
+from dendropy.calculate import treecompare
+
+s1 = "(a,(b,(c,d)));"
+s2 = "(a,(d,(b,c)));"
+
+# establish common taxon namespace
+tns = dendropy.TaxonNamespace()
+
+# ensure all trees loaded use common namespace
+tree1 = dendropy.Tree.get(
+        data=s1,
+        schema='newick',
+        taxon_namespace=tns)
+tree2 = dendropy.Tree.get(
+        data=s2,
+        schema='newick',
+        taxon_namespace=tns)
+
+## Unweighted Robinson-Foulds distance
+print(treecompare.symmetric_difference(tree1, tree2))
diff --git a/doc/source/examples/symdiff2.py b/doc/source/examples/symdiff2.py
new file mode 100644
index 0000000..1ea4850
--- /dev/null
+++ b/doc/source/examples/symdiff2.py
@@ -0,0 +1,37 @@
+import dendropy
+from dendropy.calculate import treecompare
+
+s1 = "(a,(b,(c,d)));"
+s2 = "((a,b),(c,d));"
+
+tns = dendropy.TaxonNamespace()
+
+unrooted_tree1 = dendropy.Tree.get(
+        data=s1,
+        schema='newick',
+        taxon_namespace=tns)
+unrooted_tree2 = dendropy.Tree.get(
+        data=s2,
+        schema='newick',
+        taxon_namespace=tns)
+
+rooted_tree1 = dendropy.Tree.get(
+        data=s1,
+        schema='newick',
+        rooting="force-rooted",
+        taxon_namespace=tns)
+rooted_tree2 = dendropy.Tree.get(
+        data=s2,
+        schema='newick',
+        rooting="force-rooted",
+        taxon_namespace=tns)
+
+## Unweighted Robinson-Foulds distance (rooted) = 2
+print(treecompare.symmetric_difference(rooted_tree1, rooted_tree2))
+## Unweighted Robinson-Foulds distance (unrooted) = 0
+print(treecompare.symmetric_difference(unrooted_tree1, unrooted_tree2))
+## Unweighted Robinson-Foulds distance (rooted1 to unrooted2) = 3
+print(treecompare.symmetric_difference(rooted_tree1, unrooted_tree2))
+## Unweighted Robinson-Foulds distance (unrooted1 to rooted2) = 5
+print(treecompare.symmetric_difference(unrooted_tree1, rooted_tree2))
+
diff --git a/doc/source/examples/taxa_mgmt1.py b/doc/source/examples/taxa_mgmt1.py
new file mode 100644
index 0000000..8b38ce5
--- /dev/null
+++ b/doc/source/examples/taxa_mgmt1.py
@@ -0,0 +1,12 @@
+import dendropy
+
+tree_str1 = "((A,B),C);"
+
+tree_list = dendropy.TreeList()
+tree_list.read(data=tree_str1, schema="newick")
+print(tree_list.taxon_namespace)
+tree_list.read(data=tree_str1, schema="newick")
+print(tree_list.taxon_namespace)
+for nd1, nd2 in zip(tree_list[0], tree_list[1]):
+    assert nd1.taxon is nd2.taxon # OK
+
diff --git a/doc/source/examples/taxa_mgmt1a.py b/doc/source/examples/taxa_mgmt1a.py
new file mode 100644
index 0000000..21a2116
--- /dev/null
+++ b/doc/source/examples/taxa_mgmt1a.py
@@ -0,0 +1,10 @@
+import dendropy
+
+tree_str1 = "((A,B),C);"
+
+tree1 = dendropy.Tree.get(data=tree_str1, schema="newick")
+tree2 = dendropy.Tree.get(data=tree_str1, schema="newick")
+print(tree1.taxon_namespace is  tree2.taxon_namespace) # False
+for nd1, nd2 in zip(tree1, tree2):
+    assert nd1.taxon is nd2.taxon # Assertion Error
+
diff --git a/doc/source/examples/taxa_mgmt1b.py b/doc/source/examples/taxa_mgmt1b.py
new file mode 100644
index 0000000..435cc83
--- /dev/null
+++ b/doc/source/examples/taxa_mgmt1b.py
@@ -0,0 +1,13 @@
+import dendropy
+
+tree_str1 = "((A,B),C);"
+
+tree1 = dendropy.Tree.get(data=tree_str1, schema="newick")
+tree2 = dendropy.Tree.get(
+        data=tree_str1,
+        schema="newick",
+        taxon_namespace=tree1.taxon_namespace)
+print(tree1.taxon_namespace is  tree2.taxon_namespace) # True
+for nd1, nd2 in zip(tree1, tree2):
+    assert nd1.taxon is nd2.taxon # OK
+
diff --git a/doc/source/examples/taxa_mgmt2.py b/doc/source/examples/taxa_mgmt2.py
new file mode 100644
index 0000000..2d35004
--- /dev/null
+++ b/doc/source/examples/taxa_mgmt2.py
@@ -0,0 +1,18 @@
+import dendropy
+from dendropy.calculate import treecompare
+
+tree_str1 = "((A,B),C);"
+tree_str2 = "((A,B),C);"
+
+ds = dendropy.DataSet()
+ds.read(data=tree_str1, schema="newick")
+ds.read(data=tree_str1, schema="newick")
+
+print(len(ds.taxon_namespaces))
+print(ds.tree_lists[0].taxon_namespace is ds.tree_lists[1].taxon_namespace)
+print(ds.tree_lists[0].taxon_namespace[0] is ds.tree_lists[1].taxon_namespace[0])
+
+# Results in:
+# 2
+# False
+# False
diff --git a/doc/source/examples/taxa_mgmt3.py b/doc/source/examples/taxa_mgmt3.py
new file mode 100644
index 0000000..619314c
--- /dev/null
+++ b/doc/source/examples/taxa_mgmt3.py
@@ -0,0 +1,42 @@
+import dendropy
+from dendropy.calculate import treecompare
+
+tree_str1 = "((A,B),C);"
+tree_str2 = "((A,B),C);"
+
+ds2 = dendropy.DataSet()
+ds2.read(data=tree_str1, schema="newick")
+ds2.read(
+        data=tree_str1,
+        schema="newick",
+        taxon_namespace=ds2.tree_lists[0].taxon_namespace)
+
+print(len(ds2.taxon_namespaces))
+print(ds2.tree_lists[0].taxon_namespace is ds2.tree_lists[1].taxon_namespace)
+print(ds2.tree_lists[0].taxon_namespace[0] is ds2.tree_lists[1].taxon_namespace[0])
+
+# Results in:
+# 1
+# True
+# True
+
+ds2 = dendropy.DataSet()
+tns = ds2.new_taxon_namespace()
+ds2.read(
+        data=tree_str1,
+        schema="newick",
+        taxon_namespace=tns)
+ds2.read(
+        data=tree_str1,
+        schema="newick",
+        taxon_namespace=tns)
+
+print(len(ds2.taxon_namespaces))
+print(ds2.tree_lists[0].taxon_namespace is ds2.tree_lists[1].taxon_namespace)
+print(ds2.tree_lists[0].taxon_namespace[0] is ds2.tree_lists[1].taxon_namespace[0])
+
+# Results in:
+# 1
+# True
+# True
+
diff --git a/doc/source/examples/taxa_mgmt4.py b/doc/source/examples/taxa_mgmt4.py
new file mode 100644
index 0000000..73cc004
--- /dev/null
+++ b/doc/source/examples/taxa_mgmt4.py
@@ -0,0 +1,18 @@
+import dendropy
+from dendropy.calculate import treecompare
+
+tree_str1 = "((A,B),C);"
+
+tree_list1 = dendropy.TreeList()
+tree_list1.read(data=tree_str1, schema="newick")
+tree_list2 = dendropy.TreeList()
+tree_list2.read(data=tree_str1, schema="newick")
+
+for taxon in tree_list1.taxon_namespace:
+    if taxon in tree_list2.taxon_namespace:
+        # this branch is never visited
+        print("Taxon '{}': found in both trees".format(taxon.label))
+
+## Following will result in:
+## dendropy.utility.error.TaxonNamespaceIdentityError: Non-identical taxon namespace references: ...
+# print(treecompare.symmetric_difference(tree_list1[0], tree_list2[0]))
diff --git a/doc/source/examples/taxa_mgmt5.py b/doc/source/examples/taxa_mgmt5.py
new file mode 100644
index 0000000..233d2ae
--- /dev/null
+++ b/doc/source/examples/taxa_mgmt5.py
@@ -0,0 +1,12 @@
+import dendropy
+from dendropy.calculate import treecompare
+
+tree_str1 = "((A,B),C);"
+
+tree_list1 = dendropy.TreeList()
+tree_list1.read(data=tree_str1, schema="newick")
+tree_list2 = dendropy.TreeList(taxon_namespace=tree_list1.taxon_namespace)
+tree_list2.read(data=tree_str1, schema="newick")
+
+# Results in: 0
+print(treecompare.symmetric_difference(tree_list1[0], tree_list2[0]))
diff --git a/doc/source/examples/taxon_labels1.py b/doc/source/examples/taxon_labels1.py
new file mode 100644
index 0000000..31be63f
--- /dev/null
+++ b/doc/source/examples/taxon_labels1.py
@@ -0,0 +1,35 @@
+#! /usr/bin/env python
+
+import dendropy
+
+nexus1 = """
+#NEXUS
+
+begin taxa;
+    dimensions ntax=2;
+    taxlabels Python_regius Python_sebae;
+end;
+
+begin characters;
+    dimensions nchar=5;
+    format datatype=dna gap=- missing=? matchchar=.;
+    matrix
+        Python_regius ACGTA
+        Python_sebae   ACGTA
+    ;
+end;
+"""
+
+fasta1 = """
+>Python_regius
+AAAA
+>Python_sebae
+ACGT
+"""
+
+d = dendropy.DataSet()
+tns = d.new_taxon_namespace()
+d.attach_taxon_namespace(tns)
+d.read(data=nexus1, schema="nexus")
+d.read(data=fasta1, schema="dnafasta")
+print(d.taxon_namespaces[0].description(2))
diff --git a/doc/source/examples/taxon_labels2.py b/doc/source/examples/taxon_labels2.py
new file mode 100644
index 0000000..7f182f6
--- /dev/null
+++ b/doc/source/examples/taxon_labels2.py
@@ -0,0 +1,35 @@
+#! /usr/bin/env python
+
+import dendropy
+
+nexus1 = """
+#NEXUS
+
+begin taxa;
+    dimensions ntax=2;
+    taxlabels 'Python regius' 'Python sebae';
+end;
+
+begin characters;
+    dimensions nchar=5;
+    format datatype=dna gap=- missing=? matchchar=.;
+    matrix
+        'Python regius' ACGTA
+        'Python sebae'   ACGTA
+    ;
+end;
+"""
+
+fasta1 = """
+>Python regius
+AAAA
+>Python sebae
+ACGT
+"""
+
+d = dendropy.DataSet()
+tns = d.new_taxon_namespace()
+d.attach_taxon_namespace(tns)
+d.read(data=nexus1, schema="nexus")
+d.read(data=fasta1, schema="fasta", data_type="dna")
+print(d.taxon_namespaces[0].description(2))
diff --git a/doc/source/examples/taxon_labels2b.py b/doc/source/examples/taxon_labels2b.py
new file mode 100644
index 0000000..9a8ad6e
--- /dev/null
+++ b/doc/source/examples/taxon_labels2b.py
@@ -0,0 +1,35 @@
+#! /usr/bin/env python
+
+import dendropy
+
+nexus1 = """
+#NEXUS
+
+begin taxa;
+    dimensions ntax=2;
+    taxlabels Python_regius Python_sebae;
+end;
+
+begin characters;
+    dimensions nchar=5;
+    format datatype=dna gap=- missing=? matchchar=.;
+    matrix
+        Python_regius ACGTA
+        Python_sebae   ACGTA
+    ;
+end;
+"""
+
+fasta1 = """
+>Python regius
+AAAA
+>Python sebae
+ACGT
+"""
+
+d = dendropy.DataSet()
+tns = d.new_taxon_namespace()
+d.attach_taxon_namespace(tns)
+d.read(data=nexus1, schema="nexus")
+d.read(data=fasta1, schema="fasta", data_type="dna")
+print(d.taxon_namespaces[0].description(2))
diff --git a/doc/source/examples/taxon_labels3.py b/doc/source/examples/taxon_labels3.py
new file mode 100644
index 0000000..46eb07f
--- /dev/null
+++ b/doc/source/examples/taxon_labels3.py
@@ -0,0 +1,35 @@
+#! /usr/bin/env python
+
+import dendropy
+
+nexus1 = """
+#NEXUS
+
+begin taxa;
+    dimensions ntax=2;
+    taxlabels 'Python_regius' 'Python_sebae';
+end;
+
+begin characters;
+    dimensions nchar=5;
+    format datatype=dna gap=- missing=? matchchar=.;
+    matrix
+        'Python_regius' ACGTA
+        'Python_sebae'   ACGTA
+    ;
+end;
+"""
+
+fasta1 = """
+>Python_regius
+AAAA
+>Python_sebae
+ACGT
+"""
+
+d = dendropy.DataSet()
+tns = d.new_taxon_namespace()
+d.attach_taxon_namespace(tns)
+d.read(data=nexus1, schema="nexus")
+d.read(data=fasta1, schema="fasta", data_type="dna")
+print(d.taxon_namespaces[0].description(2))
diff --git a/doc/source/examples/taxon_labels4.py b/doc/source/examples/taxon_labels4.py
new file mode 100644
index 0000000..ef91f4c
--- /dev/null
+++ b/doc/source/examples/taxon_labels4.py
@@ -0,0 +1,36 @@
+#! /usr/bin/env python
+
+import dendropy
+
+nexus1 = """
+#NEXUS
+
+begin taxa;
+    dimensions ntax=2;
+    taxlabels Python_regius Python_sebae;
+end;
+
+begin characters;
+    dimensions nchar=5;
+    format datatype=dna gap=- missing=? matchchar=.;
+    matrix
+        Python_regius ACGTA
+        Python_sebae   ACGTA
+    ;
+end;
+"""
+
+fasta1 = """
+>Python_regius
+AAAA
+>Python_sebae
+ACGT
+"""
+
+d = dendropy.DataSet()
+tns = d.new_taxon_namespace()
+d.attach_taxon_namespace(tns)
+d.read(data=nexus1, schema="nexus", preserve_underscores=True)
+d.read(data=fasta1, schema="fasta", data_type="dna")
+print(d.taxon_namespaces[0].description(2))
+
diff --git a/doc/source/examples/to_outgroup_position.py b/doc/source/examples/to_outgroup_position.py
new file mode 100644
index 0000000..da63032
--- /dev/null
+++ b/doc/source/examples/to_outgroup_position.py
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+
+import dendropy
+
+tree_str = "[&R] (A, (B, (C, (D, E))));"
+
+tree = dendropy.Tree.get(
+    data=tree_str,
+    schema="newick")
+
+print("Before:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
+outgroup_node = tree.find_node_with_taxon_label("C")
+tree.to_outgroup_position(outgroup_node, update_bipartitions=False)
+print("After:")
+print(tree.as_string(schema='newick'))
+print(tree.as_ascii_plot())
diff --git a/doc/source/examples/tree_copy1.py b/doc/source/examples/tree_copy1.py
new file mode 100644
index 0000000..d8987a6
--- /dev/null
+++ b/doc/source/examples/tree_copy1.py
@@ -0,0 +1,26 @@
+import dendropy
+
+# original list
+s1 = "(A,(B,C));"
+tree1 = dendropy.Tree.get(
+        data=s1,
+        schema="newick")
+
+# taxon namespace-scoped deep copy by calling Tree.clone(1)
+# I.e. Everything cloned, but with Taxon and TaxonNamespace references shared
+tree2 = tree1.clone(depth=1)
+
+# taxon namespace-scoped deep copy by copy-construction
+# I.e. Everything cloned, but with Taxon and TaxonNamespace references shared
+tree3 = dendropy.Tree(tree1)
+
+# *different* tree instances, with different nodes and edges
+for nd1, nd2, nd3 in zip(tree1, tree2, tree3):
+    assert nd1 is not nd2
+    assert nd1 is not nd3
+    assert nd2 is not nd3
+
+# Note: TaxonNamespace is still shared
+# I.e. Everything cloned, but with Taxon and TaxonNamespace references shared
+assert tree2.taxon_namespace is tree1.taxon_namespace
+assert tree3.taxon_namespace is tree1.taxon_namespace
diff --git a/doc/source/examples/tree_copy2.py b/doc/source/examples/tree_copy2.py
new file mode 100644
index 0000000..98fdec5
--- /dev/null
+++ b/doc/source/examples/tree_copy2.py
@@ -0,0 +1,23 @@
+import copy
+import dendropy
+
+# original list
+s1 = "(A,(B,C));"
+tree1 = dendropy.Tree.get(
+        data=s1,
+        schema="newick")
+
+# Full deep copy by calling copy.deepcopy()
+# I.e. Everything cloned including Taxon and TaxonNamespace instances
+tree2 = copy.deepcopy(tree1)
+
+# *different* tree instances
+for nd1, nd2 in zip(tree1, tree2):
+    assert nd1 is not nd2
+
+# Note: TaxonNamespace is also different
+assert tree2.taxon_namespace is not tree1.taxon_namespace
+for tx1 in tree1.taxon_namespace:
+    assert tx1 not in tree2.taxon_namespace
+for tx2 in tree2.taxon_namespace:
+    assert tx2 not in tree1.taxon_namespace
diff --git a/doc/source/examples/tree_evolve_char1.py b/doc/source/examples/tree_evolve_char1.py
new file mode 100644
index 0000000..fb522b3
--- /dev/null
+++ b/doc/source/examples/tree_evolve_char1.py
@@ -0,0 +1,20 @@
+#! /usr/bin/env python
+
+import random
+import dendropy
+
+def process_node(node, start=1.0):
+    if node.parent_node is None:
+        node.value = start
+    else:
+        node.value = random.gauss(node.parent_node.value, node.edge.length)
+    for child in node.child_nodes():
+        process_node(child)
+    if node.taxon is not None:
+        print("%s : %s" % (node.taxon, node.value))
+
+mle = dendropy.Tree.get(
+    path='pythonidae.mle.nex',
+    schema='nexus')
+process_node(mle.seed_node)
+
diff --git a/doc/source/examples/tree_evolve_char2.py b/doc/source/examples/tree_evolve_char2.py
new file mode 100644
index 0000000..912d9b4
--- /dev/null
+++ b/doc/source/examples/tree_evolve_char2.py
@@ -0,0 +1,19 @@
+#! /usr/bin/env python
+
+import random
+import dendropy
+
+def evolve_char(tree, start=1.0):
+    for node in tree.preorder_node_iter():
+        if node.parent_node is None:
+            node.value = start
+        else:
+            node.value = random.gauss(node.parent_node.value, node.edge.length)
+    return tree
+
+mle = dendropy.Tree.get(
+        path='pythonidae.mle.nex',
+        schema='nexus')
+evolve_char(mle)
+for node in mle.leaf_iter():
+    print("%s : %s" % (node.taxon, node.value))
diff --git a/doc/source/examples/tree_iter1.py b/doc/source/examples/tree_iter1.py
new file mode 100644
index 0000000..d96d07b
--- /dev/null
+++ b/doc/source/examples/tree_iter1.py
@@ -0,0 +1,30 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.calculate import treecompare
+
+distances = []
+taxa = dendropy.TaxonNamespace()
+mle_tree = dendropy.Tree.get(
+    path='pythonidae.mle.nex',
+    schema='nexus',
+    taxon_namespace=taxa)
+burnin = 20
+source_files = [
+        open("pythonidae.mcmc1.nex", "r"), # Note: for 'Tree.yield_from_files',
+        open("pythonidae.mcmc2.nex", "r"), # sources can be specified as file
+        "pythonidae.mcmc3.nex",            # objects or strings, with strings
+        "pythonidae.mcmc4.nex",            # assumed to specify file paths
+        ]
+tree_yielder = dendropy.Tree.yield_from_files(
+        files=source_files,
+        schema='nexus',
+        taxon_namespace=taxa,
+        )
+for tree_idx, mcmc_tree in enumerate(tree_yielder):
+    if tree_idx < burnin:
+        # skip burnin
+        continue
+    distances.append(treecompare.symmetric_difference(mle_tree, mcmc_tree))
+print("Mean symmetric distance between MLE and MCMC trees: %d"
+        % float(sum(distances)/len(distances)))
diff --git a/doc/source/examples/tree_iter2.py b/doc/source/examples/tree_iter2.py
new file mode 100644
index 0000000..8dee882
--- /dev/null
+++ b/doc/source/examples/tree_iter2.py
@@ -0,0 +1,22 @@
+#! /usr/bin/env python
+
+import dendropy
+from dendropy.calculate import treecompare
+
+distances = []
+taxa = dendropy.TaxonNamespace()
+mle_tree = dendropy.Tree.get(
+    path='pythonidae.mle.nex',
+    schema='nexus',
+    taxon_namespace=taxa)
+mcmc_tree_file_paths = ['pythonidae.mb.run1.t',
+        'pythonidae.mb.run2.t',
+        'pythonidae.mb.run3.t',
+        'pythonidae.mb.run4.t']
+for mcmc_tree in dendropy.Tree.yield_from_files(
+        files=mcmc_tree_file_paths,
+        schema='nexus',
+        taxon_namespace=taxa):
+    distances.append(treecompare.symmetric_difference(mle_tree, mcmc_tree))
+print("Mean symmetric distance between MLE and MCMC trees: %d"
+        % float(sum(distances)/len(distances)))
diff --git a/doc/source/examples/tree_length_crit.py b/doc/source/examples/tree_length_crit.py
new file mode 100644
index 0000000..b05a2e9
--- /dev/null
+++ b/doc/source/examples/tree_length_crit.py
@@ -0,0 +1,14 @@
+#! /usr/bin/env python
+
+import dendropy
+
+trees = dendropy.TreeList.get(
+    path="pythonidae.random.bd0301.tre",
+    schema="nexus")
+tree_lengths = [tree.length() for tree in trees]
+tree_lengths.sort()
+crit_index_95 = int(0.95 * len(tree_lengths))
+crit_index_99 = int(0.99 * len(tree_lengths))
+
+print("95%% critical value: %s" % tree_lengths[crit_index_95])
+print("99%% critical value: %s" % tree_lengths[crit_index_99])
diff --git a/doc/source/examples/tree_list_add1.py b/doc/source/examples/tree_list_add1.py
new file mode 100644
index 0000000..c24963c
--- /dev/null
+++ b/doc/source/examples/tree_list_add1.py
@@ -0,0 +1,6 @@
+import dendropy
+
+trees = dendropy.TreeList()
+trees.read(path="sometrees.nex", schema="nexus", tree_offset=10)
+trees.read(data="(A,(B,C));((A,B),C);", schema="newick")
+
diff --git a/doc/source/examples/tree_list_copy1.py b/doc/source/examples/tree_list_copy1.py
new file mode 100644
index 0000000..f92afe0
--- /dev/null
+++ b/doc/source/examples/tree_list_copy1.py
@@ -0,0 +1,24 @@
+import dendropy
+
+# original list
+s1 = "(A,(B,C));(B,(A,C));(C,(A,B));"
+treelist1 = dendropy.TreeList.get(
+        data=s1,
+        schema="newick")
+
+# shallow copy by calling Tree.clone(0)
+treelist2 = treelist1.clone(depth=0)
+
+# shallow copy by slicing
+treelist3 = treelist1[:]
+
+# same tree instances are shared
+for t1, t2 in zip(treelist1, treelist2):
+    assert t1 is t2
+for t1, t2 in zip(treelist1, treelist3):
+    assert t1 is t2
+
+# note: (necessarily) sharing same TaxonNamespace
+assert treelist2.taxon_namespace is treelist1.taxon_namespace
+assert treelist3.taxon_namespace is treelist1.taxon_namespace
+
diff --git a/doc/source/examples/tree_list_copy2.py b/doc/source/examples/tree_list_copy2.py
new file mode 100644
index 0000000..f774caf
--- /dev/null
+++ b/doc/source/examples/tree_list_copy2.py
@@ -0,0 +1,26 @@
+import dendropy
+
+# original list
+s1 = "(A,(B,C));(B,(A,C));(C,(A,B));"
+treelist1 = dendropy.TreeList.get(
+        data=s1,
+        schema="newick")
+
+# taxon namespace-scoped deep copy by calling Tree.clone(1)
+# I.e. Everything cloned, but with Taxon and TaxonNamespace references shared
+treelist2 = treelist1.clone(depth=1)
+
+# taxon namespace-scoped deep copy by copy-construction
+# I.e. Everything cloned, but with Taxon and TaxonNamespace references shared
+treelist3 = dendropy.TreeList(treelist1)
+
+# *different* tree instances
+for t1, t2, t3 in zip(treelist1, treelist2, treelist3):
+    assert t1 is not t2
+    assert t1 is not t3
+    assert t2 is not t3
+
+# Note: TaxonNamespace is still shared
+# I.e. Everything cloned, but with Taxon and TaxonNamespace references shared
+assert treelist2.taxon_namespace is treelist1.taxon_namespace
+assert treelist3.taxon_namespace is treelist1.taxon_namespace
diff --git a/doc/source/examples/tree_list_copy3.py b/doc/source/examples/tree_list_copy3.py
new file mode 100644
index 0000000..faffb34
--- /dev/null
+++ b/doc/source/examples/tree_list_copy3.py
@@ -0,0 +1,23 @@
+import copy
+import dendropy
+
+# original list
+s1 = "(A,(B,C));(B,(A,C));(C,(A,B));"
+treelist1 = dendropy.TreeList.get(
+        data=s1,
+        schema="newick")
+
+# Full deep copy by calling copy.deepcopy()
+# I.e. Everything cloned including Taxon and TaxonNamespace instances
+treelist2 = copy.deepcopy(treelist1)
+
+# *different* tree instances
+for t1, t2 in zip(treelist1, treelist2):
+    assert t1 is not t2
+
+# Note: TaxonNamespace is also different
+assert treelist2.taxon_namespace is not treelist1.taxon_namespace
+for tx1 in treelist1.taxon_namespace:
+    assert tx1 not in treelist2.taxon_namespace
+for tx2 in treelist2.taxon_namespace:
+    assert tx2 not in treelist1.taxon_namespace
diff --git a/doc/source/examples/tree_list_ops1.py b/doc/source/examples/tree_list_ops1.py
new file mode 100644
index 0000000..12361b0
--- /dev/null
+++ b/doc/source/examples/tree_list_ops1.py
@@ -0,0 +1,38 @@
+import dendropy
+from dendropy.calculate import treecompare
+
+trees = dendropy.TreeList.get(
+        path="pythonidae.random.bd0301.tre",
+        schema="nexus")
+
+for tree in trees:
+    print(tree.as_string("newick"))
+
+print(len(trees))
+
+print(trees[4].as_string("nexus"))
+print(treecompare.robinson_foulds_distance(trees[0], trees[1]))
+print(treecompare.weighted_robinson_foulds_distance(trees[0], trees[1]))
+
+first_10_trees = trees[:10]
+last_10_trees = trees[-10:]
+
+# Note that the TaxonNamespace is propagated to slices
+assert first_10_trees.taxon_namespace is trees.taxon_namespace
+assert last_10_trees.taxon_namespace is trees.taxon_namespace
+
+
+print(id(trees[4]))
+print(id(trees[5]))
+trees[4] = trees[5]
+print(id(trees[4]))
+print(id(trees[5]))
+print(trees[4] in trees)
+
+trees.remove(trees[-1])
+tx = trees.pop()
+print(trees.index(trees[0]))
+
+trees.sort(key=lambda t:t.label)
+trees.reverse()
+trees.clear()
diff --git a/doc/source/examples/tree_list_ops2.py b/doc/source/examples/tree_list_ops2.py
new file mode 100644
index 0000000..05fd60e
--- /dev/null
+++ b/doc/source/examples/tree_list_ops2.py
@@ -0,0 +1,28 @@
+import dendropy
+from dendropy.calculate import treecompare
+
+trees = dendropy.TreeList.get(
+        path="pythonidae.random.bd0301.tre",
+        schema="nexus")
+
+print(len(trees))
+
+tree = dendropy.Tree.get(path="pythonidae.mle.nex", schema="nexus")
+
+# As we did not specify a |TaxonNamespace| instance to use above, by default
+# 'tree' will get its own, distinct |TaxonNamespace|
+original_tree_taxon_namespace = tree.taxon_namespace
+print(id(original_tree_taxon_namespace))
+assert tree.taxon_namespace is not trees.taxon_namespace
+
+# This operation adds the |Tree|, 'tree', to the |TreeList|, 'trees',
+# *and* migrates the |Taxon| objects of the tree over to the |TaxonNamespace|
+# of 'trees'. This will break things if the tree is contained in another
+# |TreeList| with a different |TaxonNamespace|!
+trees.append(tree)
+
+# In contrast to before, the |TaxonNamespace| of 'tree' is now the same
+# as the |TaxonNamespace| of 'trees'. The |Taxon| objects have been imported
+# and/or remapped based on their label.
+assert tree.taxon_namespace is trees.taxon_namespace
+print(id(original_tree_taxon_namespace))
diff --git a/doc/source/examples/tree_rootings1.py b/doc/source/examples/tree_rootings1.py
new file mode 100644
index 0000000..ae199c7
--- /dev/null
+++ b/doc/source/examples/tree_rootings1.py
@@ -0,0 +1,36 @@
+#! /usr/bin/env python
+
+import dendropy
+
+# tree assumed to be unrooted unless '[&R]' is specified
+tree = dendropy.Tree.get(path="pythonidae.mle.nex", schema="nexus")
+
+# same as above, explicit
+tree = dendropy.Tree.get(
+        path="pythonidae.mle.nex",
+        schema="nexus",
+        rooting="default-unrooted",
+        )
+
+# forces tree to be rooted
+tree = dendropy.Tree.get(path="pythonidae.mle.nex",
+        schema="nexus",
+        rooting="force-rooted")
+
+# forces tree to be unrooted
+tree = dendropy.Tree.get(path="pythonidae.mle.nex",
+        schema="nexus",
+        rooting="force-unrooted")
+
+# forces trees to be rooted
+tree_list = dendropy.TreeList.get(
+        path="pythonidae.mcmc.nex",
+        schema="nexus",
+        rooting="force-rooted")
+
+# forces trees to default to rooted, unless '[&U]' is specified
+tree_list = dendropy.TreeList()
+tree_list.read(
+    path="pythonidae.mcmc.nex",
+    schema="nexus",
+    rooting="default-rooted")
diff --git a/doc/source/examples/treemeasures1.py b/doc/source/examples/treemeasures1.py
new file mode 100644
index 0000000..4bfa005
--- /dev/null
+++ b/doc/source/examples/treemeasures1.py
@@ -0,0 +1,30 @@
+import collections
+import dendropy
+from dendropy.calculate import treemeasure
+from dendropy.calculate import statistics
+
+# Since we do not want to waste memory by keeping the actual trees around
+# after we are done calculating the statistics, we use the tree yielder
+# instead of:
+#       dendropy.TreeList.get(
+#           path="pythonidae.beast-mcmc.trees",
+#           schema="nexus",
+#           tree_offset=200)
+
+tree_stats = collections.defaultdict(list)
+for tree_idx, tree in enumerate(dendropy.Tree.yield_from_files(
+            files=["pythonidae.beast-mcmc.trees"],
+            schema="nexus")):
+    if tree_idx < 200:
+        continue # burnin
+    tree_stats["B1"].append(treemeasure.B1(tree))
+    tree_stats["colless"].append(treemeasure.colless_tree_imbalance(tree))
+    tree_stats["PBH"].append(treemeasure.pybus_harvey_gamma(tree))
+    tree_stats["sackin"].append(treemeasure.sackin_index(tree))
+    tree_stats["treeness"].append(treemeasure.treeness(tree))
+
+for key in tree_stats:
+    values = tree_stats[key]
+    mean, var = statistics.mean_and_sample_variance(values)
+    hpd = statistics.empirical_hpd(values)
+    print("{:15}: mean = {}, variance = {}, hpd = ({}, {})".format(key, mean, var, hpd[0], hpd[1]))
diff --git a/doc/source/examples/weightedrf1.py b/doc/source/examples/weightedrf1.py
new file mode 100644
index 0000000..c1c8e60
--- /dev/null
+++ b/doc/source/examples/weightedrf1.py
@@ -0,0 +1,22 @@
+import dendropy
+from dendropy.calculate import treecompare
+
+s1 = "((t5:0.161175,t6:0.161175):0.392293,((t4:0.104381,(t2:0.075411,t1:0.075411):0.028969):0.065840,t3:0.170221):0.383247);"
+s2 = "((t5:2.161175,t6:0.161175):0.392293,((t4:0.104381,(t2:0.075411,t1:0.075411):1):0.065840,t3:0.170221):0.383247);"
+
+tns = dendropy.TaxonNamespace()
+
+tree1 = dendropy.Tree.get(
+        data=s1,
+        schema='newick',
+        taxon_namespace=tns)
+tree2 = dendropy.Tree.get(
+        data=s2,
+        schema='newick',
+        taxon_namespace=tns)
+
+## Weighted Robinson-Foulds distance = 2.971031
+print(treecompare.weighted_robinson_foulds_distance(tree1, tree2))
+
+## Compare to unweighted Robinson-Foulds distance: 0
+print(treecompare.symmetric_difference(tree1, tree2))
diff --git a/doc/source/glossary.rst b/doc/source/glossary.rst
new file mode 100644
index 0000000..2e72b9d
--- /dev/null
+++ b/doc/source/glossary.rst
@@ -0,0 +1,310 @@
+*************************************
+Glossary and Terminological Reference
+*************************************
+
+.. glossary::
+    :sorted:
+
+    ancestor nodes
+        On a :term:`tree`, the full set of :term:`nodes <node>` from which a
+        given :term:`node <node>` has descended (i.e., the given node's parent,
+        parent's parent, parent's parent's parent, and so on until the
+        :term:`root <seed node>` or the :term:`seed node` of the tree, inclusive).
+
+    basal
+    basal node
+        On a :term:`tree`, a :term:`node` that is ancestral with respect to
+        another node. :term:`Leaf or tip nodes <leaf node>` can never be basal,
+        except if the tree consists of only a single node (i.e., the
+        :term:`seed node` is a :term:`leaf node`). The basal-most node of a
+        tree is the :term:`seed or root node <seed node>`.
+
+    basal bifurcation
+        On a :term:`tree`, a :term:`seed or root <seed node>` which has exactly
+        two :term:`child nodes <child node>`. On a :term:`rooted tree <rooted
+        tree>` this (can) reflect actual information, but on an :term:`unrooted
+        tree <unrooted tree>`, this is actually an artifact, as the :term:`seed
+        or root <seed node>` does not actually exist as it is just an
+        algorithmic contrivance. In practical terms, this means that
+        :term:`bipartition <bipartition>` calculations and operations on
+        :term:`unrooted trees <unrooted tree>` with basal bifurcations may
+        result in undetermined and erroneous behavior. Thus, typically, on
+        unrooted trees the basal bifurcation is collapsed to yield a basal
+        trifurcation.
+
+    basal trifurcation
+        On a :term:`tree`, a :term:`seed or root <seed node>` which has exactly
+        three :term:`child nodes <child node>`. While this might occur in both
+        :term:`rooted <rooted tree>` and :term:`unrooted <unrooted tree>` trees,
+        this is typically the result of :term:`collapsing a basal bifurcation
+        <basal bifurcation>` on :term:`unrooted trees <unrooted tree>`.
+
+    bifurcation
+    bifurcating node
+        On a :term:`tree`, a :term:`node` with exactly two :term:`child nodes
+        <child node>`. Also known as an "out-degree two" node.
+
+    bifurcating tree
+        A :term:`tree` in which all :term:`nodes <node>` are :term:`bifurcating
+        <bifurcation>`.
+
+    bipartition
+    split
+        On a :term:`tree`, a partition of the leaf set of a tree into two
+        mutually-exclusive and collectively-comprehensive subsets. It
+        corresponds to an edge of a tree: if we imagine "splitting" or cutting
+        a tree into two trees at a given edge, the leaf sets of each of the new
+        trees form the two subsets of the partitioning. A bipartition is often
+        referred to as a split, especially in the context of :term:`unrooted trees
+        <unrooted tree>`. Bipartitions are described in detail in
+        the :doc:`DendroPy Primer <primer/bipartitions>`.
+
+    child node
+        On a :term:`tree`, a :term:`node` descending from another :term:`node`.
+        A node on a tree may have zero or more child nodes. A node with zero
+        child nodes is termed a :term:`leaf node`.
+
+    descendent nodes
+        On a :term:`tree`, the full set of nodes that are descended from a
+        given node (i.e., the given node's children and children's children and
+        children's children's children, and so on, until the :term:`leaf nodes
+        <leaf node>` of the tree, inclusive).
+
+    edge
+    branch
+        A connection between two :term:`nodes <node>` on a :term:`tree`,
+        modeled in DendroPy by the |Edge| class. A synonym for "branch" in the
+        context of phylogenetic trees.
+
+        An edge connects a "tail node" (also called an origin or source node)
+        to a "head node" (also called a target or destination node). The tail
+        node is called the ":term:`parent <parent node>`" of the head node,
+        while the head node is called the ":term:`child <child node>`" of the
+        tail node.
+
+        An edge is said to subtend or be incident to the node *to* which an
+        edge connects, i.e., the head node.
+        Conversely, the edges connecting a node to its children are called
+        outgoing edges with respect to the tail node.
+
+        On a :term:`tree`, every node has one and exactly one incident edge,
+        and every edge has a :term:`head node`. On the other hand, not every
+        node may have outgoing edges (e.g, :term:`leaf nodes <leaf node>`),
+        and not every edge has a :term:`tail node` (e.g., :term:`root nodes
+        <seed node>`). As such, edges can be thought of "belonging" to their
+        head nodes, due to this one-to-one relationship.
+
+        Edges can have any number of properties or attributes associated with
+        them, representing a diverse range of phenomena, but the most important
+        one is the edge :term:`length or weight <edge length>`.
+
+    edge length
+    edge weight
+        A (typically) quantitative value associated with an :term:`edge`. This
+        value may represent any number of things, but most typically is used to
+        model time, evolutionary distance, or expected rates of substitution on
+        a phylogenetic tree. An :term:`edge` may have many values, quantitative
+        or otherwise, associated with it, but the length or weight is special
+        as it usually denotes the relationship between the :term:`tail node`
+        and :term:`head node` related by an :term:`edge`.
+
+    incident edge
+    subtending edge
+        An :term:`edge` that connects *to* a particular :term:`node` is termed
+        the incident or subtending edge of that node.
+
+    internal edge
+        An :term:`edge` that has an :term:`internal node` as a :term:`head
+        node`.
+
+    internal node
+        A :term:`node` that has :term:`child nodes <child node>`. Also known as
+        an inner node or branch node.
+
+    head node
+    target node
+    destination node
+        On an :term:`edge` connecting two :term:`nodes <node>`, the node *to*
+        which the edge extends to link *from* the other node, termed the
+        :term:`tail node`. The head node is the :term:`child node` of the
+        :term:`tail node`,  and the :term:`tail node` is the :term:`parent
+        node` of the head node. The :term:`edge` is said to subtend, or be
+        incident, to the head node.
+
+    leaf edge
+    terminal edge
+    external edge
+    outer edge
+        An :term:`edge` that has an :term:`leaf node` as a :term:`head
+        node`.
+
+    leaf node
+    tip node
+    terminal node
+    external node
+    outer node
+        A :term:`node` that does not have any :term:`child nodes <child node>`
+        descending from it. Also known as a tip, outer node, external node, or
+        terminal node.
+
+    node
+        A fundamental element of information or data on a :term:`tree`,
+        connected to other such elements in a parent-child relationship by
+        :term:`edges <edge>`.
+        In DendroPy, a node is modeled by the |Node| class.
+        A node has at most one :term:`parent <parent node>`, to which it is
+        connected by its :term:`incident or subtending <incident edge>` edge.
+        A node may have zero or more :term:`children <child node>`, to each of
+        which it is connected by an independent :term:`outgoing edge <outgoing
+        edge>`.
+        A node can be associated with zero or more informational or data
+        values. In a phylogenetic :term:`tree`, one of these values is often a
+        :term:`taxon`, but many other aspects of information can be modeled.
+
+    node depth
+        On a :term:`tree`, the depth of a node is the length of the
+        :term:`path` to its :term:`root <seed node>` (i.e., its root path). The
+        root node has a depth zero.
+
+    outgoing edge
+        An :term:`edge` that connects *from* a particular :term:`node` (to,
+        e.g., its :term:`children <child node>` is said to be an outgoing edge
+        for that node.
+
+    parent node
+    ancestor node
+        On a :term:`tree`, a :term:`node` from which a given node
+        immediately descends.
+
+    seed node
+    root node
+        The first or topmost :term:`node` in a tree. This is also more commonly
+        called the "root" or "root node" of the tree, though, in the strictest
+        sense, this equivalence is only valid when the tree is explicitly
+        :term:`rooted <rooted tree>`. Both :term:`rooted trees <rooted tree>`
+        and :term:`unrooted trees <unrooted tree>` have seed nodes. In rooted
+        trees, the seed node is the root of the tree.
+
+        By definition, the seed node does not have a :term:`parent node`.  It
+        is the node at which algorithms on the tree begin, since as a data
+        structure, one can only pass from :term:`parent node` to :term:`child
+        nodes <child node>`.  If the tree is :term:`rooted <rooted tree>`, then
+        the seed node is equivalent to the root of the tree.
+
+    node height
+        The height of a node is the length of the longest downward path to a
+        leaf from that node. The height of the root is the height of the tree.
+        The depth of a node is the length of the path to its root (i.e., its
+        root path). The root node has depth zero,
+        leaf nodes have height zero, and a tree with only a single node
+        (hence both a root and leaf) has depth and height zero. Conventionally,
+        an empty tree (tree with no nodes, if such are allowed) has depth and
+        height -1.
+
+    path
+        The sequence of :term:`edges <edge>` connecting two :term:`nodes
+        <node>` on a :term:`tree`.
+
+    path length
+    path weight
+    unweighted path
+    weighted path
+        The number or sum of lengths of the :term:`edges <edge>` connecting two
+        :term:`nodes <node>` on a :term:`tree`. An *unweighted* path length is
+        just the number of :term:`edges <edge>`, while a *weighted* path length or
+        path weight is the sum of :term:`edge lengths <edge length>`.
+
+    rooted tree
+        A state of a :term:`tree` in which its :term:`seed node` represents the
+        most-recent common ancestor of all the :term:`leaf nodes <leaf node>`
+        on the tree. Rooted trees have a distinct directionality, and
+        ancestor-descendent relationships are not invertible.
+
+    schema
+        The format or syntax of serialized phylogenetic or related data.
+        Examples are NEXUS, NEWICK, Phylip, NeXML, etc. A "schema" is
+        DendroPy-speak for "format" (we cannot use the argument name "format"
+        because this is a Python built-in, and hence we adopted this
+        terminology for consistency), and is typically specified using one of
+        a set of predefined string values, known as "schema specification
+        strings". Supported reading (input) schemas are described :ref:`here
+        <Specifying_the_Data_Source_Format>` while supported writing (output)
+        schemas are described :ref:`here <Specifying_the_Data_Writing_Format>`.
+
+    sibling node
+    sister node
+        Two or more :term:`nodes <node>` that share the same :term:`parent
+        node`, i.e., are descended from the same node, are said to be siblings
+        or sisters with respect to each other.
+
+    subtree
+        A subtree of a :term:`tree` is a tree consisting of a :term:`node` in
+        the tree and all its :term:`descendents <descendent nodes>`. Each
+        :term:`node` on a :term:`tree` thus corresponds to the :term:`root
+        <seed node>` of the subtree that it determines.
+
+    tail node
+    source node
+    origin node
+        On an :term:`edge` connecting two :term:`nodes <node>`, the node *from*
+        which the edge extends to link *to* the other node, termed the :term:`head node`.
+        The tail node is the :term:`parent node` of the :term:`head node`,  and
+        the :term:`head node` is the :term:`child node` of the tail node.
+        The edge is said to be an outgoing edge with respect to the tail node.
+
+    taxon
+    operational taxonomic unit
+    taxon concept
+    taxon name
+        A class of organisms being modeled, represented by a string label or
+        name that is guaranteed to be unique within a particular :term:`taxon
+        namespace`.
+
+    taxon namespace
+        A set of distinct and unique labels, each label mapping to one and
+        exactly one :term:`taxon`, used to relate data from across different
+        data sources to each other by reference to a :term:`taxon concept`.
+
+    tree
+    arborescence
+        A tree is a set of :term:`nodes <node>` connected to each other in
+        parent-child relationships given by a set of :term:`edges <edge>`. In
+        DendroPy, a tree is modeled by the |Tree| class. A tree is a
+        specialization of a `graph
+        <http://en.wikipedia.org/wiki/Graph_%28mathematics%29>`_, constrained
+        such that:
+
+            1. All its :term:`edges <edge>` are directional.
+            2. It has no `directed cycles <http://en.wikipedia.org/wiki/Cycle_graph#Directed_cycle_graph>`_ .
+            3. The directionality is from the :term:`root <seed node>` (or
+               ":term:`seed node`" in DendroPy's parlance) to the
+               :term:`tips <leaf node>`.
+
+        The first and second constraints alone result in a `directed acyclic graph
+        <http://en.wikipedia.org/wiki/Directed_acyclic_graph>`_ .
+        The addition of the third constraint results in an `arborescence
+        <http://en.wikipedia.org/wiki/Arborescence_(graph_theory)>`_, which is
+        strictly synonymous with "tree".
+
+    unifurcation
+    unifurcating node
+        On a :term:`tree`, a :term:`node` with exactly one :term:`child node`.
+        Also known as an "out-degree one" node. In some cases, unifurcations
+        may be used to represent information (e.g., a change in some value
+        associated with edges, such as population size or a rate of some kind),
+        but they more typically arise as side-effect of tree manipulation
+        operations, such as re-rooting or pruning. Though DendroPy has no
+        problem in handling unifurcations, trees with unifurcating nodes are
+        considered pathological in many contexts and operations, and DendroPy
+        thus provides facilities for suppressing unifurcations, either on
+        existing trees or as they occur as a side-effect of other operations.
+
+    unrooted tree
+        A state of a :term:`tree` in which its :term:`seed node` is an
+        algorithmic artifact, and does not necessarily represent the most-recent
+        common ancestor of all the :term:`leaf nodes <leaf node>` on the tree.
+        In an unrooted tree, ancestor-descendent relationships are also
+        algorithmic artifacts and can be (conceptually) inverted without
+        changing the information represented by the tree, though this operation
+        usually requires a fundamental restructuring of the computational
+        representation of the tree.
diff --git a/doc/source/index.rst b/doc/source/index.rst
new file mode 100644
index 0000000..d888aef
--- /dev/null
+++ b/doc/source/index.rst
@@ -0,0 +1,118 @@
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+DendroPy Phylogenetic Computing Library
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+.. toctree::
+    :maxdepth: 3
+    :hidden:
+
+    Introduction <self>
+    downloading.rst
+    primer/index.rst
+    library/index.rst
+    schemas/index.rst
+    programs/index.rst
+    glossary.rst
+    migration.rst
+    changes.rst
+
+DendroPy is a |Python|_ library for phylogenetic computing.
+It provides classes and functions for the simulation, processing, and manipulation of phylogenetic trees and character matrices, and supports the reading and writing of phylogenetic data in a range of formats, such as NEXUS, NEWICK, NeXML, Phylip, FASTA, etc.
+Application scripts for performing some useful phylogenetic operations, such as data conversion and tree posterior distribution summarization, are also distributed and installed as part of the library.
+DendroPy can thus function as a stand-alone library for phylogenetics, a component of more complex multi-library phyloinformatic pipelines, or as a scripting "glue" that assembles and drives such pipelines.
+
+Prerequisites
+=============
+
+DendroPy is a pure-Python library with no dependencies, and runs under any version of Python 3 and Python 2.7. If you want to run DendroPy under Python version of less than 2.7, you should download the `DendroPy legacy release <https://github.com/jeetsukumaran/DendroPy/releases/tag/v3.12.1>`_.
+
+.. versionchanged:: 4.0.0
+   Python 2.4, 2.5, and 2.6 support removed.
+   Python 3.x support added.
+
+Installing
+==========
+
+DendroPy is fully easy-installable and can be installed using |pip|_::
+
+    $ sudo pip install -U dendropy
+
+Note: the "``sudo``" command should only be used if installing system-wide on a machine on which you have administrative privileges. Otherwise, you would use the "``--user``" flag for a local user install::
+
+    $ pip install --user -U dendropy
+
+If you do not have |pip|_ installed, you should *definitely* `install it <https://pip.pypa.io/en/latest/installing.html>`_ !
+Other ways of obtaining and installing DendroPy (e.g., by downloading the |dendropy_source_archive|_, or by cloning the `DendroPy Git repository <http://github.com/jeetsukumaran/DendroPy>`_), are discussed in detail in the ":doc:`/downloading`" section.
+
+Documentation
+==============
+
+    :doc:`Downloading and Installing DendroPy </downloading>`
+
+        The many ways to get DendroPy up and running on your system.
+
+    :doc:`DendroPy Primer </primer/index>`
+
+        A detailed primer on how to use the DendroPy library, with lots of annotated practical examples and code walk-throughs.
+
+    :doc:`DendroPy Library API Reference </library/index>`
+
+        The technical details of the modules, classes and methods of the DendroPy library. Almost all of this information is also readily available from the |Python|_ interpreter by invoking ``help()`` on an object or name.
+
+    :doc:`SumTrees User Manual </programs/sumtrees>`
+
+        How to use SumTrees, an application script bundled with DendroPy that facilitates the summarization of non-parametric bootstrap or Bayesian posterior probability support for splits or clades on phylogenetic trees.
+
+    :doc:`Migration Guide </migration>`
+
+        DendroPy 4 is improved in *many* ways over DendroPy 3. However, some of the changes are significant enough to break code written for DendroPy 3. This reference provides an overview of these changes, as well as notes on how to fix, handle or otherwise work-around issues that might result from trying to run code written for DendroPy 3 under DendroPy 4.
+
+
+    :doc:`Change History </changes>`
+
+        A summary of major changes (new features, bug fixes, bug creations, etc.) of each release of DendroPy.
+
+Feature Requests
+================
+
+We are constantly expanding and improving the DendroPy library.
+And we are constantly looking for ideas on *how* to expand and improve DendroPy.
+If you have an idea that you would like to see implemented in DendroPy, or a need/requirement for some sort of functionality that DendroPy does not provide or does not provide easily, or, for that matter, a suggestion/idea for how something DendroPy already does that can be done better, *please* let us know by posting it as an issue on the |dendropy_issues|_ page.
+We take these very seriously, and look forward to ideas/suggestions/requests/critiques such as these, not only because they improve the library for the entire community of users, but also because they sometimes provide for interesting side-projects for us.
+
+Bug Reports and Other Issues
+============================
+
+Everyone has issues.
+And sometimes, not pointing them out is enabling rather than helping. So, if you encounter or think you have encountered a problem in the way DendroPy works, please report it on the |dendropy_issues|_ page.
+The DendroPy library has a really large suite of tests, but it is also a really, really, really, *really* large and complex library, and there are many nooks and crannies in which can scurry many crooks and nannies, so you will be doing us *and* the community a really generous favor by reporting bugs and problems.
+Even if you are not sure if you are actually dealing with a bug, please do not hesitate to report it as an issue: false positives are better than false negatives in this context.
+
+Having said that, *please* do take the trouble to write-up a decent bug report.
+It really is quite frustrating to receive vague and lackadaisical "reports" that are little more than stream-of-consciousness responses to stimuli from the monitor that were almost accidentally blurted into the keyboard (e.g., "doesn't work").
+At the very least, you should provide:
+
+    -   A brief explication of what occurred, describing the operation that you were trying to do, and the result (or non-result) that you observed that led you to think there is an error.
+    -   The environment in which this error occurred. This includes, at the very least, the operating system, as well as things such as the python version, DendroPy version, the installation locations of DendroPy and the Python libraries, etc. Apart from the operating system, you can retrieve all this information by running the following command::
+
+            $ python -m dendropy
+
+        The version of SumTrees that ships with DendroPy 4 onwards includes a special flag, "``--describe``" (i.e., you would type "``sumtrees.py --describe``"), that also provides this information.
+        Including the details of either of these commands in their entirety along with the operating system is not only useful, but essential.
+    -   We have gone to great lengths to write sensible and meaningful error messages, and that chunk of text that usually appears on the screen on error (i.e., the "stack trace") is packed with useful information, and should be included in its entirety should it appear.
+    -   We need to be able to reproduce the error on our side to be able to fix. Thus, providing a *self-contained* and *minimum* example of the error is crucial. This might involve a little bit of work on your side, to extract the essential bits of code from their context and to ensure that it can run, at least, up to the point where it does not due to the error. The "steps to reproduce" section of a bug report is so important and useful that anyone who makes sure to include a good one  [...]
+
+Help, Discussion, and Comments
+==============================
+
+The |dendropy_users|_ discussion group is available for general discussion on the usage of the DendroPy library.
+If you want more information on how to use a particular feature or carry out specific task or want to make some general comments on the DendroPy library, you should post a message to the |dendropy_users|_ discussion group.
+I follow and respond to messages on this group fairly regularly, so you should get a response within 24 hours.
+
+
+Of course, we accept all bug reports, bad or good, and I guess we would prefer to have a bad bug report than no report at all, so if all the above seem enough of a hassle to discourage you from posting an issue at all, feel free to go ahead and write it up any way you see fit.
+
+.. include:: citation.inc
+.. include:: license.inc
+.. include:: acknowledgements.inc
+
diff --git a/doc/source/library/basemodel.rst b/doc/source/library/basemodel.rst
new file mode 100644
index 0000000..801dd4a
--- /dev/null
+++ b/doc/source/library/basemodel.rst
@@ -0,0 +1,38 @@
+**************************************************************************************
+:mod:`dendropy.datamodel.basemodel`: Core Infrastructure for Phylogenetic Data Objects
+**************************************************************************************
+
+.. module:: dendropy.datamodel.basemodel
+
+.. toctree::
+    :maxdepth: 2
+
+The :class:`DataObject` Class
+=============================
+.. autoclass:: dendropy.datamodel.basemodel.DataObject
+    :members:
+    :inherited-members:
+
+The :class:`Annotable` Class
+=============================
+.. autoclass:: dendropy.datamodel.basemodel.Annotable
+    :members:
+    :inherited-members:
+
+The :class:`Annotation` Class
+=============================
+.. autoclass:: dendropy.datamodel.basemodel.Annotation
+    :members:
+    :inherited-members:
+
+The :class:`AnnotationSet` Class
+================================
+.. autoclass:: dendropy.datamodel.basemodel.AnnotationSet
+    :members:
+    :inherited-members:
+
+Supporting Classes
+==================
+.. autoclass:: dendropy.utility.bibtex.BibTexEntry
+    :members:
+    :inherited-members:
diff --git a/doc/source/library/birthdeath.rst b/doc/source/library/birthdeath.rst
new file mode 100644
index 0000000..8617bb1
--- /dev/null
+++ b/doc/source/library/birthdeath.rst
@@ -0,0 +1,8 @@
+***********************************************************************
+:mod:`dendropy.model.birthdeath`: The Birth-Death and Related Processes
+***********************************************************************
+
+.. automodule:: dendropy.model.birthdeath
+    :members:
+
+
diff --git a/doc/source/library/charmatrixmodel.rst b/doc/source/library/charmatrixmodel.rst
new file mode 100644
index 0000000..ea87027
--- /dev/null
+++ b/doc/source/library/charmatrixmodel.rst
@@ -0,0 +1,88 @@
+***************************************************************************
+:mod:`dendropy.datamodel.charmatrixmodel`: Character Sequences and Matrices
+***************************************************************************
+
+.. module:: dendropy.datamodel.charmatrixmodel
+
+.. toctree::
+    :maxdepth: 2
+
+Character Sequences
+===================
+
+.. autoclass:: dendropy.datamodel.charmatrixmodel.CharacterDataSequence
+    :members:
+
+Character Types
+===============
+
+.. autoclass:: dendropy.datamodel.charmatrixmodel.CharacterType
+    :members:
+
+Character Subsets
+=================
+
+.. autoclass:: dendropy.datamodel.charmatrixmodel.CharacterSubset
+    :members:
+
+
+Character Matrices
+==================
+
+The |CharacterMatrix| Class
+----------------------------------
+
+.. autoclass:: dendropy.datamodel.charmatrixmodel.CharacterMatrix
+    :members:
+    :inherited-members:
+
+|ContinuousCharacterMatrix|: Continuous Data
+---------------------------------------------------
+
+.. autoclass:: dendropy.datamodel.charmatrixmodel.ContinuousCharacterMatrix
+    :members:
+    :inherited-members:
+
+|DnaCharacterMatrix|: DNA Data
+-------------------------------------
+
+.. autoclass:: dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix
+    :members:
+    :inherited-members:
+
+|RnaCharacterMatrix|: RNA Data
+-------------------------------------
+
+.. autoclass:: dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix
+    :members:
+    :inherited-members:
+
+|ProteinCharacterMatrix|: Protein (Amino Acid) Data
+----------------------------------------------------------
+
+.. autoclass:: dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix
+    :members:
+    :inherited-members:
+
+|RestrictionSitesCharacterMatrix|: Restriction Sites Data
+----------------------------------------------------------------
+
+.. autoclass:: dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix
+    :members:
+    :inherited-members:
+
+|InfiniteSitesCharacterMatrix| : Infinite Sites Data
+-----------------------------------------------------------
+
+.. autoclass:: dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix
+    :members:
+    :inherited-members:
+
+|StandardCharacterMatrix|: "Standard" Data
+-------------------------------------------------
+
+.. autoclass:: dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix
+    :members:
+    :inherited-members:
+
+
diff --git a/doc/source/library/charstatemodel.rst b/doc/source/library/charstatemodel.rst
new file mode 100644
index 0000000..177e501
--- /dev/null
+++ b/doc/source/library/charstatemodel.rst
@@ -0,0 +1,19 @@
+**********************************************************************************
+:mod:`dendropy.datamodel.charstatemodel`: Character State Identities and Alphabets
+**********************************************************************************
+
+.. module:: dendropy.datamodel.charstatemodel
+
+.. toctree::
+    :maxdepth: 2
+
+The :class:`StateAlphabet` Class
+================================
+.. autoclass:: dendropy.datamodel.charstatemodel.StateAlphabet
+    :members:
+
+The :class:`StateIdentity` Class
+================================
+.. autoclass:: dendropy.datamodel.charstatemodel.StateIdentity
+    :members:
+
diff --git a/doc/source/library/coalescent.rst b/doc/source/library/coalescent.rst
new file mode 100644
index 0000000..1686f8d
--- /dev/null
+++ b/doc/source/library/coalescent.rst
@@ -0,0 +1,9 @@
+************************************************
+:mod:`dendropy.model.coalescent`: The Coalescent
+************************************************
+
+.. automodule:: dendropy.model.coalescent
+    :members:
+
+
+
diff --git a/doc/source/library/continuous.rst b/doc/source/library/continuous.rst
new file mode 100644
index 0000000..68f07db
--- /dev/null
+++ b/doc/source/library/continuous.rst
@@ -0,0 +1,8 @@
+****************************************************************
+:mod:`dendropy.model.continuous`: Continuous Character Evolution
+****************************************************************
+
+.. automodule:: dendropy.model.continuous
+    :members:
+
+
diff --git a/doc/source/library/datasetmodel.rst b/doc/source/library/datasetmodel.rst
new file mode 100644
index 0000000..bb007c3
--- /dev/null
+++ b/doc/source/library/datasetmodel.rst
@@ -0,0 +1,7 @@
+************************************************************************************************************
+:mod:`dendropy.datamodel.datasetmodel`: Datasets -- Aggregate Collections of Taxon, Character, and Tree Data
+************************************************************************************************************
+
+.. autoclass:: dendropy.datamodel.datasetmodel.DataSet
+    :members:
+    :inherited-members:
diff --git a/doc/source/library/discrete.rst b/doc/source/library/discrete.rst
new file mode 100644
index 0000000..1dad4f4
--- /dev/null
+++ b/doc/source/library/discrete.rst
@@ -0,0 +1,9 @@
+************************************************************
+:mod:`dendropy.model.discrete`: Discrete Character Evolution
+************************************************************
+
+.. automodule:: dendropy.model.discrete
+    :members:
+
+
+
diff --git a/doc/source/library/index.rst b/doc/source/library/index.rst
new file mode 100644
index 0000000..7fc5581
--- /dev/null
+++ b/doc/source/library/index.rst
@@ -0,0 +1,59 @@
+##########################
+DendroPy Library Reference
+##########################
+
+
+.. contents::
+    :local:
+    :backlinks: none
+
+Data Model
+==========
+
+.. toctree::
+    :maxdepth: 2
+
+    basemodel.rst
+    taxonmodel.rst
+    treemodel.rst
+    treecollectionmodel.rst
+    charstatemodel.rst
+    charmatrixmodel.rst
+    datasetmodel.rst
+
+Process Models
+==============
+
+.. toctree::
+    :maxdepth: 2
+
+    birthdeath.rst
+    coalescent.rst
+    continuous.rst
+    discrete.rst
+    parsimony.rst
+    reconcile.rst
+    treeshape.rst
+
+Calculations, Statistics, and Metrics
+=====================================
+
+.. toctree::
+    :maxdepth: 2
+
+    treemeasure.rst
+    treecompare.rst
+    treescore.rst
+    popgenstat.rst
+    probability.rst
+    statistics.rst
+
+Simulations
+===========
+
+.. toctree::
+    :maxdepth: 2
+
+    treesim.rst
+    popgensim.rst
+
diff --git a/doc/source/library/parsimony.rst b/doc/source/library/parsimony.rst
new file mode 100644
index 0000000..7990cc1
--- /dev/null
+++ b/doc/source/library/parsimony.rst
@@ -0,0 +1,9 @@
+****************************************************
+:mod:`dendropy.model.parsimony`: The Parsimony Model
+****************************************************
+
+.. automodule:: dendropy.model.parsimony
+    :members:
+
+
+
diff --git a/doc/source/library/popgensim.rst b/doc/source/library/popgensim.rst
new file mode 100644
index 0000000..6e71d8b
--- /dev/null
+++ b/doc/source/library/popgensim.rst
@@ -0,0 +1,10 @@
+*******************************************************************
+:mod:`dendropy.simulate.popgensim`: Population Genetics Simulations
+*******************************************************************
+
+.. automodule:: dendropy.simulate.popgensim
+    :members:
+
+
+
+
diff --git a/doc/source/library/popgenstat.rst b/doc/source/library/popgenstat.rst
new file mode 100644
index 0000000..4481ebb
--- /dev/null
+++ b/doc/source/library/popgenstat.rst
@@ -0,0 +1,8 @@
+********************************************************************
+:mod:`dendropy.calculate.popgenstat`: Population Genetics Statistics
+********************************************************************
+
+.. automodule:: dendropy.calculate.popgenstat
+    :members:
+
+
diff --git a/doc/source/library/probability.rst b/doc/source/library/probability.rst
new file mode 100644
index 0000000..5ddc79a
--- /dev/null
+++ b/doc/source/library/probability.rst
@@ -0,0 +1,8 @@
+**********************************************************
+:mod:`dendropy.calculate.probability`: General Probability
+**********************************************************
+
+.. automodule:: dendropy.calculate.probability
+    :members:
+
+
diff --git a/doc/source/library/reconcile.rst b/doc/source/library/reconcile.rst
new file mode 100644
index 0000000..65b6ec4
--- /dev/null
+++ b/doc/source/library/reconcile.rst
@@ -0,0 +1,9 @@
+*********************************************************************************
+:mod:`dendropy.model.reconcile`: Tree-Fitting (Gene/Species, Parasite/Host, etc.)
+*********************************************************************************
+
+.. automodule:: dendropy.model.reconcile
+    :members:
+
+
+
diff --git a/doc/source/library/statistics.rst b/doc/source/library/statistics.rst
new file mode 100644
index 0000000..9c3d3d6
--- /dev/null
+++ b/doc/source/library/statistics.rst
@@ -0,0 +1,8 @@
+********************************************************
+:mod:`dendropy.calculate.statistics`: General Statistics
+********************************************************
+
+.. automodule:: dendropy.calculate.statistics
+    :members:
+
+
diff --git a/doc/source/library/taxonmodel.rst b/doc/source/library/taxonmodel.rst
new file mode 100644
index 0000000..244a9be
--- /dev/null
+++ b/doc/source/library/taxonmodel.rst
@@ -0,0 +1,23 @@
+**********************************************************************************
+:mod:`dendropy.datamodel.taxonmodel`: Taxonomic Namespace Reference and Management
+**********************************************************************************
+
+.. module:: dendropy.datamodel.taxonmodel
+
+.. toctree::
+    :maxdepth: 2
+
+The :class:`TaxonNamespace` Class
+=================================
+.. autoclass:: dendropy.datamodel.taxonmodel.TaxonNamespace
+    :members:
+
+The :class:`Taxon` Class
+========================
+.. autoclass:: dendropy.datamodel.taxonmodel.Taxon
+    :members:
+
+The :class:`TaxonNamespaceAssociated` Class
+===========================================
+.. autoclass:: dendropy.datamodel.taxonmodel.TaxonNamespaceAssociated
+    :members:
diff --git a/doc/source/library/treecollectionmodel.rst b/doc/source/library/treecollectionmodel.rst
new file mode 100644
index 0000000..faef259
--- /dev/null
+++ b/doc/source/library/treecollectionmodel.rst
@@ -0,0 +1,54 @@
+*******************************************************************
+:mod:`dendropy.datamodel.treecollectionmodel`: Collections of Trees
+*******************************************************************
+
+.. module:: dendropy.datamodel.treecollectionmodel
+
+.. toctree::
+    :maxdepth: 2
+
+The |TreeList| Class
+====================
+.. autoclass:: dendropy.datamodel.treecollectionmodel.TreeList
+    :members:
+    :inherited-members:
+
+
+.. method:: TreeList.read(\*\*kwargs)
+
+.. method:: TreeList.put(\*\*kwargs)
+
+    Write out collection of trees to file.
+
+    :Mandatory Destination-Specification Keyword Arguments (one and exactly one of the following required):
+
+        - **file** (*file*) -- File or file-like object opened for writing.
+        - **path** (*str*) -- Path to file to which to write.
+
+    :Mandatory Schema-Specification Keyword Argument:
+
+        - **schema** (*str*) -- Identifier of format of data given by the "``file``", "``path``", "``data``", or "``url``" argument specified above: ":doc:`newick </schemas/newick>`", ":doc:`nexus </schemas/nexus>`", or ":doc:`nexml </schemas/nexml>`".
+
+    :Optional Schema-Specific Keyword Arguments:
+
+        -  These provide control over how the data is formatted, and supported argument names and values depend on the schema as specified by the value passed as the "``schema``" argument:
+            -   :ref:`"newick" (Newick) <schema_specific_keyword_arguments_writing_newick>`
+            -   :ref:`"nexus" (Nexus) <schema_specific_keyword_arguments_writing_nexus>`
+            -   :ref:`"nexml" (NeXML) <schema_specific_keyword_arguments_writing_nexml>`
+
+
+The |TreeArray| Class
+=====================
+.. autoclass:: dendropy.datamodel.treecollectionmodel.TreeArray
+    :members:
+
+The |SplitDistribution| Class
+=============================
+.. autoclass:: dendropy.datamodel.treecollectionmodel.SplitDistribution
+    :members:
+
+The |SplitDistributionSummarizer| Class
+=======================================
+.. autoclass:: dendropy.datamodel.treecollectionmodel.SplitDistributionSummarizer
+    :members:
+
diff --git a/doc/source/library/treecompare.rst b/doc/source/library/treecompare.rst
new file mode 100644
index 0000000..929206d
--- /dev/null
+++ b/doc/source/library/treecompare.rst
@@ -0,0 +1,8 @@
+*****************************************************************************
+:mod:`dendropy.calculate.treecompare`: Distances and Comparison Between Trees
+*****************************************************************************
+
+.. automodule:: dendropy.calculate.treecompare
+    :members:
+
+
diff --git a/doc/source/library/treemeasure.rst b/doc/source/library/treemeasure.rst
new file mode 100644
index 0000000..2e68e1d
--- /dev/null
+++ b/doc/source/library/treemeasure.rst
@@ -0,0 +1,7 @@
+************************************************************************
+:mod:`dendropy.calculate.treemeasure`: Unary Tree Statistics and Metrics
+************************************************************************
+
+.. automodule:: dendropy.calculate.treemeasure
+    :members:
+
diff --git a/doc/source/library/treemodel.rst b/doc/source/library/treemodel.rst
new file mode 100644
index 0000000..d118add
--- /dev/null
+++ b/doc/source/library/treemodel.rst
@@ -0,0 +1,33 @@
+******************************************
+:mod:`dendropy.datamodel.treemodel`: Trees
+******************************************
+
+.. module:: dendropy.datamodel.treemodel
+
+.. toctree::
+    :maxdepth: 3
+
+The :class:`Tree` Class
+=======================
+.. autoclass:: dendropy.datamodel.treemodel.Tree
+    :members:
+    :inherited-members:
+
+The :class:`Node` Class
+=======================
+.. autoclass:: dendropy.datamodel.treemodel.Node
+    :members:
+    :inherited-members:
+
+The :class:`Edge` Class
+=======================
+.. autoclass:: dendropy.datamodel.treemodel.Edge
+    :members:
+    :inherited-members:
+
+The :class:`Bipartition` Class
+==============================
+.. autoclass:: dendropy.datamodel.treemodel.Bipartition
+    :members:
+    :inherited-members:
+
diff --git a/doc/source/library/treescore.rst b/doc/source/library/treescore.rst
new file mode 100644
index 0000000..6fed2cc
--- /dev/null
+++ b/doc/source/library/treescore.rst
@@ -0,0 +1,8 @@
+*************************************************************************************
+:mod:`dendropy.calculate.treescore`: Scoring Trees with Reference to Data and a Model
+*************************************************************************************
+
+.. automodule:: dendropy.calculate.treescore
+    :members:
+
+
diff --git a/doc/source/library/treeshape.rst b/doc/source/library/treeshape.rst
new file mode 100644
index 0000000..38bdad2
--- /dev/null
+++ b/doc/source/library/treeshape.rst
@@ -0,0 +1,8 @@
+*****************************************************
+:mod:`dendropy.model.treeshape`: Models of Tree Shape
+*****************************************************
+
+.. automodule:: dendropy.model.treeshape
+    :members:
+
+
diff --git a/doc/source/library/treesim.rst b/doc/source/library/treesim.rst
new file mode 100644
index 0000000..3be9db8
--- /dev/null
+++ b/doc/source/library/treesim.rst
@@ -0,0 +1,10 @@
+**********************************************************************************************************
+:mod:`dendropy.simulate.treesim`: Unified Namespace Aggregating Functions and Classes for Tree Simulations
+**********************************************************************************************************
+
+.. automodule:: dendropy.simulate.treesim
+    :members:
+    :inherited-members:
+
+
+
diff --git a/doc/source/license.inc b/doc/source/license.inc
new file mode 100644
index 0000000..545e0a1
--- /dev/null
+++ b/doc/source/license.inc
@@ -0,0 +1,61 @@
+Copyright, License and Warranty
+===============================
+
+.. container:: copyrightblock
+
+    The DendroPy project, including all source code, documentation, images, and
+    other supporting files and media, either as provided online or otherwise
+    distributed, is:
+
+        .. container:: licenseblock
+
+            |dendropy_copyright|
+
+    1.  The DendroPy project source code and associated data files are licensed for
+        redistribution and use in source and binary forms, with or without
+        modification, are permitted provided that the following conditions
+        are met:
+
+            *   Redistributions of source code must retain the above copyright
+                notice, this list of conditions and the following disclaimer.
+
+            *   Redistributions in binary form must reproduce the above copyright
+                notice, this list of conditions and the following disclaimer in the
+                documentation and/or other materials provided with the distribution.
+
+            *   The names of its contributors may not be used to endorse or promote
+                products derived from this software without specific prior written
+                permission.
+
+        THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+        AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+        IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+        ARE DISCLAIMED. IN NO EVENT SHALL JEET SUKUMARAN OR MARK T. HOLDER BE
+        LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+        CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+        SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+        BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+        WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+        OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+        IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+    2.  All DendroPy documentation, (non-logo) images and other media, as
+        provided online or otherwise distributed, can be used in accordance with the
+        `Attribution-NonCommercial-ShareAlike 4.0 International License
+        <http://creativecommons.org/licenses/by-nc-sa/4.0/>`_ , which allows anyone
+        to copy, modify and redistribute modifications of all or part of the DendroPy
+        documentation as long as:
+
+            * The license is included with all copies or redistributions.
+            * the DendroPy documentation is attributed as the originating document.
+            * You must give appropriate credit, provide a link to the license, and
+              indicate if changes were made. You may do so in any reasonable
+              manner, but not in any way that suggests the licensor endorses you or
+              your use.
+            * You may not use the material for commercial purposes.
+            * If you remix, transform, or build upon the material, you must
+              distribute your contributions under the same license as the original.
+
+    3.  The `DendroPy logo <_static/dendropy.png>`_  can be used and reproduced
+        freely only to identify, advertise, or otherwise denote association
+        or usage of the DendroPy library.
diff --git a/doc/source/migration.rst b/doc/source/migration.rst
new file mode 100644
index 0000000..76092cc
--- /dev/null
+++ b/doc/source/migration.rst
@@ -0,0 +1,269 @@
+#######################################
+DendroPy 4 Changes and Migration Primer
+#######################################
+
+Introduction
+============
+
+* Updated for full (and exclusive) Python 3.x compatibility.
+
+* Faster, better, stronger! Core serialization/deserialization infrastructure
+  rewritten from the ground up, with *many* optimizations for speed and
+  reliability.
+
+Python Version Compatibility
+============================
+
+* Compatibility: Python 3 is fully supported. The only version of Python 2
+  supported is Python 2.7.
+
+    * Python 2: Python 2.7
+
+    * Python 3: Python 3.1, 3.2, 3.3, 3.4
+
+Library-Wide Changes
+====================
+
+Public Module Reorganization
+----------------------------
+
+A number of modules have been renamed, moved, or split into multiple modules.
+Calls to the old module should continue to work, albeit with warnings exhorting
+that you update to the latest configuration.
+
+    * ``dendropy.treecalc`` has been split into three submodules depending on
+      whether the statistic or value being calculated is on a single tree, a
+      single tree and a dataset, or two trees:
+
+        *   :mod:`dendropy.calculate.treemeasure`
+            For calculation of statistics, metrics, and values on a single tree.
+        *   :mod:`dendropy.calculate.treecompare`
+            For calculation of statistics, metrics, and values of two trees
+            (e.g., Robinson-Fould's distances).
+        *   :mod:`dendropy.calculate.treescore`
+            For calculation of statistics, metrics, and values of a tree with
+            reference to a dataset under some criterion.
+    * The functionality provided by ``dendropy.treesplit`` has been largely subsumed by the new |Bipartition| class.
+    * The functionality provided by ``dendropy.treesum`` has been largely subsumed by the new |TreeArray| class, a high-performance class for efficiently managing and carrying out operations on large collections of large trees.
+    * ``dendropy.reconcile`` has been moved to :mod:`dendropy.model.reconcile`.
+    * ``dendropy.coalescent`` has been moved to :mod:`dendropy.model.coalescent`.
+    * ``dendropy.popgenstat`` has been moved to :mod:`dendropy.calculate.popgenstat`.
+    * ``dendropy.treesim`` has been moved to :mod:`dendropy.simulate.treesim`.
+    * ``dendropy.popgensim`` has been moved to :mod:`dendropy.simulate.popgensim`.
+
+
+Behind-the-Scenes Module Reorganization
+---------------------------------------
+
+* In contrast to the above, the following changes *should* be opaque to most
+  normal usage and client code. Most of the names (classes/methods/variables)
+  in these modules were imported into the '``dendropy``' namespace, and this is
+  how all public code should be accessing them, *or* they were never exposed
+  (or meant to be exposed) for public usage in the first place. A list of
+  module changes:
+
+        +----------------------------------+-----------------------------------------------+
+        | DendroPy 3                       | DendroPy 4                                    |
+        +==================================+===============================================+
+        | :mod:`dendropy.dataobject.base`  | :mod:`dendropy.datamodel.basemodel`           |
+        +----------------------------------+-----------------------------------------------+
+        | :mod:`dendropy.dataobject.taxon` | :mod:`dendropy.datamodel.taxonmodel`          |
+        +----------------------------------+-----------------------------------------------+
+        | :mod:`dendropy.dataobject.tree`  | :mod:`dendropy.datamodel.treemodel`           |
+        |                                  | :mod:`dendropy.datamodel.treecollectionmodel` |
+        +----------------------------------+-----------------------------------------------+
+        | :mod:`dendropy.dataobject.char`  | :mod:`dendropy.datamodel.charstatemodel`,     |
+        |                                  | :mod:`dendropy.datamodel.charmatrixmodel`     |
+        +----------------------------------+-----------------------------------------------+
+
+
+Unique Object Identifier ("``oid``") Attributes Removed
+-------------------------------------------------------
+
+* The entire ``oid`` system ("object identifier"), i.e., the unique id assigned
+  to every data object, has been removed. This was an implementation artifact
+  from NEXML parsing that greatly slowed down a number of operations without
+  any benefit or utility for most normal operations.
+
+:class:`TaxonSet` is now :class:`TaxonNamespace`
+================================================
+
+* The ``dendropy.TaxonSet`` class has been renamed |TaxonNamespace|,
+  (and the corresponding ``taxon_set`` attribute of phylogenetic data objects
+  that reference a taxonomic context has been renamed ``taxon_namespace``).
+
+* The |TaxonNamespace| class replaces the :class:`TaxonSet` class as the
+  manager for the :class:`Taxon` objects.
+
+* The API is largely similar with the following differences:
+
+    * Calls to the
+      :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespace.__getitem__` and
+      :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespace.__delitem__` methods
+      (e.g. ``TaxonNamespace[x]``) now only accept integer values as arguments
+      (representing indexes into the list of :class:`Taxon` objects in the
+      internal array).
+
+    * :meth:`TaxonSet.has_taxon()` and :meth:`TaxonSet.has_taxa()` have been
+        replaced by :meth:`TaxonNamespace.has_taxon_label()` and
+        :meth:`TaxonNamespace.has_taxa_labels()` respectively.
+
+    * Various new methods for accessing and managing the collection of
+        :class:`Taxon` objects (e.g., :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespace.findall`, :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespace.remove_taxon`, :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespace.remove_taxon_label`,
+        :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespace.discard_taxon_label`, :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespace.__delitem__`, etc.)
+
+    * Numerous look-up methods took '``case_insensitive``' as an argument that
+      determined whether the look-up was case sensitive or not (when
+      retrieving, for example, a :class:`Taxon` object corresponding to a
+      particular label), which, if not specified, default to `False`, i.e. a
+      case-sensitive matching criterion. In all cases, this
+      has been changed to '``case_sensitive``' with a default of `True`. That
+      is, searches by default are still case-sensitive, but now you will have
+      to specify '``case_sensitive=False``' instead of '``case_insensitive=True``'
+      to perform a case-insensitive search. This change was for consistency
+      with the rest of the library.
+
+* In most cases, a simple global search-and-replace of "TaxonSet" with
+  "TaxonNamespace" and "``taxon_set``" with "``taxon_namespace``" should be
+  sufficient to bring existing code into line with DendroPy 4.
+
+* For legacy support, a class called :class:`TaxonSet` exists. This derives with no
+  modifications from :class:`TaxonNamespace`. Instantiating objects of this class
+  will result in warnings being emitted. As long as usage of :class:`TaxonSet`
+  conforms to the above API change notes, old or legacy code should continue
+  to work unchanged (albeit, with some warning noise). This support is
+  temporary and will be removed in upcoming releases: code should update to
+  using :class:`TaxonNamespace` as soon as expedient.
+
+* For legacy support, "``taxon_set``" continues to be accepted and processed as
+  an attribute name and keyword argument synonymous with "``taxon_namespace``".
+  Usage of this will result in warnings being emitted, but code should
+  continue to function as expected. This support is temporary and will be
+  removed in upcoming releases: code should update to using
+  "``taxon_namespace``" as soon as expedient.
+
+The :class:`Node` Class
+=======================
+
+* Constructor now only accepts keyword arguments (and ``oid`` is *not* one of them!).
+
+* :meth:`~dendropy.datamodel.treemodel.Node.add_child()` no longer accepts ``pos`` as an argument to indicate
+  position in which a child should be inserted. Use :meth:`~dendropy.datamodel.treemodel.Node.insert_child()`
+  which takes a position specified by ``index`` and a node specified by ``node``
+  for this functionality instead.
+
+The :class:`Edge` Class
+=======================
+
+* Constructor now only accepts keyword arguments (and ``oid`` is *not* one of them!).
+
+* Because ``tail_node`` is no longer an independent attribute but a dynamic
+  property, bound to :attr:`Node._parent_node` attribute of the ``head_node``
+  (see below), the :class:`Edge` constructor does *not* accept ``tail_node`` as
+  an argument.
+
+* The ``tail_node`` of an :class:`Edge` object is now a dynamic property,
+  referencing the :attr:`Node._parent_node` attribute of the
+  :attr:`Edge._head_node` of the :class:`Edge` object. So, now updating
+  :attr:`Edge._tail_node` of an :class:`Edge` object will set the
+  :attr:`Node._parent_node` of its :attr:`Edge._head_node` to the new value,
+  and vice versa.  This avoids the need for independent book-keeping logic to
+  ensure that :attr:`Node._parent_node` and :attr:`Edge._tail_node` are always
+  synchronized to reference the same :class:`Node` object and all the potential
+  errors this might cause.
+
+The :class:`Tree` Class
+=======================
+
+* Constructor no longer supports the ``stream`` keyword argument to construct
+  the new :class:`~dendropy.datamodel.treemodel.Tree` object from a data source. Use the factory class
+  method: :meth:`~dendropy.datamodel.treemodel.Tree.get_from_stream()` instead.
+
+* :meth:`~dendropy.datamodel.treemodel.Tree.nodes()` : sorting option removed; use ``sorted(tree.nodes())`` instead.
+
+* :meth:`~dendropy.datamodel.treemodel.Tree.node_set()` : removed; use ``set(tree.nodes())`` instead.
+
+* :meth:`~dendropy.datamodel.treemodel.Tree.edge_set()` : removed; use ``set(tree.edges())`` instead.
+
+* For consistency with :meth:`~dendropy.datamodel.treemodel.Tree.preorder_node_iter()`,
+  :meth:`~dendropy.datamodel.treemodel.Tree.postorder_node_iter()`, a number of iteration methods have been renamed.
+
+    +----------------------------------+-------------------------------------------------------------------+
+    | DendroPy 3                       | DendroPy 4                                                        |
+    +==================================+===================================================================+
+    | ``Tree.level_order_node_iter()`` | :meth:`~dendropy.datamodel.treemodel.Tree.levelorder_node_iter()` |
+    +----------------------------------+-------------------------------------------------------------------+
+    | ``Tree.level_order_edge_iter()`` | :meth:`~dendropy.datamodel.treemodel.Tree.levelorder_edge_iter()` |
+    +----------------------------------+-------------------------------------------------------------------+
+    | ``Node.level_order_iter()``      | :meth:`~dendropy.datamodel.treemodel.Node.levelorder_iter()`      |
+    +----------------------------------+-------------------------------------------------------------------+
+    | ``Tree.age_order_node_iter()``   | :meth:`~dendropy.datamodel.treemodel.Tree.ageorder_node_iter()`   |
+    +----------------------------------+-------------------------------------------------------------------+
+    | ``Node.age_order_iter()``        | :meth:`~dendropy.datamodel.treemodel.Node.ageorder_iter()`        |
+    +----------------------------------+-------------------------------------------------------------------+
+    | ``Tree.leaf_iter()``             | :meth:`~dendropy.datamodel.treemodel.Tree.leaf_node_iter()`       |
+    +----------------------------------+-------------------------------------------------------------------+
+
+  The old names are still supported for now (with warnings being emitted),
+  but new code should start using the newer names.  In addition, support for
+  in-order or infix tree traversal has been added:
+  :meth:`~dendropy.datamodel.treemodel.Tree.inorder_node_iter`, :meth:`~dendropy.datamodel.treemodel.Tree.inorder_edge_iter()`.
+
+* Instead of ``tree_source_iter`` and ``multi_tree_source_iter``, use
+  :meth:`~dendropy.datamodel.treemodel.Tree.yield_from_files`
+
+NEWICK-format Reading
+=====================
+
+* The ``suppress_external_taxon_labels`` and ``suppress_external_node_labels`` keyword
+  arguments have been replaced by ``suppress_leaf_taxon_labels`` and
+  ``suppress_leaf_node_labels``, respectively. This is for consistency with the
+  rest of the library (including writing in NEWICK-format), which uses the term
+  "leaf" rather than "external".
+
+* The various boolean rooting directive switches (``as_rooted``,
+  ``default_as_rooted``, etc.) have been replaced by a single argument:
+  ``rooting``. This can take on one of the following (string) values:
+
+    * rooting="default-unrooted"
+        Interpret trees following rooting token ("``[&R]``" for rooted,
+        "``[&U]``" for unrooted) if present; otherwise, interpret trees as
+        unrooted.
+    * rooting="default-rooted"
+        Interpret trees following rooting token ("``[&R]``" for rooted,
+        "``[&U]``" for unrooted) if present; otherwise, interpret trees as
+        rooted.
+    * rooting="force-unrooted"
+        Unconditionally interpret all trees as unrooted.
+    * rooting="force-rooted"
+        Unconditionally interpret all trees as rooted.
+
+  The value of the "``rooting``" argument defaults to "default-unrooted", i.e.,
+  all trees are assumed to be unrooted unless a rooting token is present that
+  explicitly specifies the rooting state.
+
+NEWICK-format Writing
+=====================
+
+* Previously, if ``annotations_as_nhx`` was `True`, metadata annotations would
+  be written out even if ``suppress_annotations`` was `True`. Now,
+  ``suppress_annotations`` must be `False` for annotations to be written out,
+  even if ``annotations_as_nhx`` is `True`.
+
+The :class:`DataSet` Class
+==========================
+
+* Constructor no longer supports the ``stream`` keyword argument to construct
+  the new :class:`DataSet` object from a data source. Use the factory class
+  method: :meth:`DataSet.get_from_stream()` instead.
+
+* Constructor only accepts one unnamed (positional) argument: either a
+  :class:`DataSet` instance to be cloned, or an iterable of
+  :class:`TaxonNamespace`, :class:`TreeList`, or
+  :class:`CharacterMatrix`-derived instances to be composed (added) into the
+  new :class:`DataSet` instance.
+
+* :class:`TaxonNamespace` no longer managed.
+
+
+
diff --git a/doc/source/primer/bipartitions.rst b/doc/source/primer/bipartitions.rst
new file mode 100644
index 0000000..841f33a
--- /dev/null
+++ b/doc/source/primer/bipartitions.rst
@@ -0,0 +1,404 @@
+************
+Bipartitions
+************
+
+Many tree statistics and operations in DendroPy use the *bipartition encoding*
+of a :class:`Tree` instance in the background, including, for example:
+
+    -   tree statistics and metrics
+    -   tree comparisons
+    -   tree scoring
+
+By default, the DendroPy functions assume that bipartitions are *not* encoded,
+or are not up-to-date with respect to the current tree structure, resulting in
+their recalculation *every* time. This is computationally inefficient, and you
+want to avoid it if, indeed, the bipartition encoding of a tree is current. You
+can control whether or not these service functions recalculate the bipartition
+encoding by passing in the argument ``is_bipartitions_updated=True`` to
+suppress the recalculation or ``is_bipartitions_updated=False`` to force it.
+
+If you are doing multiple operations that require a bipartition encoding, you
+should call :meth:`Tree.encode_bipartitions()` *once* for each tree, and,
+then, as long as the trees are *not* *modified* since the encoding, specify the
+``is_bipartitions_updated=True`` argument to each of the functions that use it
+to ensure that the bipartitions are not recalculated each time.
+
+If, on the other hand, you modify a tree structure in any way, e.g., rerooting,
+pruning, add/removing nodes or subtrees, you should update the bipartition
+encoding of a tree yourself by calling :meth:`Tree.encode_bipartitions()`, or
+make sure to specify ``is_bipartitions_updated=False`` to the *first* function
+that you call following the tree modification.
+
+Modeling Bipartitions
+=====================
+
+A Bipartition is a Partitioning of Taxa Corresponding to an Edge of a Tree
+--------------------------------------------------------------------------
+
+A bipartition is the division or sorting of the leaves/tips of a tree into two
+mutually-exclusive and collectively-exhaustive subsets (i.e., a *partition*, in
+the set theory sense, of the leaves of the tree into exactly two non-empty
+subsets; hence the term, "*bi*-partition"). Every edge on a tree corresponds to
+a bipartition in the sense that if we were to split or bisect a tree at a
+particular edge, the leaf sets of each of the two new trees constitute a
+bipartition of the leaf set of the original tree. In the context of
+evolutionary trees like a phylogeny, the leaves typically are associated with
+operational taxonomic unit concepts, or, for short, taxa. So, just as we view a
+tree as a schematic representation of the relationships of taxa, we can see
+bipartitions as a representation of a clustering of taxa.
+
+For example, given a tree:
+
+    ((a,(b,c)),(d,(e,f)));
+
+the edge subtending the leaf node with taxon "d" corresponds to the bipartition
+that splits "d" from the rest of the taxa. Similarly, the edge subtending the
+most-recent common ancestor (MRCA) node of taxa "d", "e", and "f" corresponds
+to the bipartition that splits "d", "e", and "f" from the rest of the taxa,
+"a", "b", and "c".
+
+A Bipartition Can Be Described by a *Bitmask*
+---------------------------------------------
+
+If we were to index the taxa of the tree, with the first taxon getting index 1,
+the second index 2, the third index 3, etc. and so on until index $n$, we can
+represent any possible split as sequence of symbols, such as:
+
+    abbabbaa
+
+where the symbol indicates membership in one arbitrarily-labeled group (e.g.,
+"a") or the other (e.g., "b") of a particular taxon, based on how we relate the
+taxon indexes to the position of the symbols in sequence.
+
+If we were to use a left-to-right order, such that the first element
+corresponded to the first taxon, the second to the second taxon, and so on,
+the above sequence would describe a partition of the taxa {1,2,...,8} into
+the sets {1,4,7,8} and {2,3,5,6}. However, in DendroPy, we use a right-to-left
+order (for reasons explained below), such that the right-most element
+corresponds to the taxon with index 1, the next right-most element corresponds
+to the taxon with index 2 and so on, so the sequence above represents a
+partition of the taxa {1,2,...,8} into the sets {1,2,5,8} and {3,4,6,7}.
+
+Let us say that we had a set of 8 taxa {A,B,...,H}:
+
+    A, B, C, D, E, F, G, H
+
+which we assign indexes {1,2,...,8} according to the following scheme:
+
+    +-------+---+---+---+---+---+---+---+---+
+    | Taxon | A | B | C | D | E | F | G | H |
+    +-------+---+---+---+---+---+---+---+---+
+    | Index | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
+    +-------+---+---+---+---+---+---+---+---+
+
+Then we can describe a bipartition that divides the taxa into two groups
+{A,B,E,H} and {C,D,F,G}, using right-to-left ordering and symbols "0" and "1"
+(instead of "a" and "b") as:
+
+    +-------------+---+---+---+---+---+---+---+---+
+    | Taxon Index | 8 | 7 | 6 | 5 | 4 | 3 | 2 | 1 |
+    +-------------+---+---+---+---+---+---+---+---+
+    | Group       | 0 | 1 | 1 | 0 | 1 | 1 | 0 | 0 |
+    +-------------+---+---+---+---+---+---+---+---+
+
+We can succinctly and usefully represent the bipartition above with an integer
+given by interpreting the sequence of 0's and 1's as bits. Interpreting the
+sequence above, "01101100", as a binary number or bitmask means that this
+bipartition can be represented as the decimal integer "108".
+
+This, in essence, is how bipartitions are represented in DendroPy: as integers
+that are interpreted as *bitmasks* (also known as bit arrays, bit vectors, or
+bit fields, though exact application of terminology varies depending on whether
+or not primary operations are bitwise or dereferencing through offset indexes
+or field names, etc.), where the 0's and 1's assign taxa to different subsets
+of the bipartition.
+
+As an example, consider the following tree::
+
+    [&R] (A,(B,(C,(D,E))));
+
+This would be encoded as::
+
+    /----------------------------- 00001 (A)
+    11111
+    |      /---------------------- 00010 (B)
+    \------11110
+           |      /--------------- 00100 (C)
+           \------11100
+                  |      /-------- 01000 (D)
+                  \------11000
+                         \-------- 10000 (E)
+
+The leaves are assigned bitmasks based on the indexes of the taxa, while the
+internal nodes are given by a `bitwise OR <http://en.wikipedia.org/wiki/Bitwise_operation#OR>`_-ing of the bitmasks of their children.
+
+Modeling Bipartitions Using Leafset Bitmasks and Split Bitmasks
+---------------------------------------------------------------
+
+In DendroPy, bipartitions are modeled using bitmasks as discussed above, i.e.,
+integers that, when represented as a bitarray or bitstring, specify the
+assignment of taxa into one of two groups, based on whether or not the bit
+corresponding to the taxon index is set or not.
+
+In fact, each bipartition is actually modeled by *two* types of bitmasks: a
+*leafset bitmask* and a *split bitmask*:
+
+    - A leafset bitmask is a bit array in which the presence of a taxon in the
+      leaves descending from the edge associated with the bipartition is
+      represented by a set bit ("1"), while its absence is represented by an
+      unset bit ("0"). The taxa are mapped to bit positions using a
+      least-significant bit mapping scheme, in which the first taxon is represented by
+      the least significant bit, the second taxon is represented by the next
+      most significant bit, and so on.
+
+    - A split bitmask is a bit array which divides or partitions taxa by
+      assigning each taxon to one of two arbitrarily-labeled groups, "0" or
+      "1", depending on whether or not a bit is set in the position
+      corresponding to the taxon index under a least-significant bit mapping
+      scheme as described above.
+
+        - For bipartitions of rooted trees, the split bitmask is the same value
+          as the leafset bitmask.
+
+        - For bipartitions of unrooted trees, the split bitmask is the same
+          value as the leafset bitmask *if and only if* the least-significant
+          bit of the leafset bitmask is 0 (i.e., the first taxon is assigned
+          to group "0"); otherwise, it is the *complement* of the leafset
+          bitmask. In other words, with unrooted trees we constrain the split
+          bitmasks such that the first taxon and all other taxa grouped
+          together with it are always placed in group "0".
+
+Why this complication?
+
+Consider the following unrooted tree::
+
+    A    C    D
+     \   |   /
+      +--+--+
+     /       \
+    B         E
+
+This could be represented by either of the following NEWICK strings::
+
+    [&U] ((A,B),(C,(D,E)));
+    [&U] (((A,B),C),(D,E));
+
+Both the above topologies, while distinct if interpreted as rooted, represent
+*identical* unrooted topologies.
+
+When the bipartitions are encoded as leafset bitmasks, we get the following if
+the first tree statement is parsed by DendroPy::
+
+                        /--------- 00001 (A)
+    /-------------------00011
+    |                   \--------- 00010 (B)
+    11111
+    |         /------------------- 00100 (C)
+    \---------11100
+              |         /--------- 01000 (D)
+              \---------11000
+                        \--------- 10000 (E)
+
+and the following if the second tree statement is parsed by DendroPy::
+
+                        /--------- 00001 (A)
+              /---------00011
+    /---------00111     \--------- 00010 (B)
+    |         |
+    11111     \------------------- 00100 (C)
+    |
+    |                   /--------- 01000 (D)
+    \-------------------11000
+                        \--------- 10000 (E)
+
+Note that the leafset bitmask "11100" in the first tree is absent in the second
+tree, while conversely, the leafset bitmask "00111" in the second tree is
+absent in the first tree.
+
+This difference is due purely to the placement of the root to one side or the
+other of taxon 'C'. In rooted trees, this root is a real root, and this
+difference in bipartitions as given by the leafset bitmasks is also real. In
+unrooted trees, this "root" is actually an artifact of the tree structure, and
+the placement is an artifact of the NEWICK string representation. In unrooted
+trees, then the difference in bipartitions as given by the leafset bitmasks is,
+thus, wholly artifactual. This means that it would be impossible to robustly
+and reliably compare, relate, and perform any operations on bipartitions coded
+using leafset bitmasks on unrooted trees: what is effectively the same
+bipartition of taxa may be represented either by placing the first taxon (and
+all the other taxa grouped together with it) in group "0" in one
+representation, or in group "1" in another, and which representation is used
+is arbitrary and unpredictable.
+
+Thus, to allow for robustly establishing equivalence of bipartitions across
+different representations and instantiations of different unrooted trees, we
+*normalize* the bit array representation of bipartitions in unrooted trees to
+always ensure that the first taxon is assigned to group "0", *whether* *or*
+*not* *this* *taxon* *is* *actually* *a* *descendent* *or* *a* *member* *of*
+*the* *leafset* *of* *the* *edge*. [We also collapse the basal bifurcation of
+unrooted trees to avoid redundant representation of artifactual bipartitions.]
+
+As the first taxon corresponds to the least-significant bit in the DendroPy
+scheme, this normalization is known as the least-significant bit 0 or "LSB-0"
+normalization scheme. The choice of 0 as opposed to 1 is arbitrary, but the
+reason is to ensure that we can have consistent comparisons of groups across
+trees of different rotations (and "pseudo-rootings" created by the constraints
+of tree representation in, e.g., the NEWICK format) by enforcing the convention
+that group "0" will always be the group that includes the first taxon (i.e.,
+the taxon with index 1, corresponding to the position of the least-significant
+or right-most bit).
+
+We refer to this normalized version of the leafset bitmask as a *split
+bitmask*. For consistency, bipartitions of rooted trees are also assigned split
+bitmasks, but here these are simply the unmodified leafset bitmasks. For both
+unrooted and rooted trees we maintain the leafset bitmask representation in
+parallel for each bipartition, as it has useful information that is lost when
+normalized, e.g., establishing whether or not a particular subtree or taxon can
+be found within a bipartition.
+
+Thus, regardless of whether the tree is rooted or unrooted, each bipartition
+is modeled by *two* bitmasks: a split bitmask and leafset bitmask. For rooted
+trees, these are identical in value. For unrooted trees, the split bitmask is
+the leafset bitmask normalized to constrain the least significant bit to be 0.
+
+A split bitmask is used to establish *identity* across different trees (for
+this reason it is also sometimes called a split or bipartition hash), while a
+leafset bitmask is used to work with various ancestor-descendent relationships
+within the same tree (it can be used to, for example, quickly assess if a taxon
+descends from a particular node, or if a particular node is a common ancestor
+of two taxa).
+
+Leafset bitmasks are unstable representations of bipartitions for unrooted
+trees, but remain accurate and convenient representations of the descendent
+leaf-sets of nodes in both unrooted and rooted trees. Split bitmasks, on the
+other hand, *are* stable representations of bipartitions for both unrooted as
+well as rooted trees, but are not accurate representations of the taxa
+associated with the leaves descended from the bipartition of a particular edge.
+
+Using Bipartitions
+==================
+
+Bipartition Encoding
+--------------------
+
+The bipartition encoding of a tree is a specification of the structure of tree
+in terms of the complete set of bipartitions that can be found on it. Given a
+bipartition encoding of a tree, the entire topology can be reconstructed
+completely and accurately. In addition, the bipartition encoding of trees can
+be used to quickly and accurately compare, relate, and calculate various
+statistics between different trees and within the same tree.
+
+In DendroPy, the :meth:`Tree.encode_bipartitions()` method calculates the
+bipartitions of a tree. The :attr:`Edge.bipartition` attribute of each edge
+will be populated by a :class:`Bipartition` instance, each of which has the
+bipartition's split bitmask stored in the :attr:`Bipartition.split_bitmask`
+attribute and the leafset bitmask stored in the
+:attr:`Bipartition.leafset_bitmask` attribute. In addition, each
+:class:`Bipartition` also stores a reference to the edge to which it
+corresponds in its :attr:`Bipartition.edge` attribute. For convenience, the
+split bitmask and the leafset bitmask associated with each bipartition of an
+edge can also be accessed through the :attr:`Edge.split_bitmask` and
+:attr:`Edge.leafset_bitmask` properties, respectively.
+
+You can access these :class:`Bipartition` objects by iterating over the edges
+of the tree, but it might be more convenient to access them through the
+:attr:`Tree.bipartition_encoding` attribute of the :class:`Tree`. You can also
+access a dictionary mapping :class:`Bipartition` instances to their
+corresponding edges through the :attr:`Tree.bipartition_edge_map` attribute, or
+a dictionary mapping split bitmasks to their corresponding edges through the
+:attr:`Tree.split_bitmask_edge_map` attribute.
+
+By default, the :class:`Bipartition` instances created are immutable. This is
+to allow them to be used in sets or dictionary keys, and thus exploit O(1)
+look-up/access performance. The hash value of a :class:`Bipartition` object is
+its :attr:`Bipartition.split_bitmask` attribute; two distinct
+:class:`Bipartition` objects are considered equivalent even if they refer to
+different :class:`Edge` objects on different :class:`Tree` objects if their
+:attr:`Bipartition.split_bitmask` values are the same. If you need to modify
+the values of a :class:`Bipartition`, you need to set the
+:attr:`Bipartition.is_mutable` attribute to `True`. Note that changing any
+values that modify the hash of a :class:`Bipartition` instance that is already
+in a hash container such as a set or dictionary will make that instance or
+possibly other members of the container inaccessible: never change the value of
+a :class:`Bipartition` instance if it is in a set or dictionary.
+
+Calculating Bipartitions on Trees
+---------------------------------
+
+A large number of DendroPy functions calculate the split and leafset bitmasks
+in the background: from tree comparison approaches (e.g., calculating the
+Robinson-Foulds distance), to working with within-tree operations (e.g.,
+finding the most-recent common ancestor between two nodes or patristic
+distances between taxa), to tree-set operations (e.g., building consensus trees
+or scoring tree leafset credibilities and finding the maximum leafset
+credibility tree).
+
+When passing trees to these methods and functions, these functions will call
+:meth:`Tree.encode_bipartitions()` automatically for you unless you explicitly
+specify that this should not be done by passing in
+'``is_bipartitions_updated=True``'.
+
+The typical usage idiom in this context would be to:
+
+    (1) Establish a common taxon namespace [i.e., create a global
+        :class:`TaxonNamespace` object and pass it in to all
+        reading/parsing/input operations]
+    (2) Read/load the trees, calling :meth:`Tree.encode_bipartitions()` on each one.
+    (3) Perform the calculations, making sure to specify ``is_bipartitions_updated=True``.
+
+For, example, the following snippet shows how you might count the number of trees in a bootstrap file that have the same topology as a tree of interest::
+
+    import dendropy
+    from dendropy.calculate import treecompare
+    taxa = dendropy.TaxonNamespace()
+    target_tree = dendropy.Tree.get_from_path(
+        "mle.tre",
+        "nexus",
+        taxon_namespace=taxa)
+    count = 0
+    for sup_tree in dendropy.Tree.yield_from_files(
+        files=["boots1.tre", "boots2.tre", "boostraps3.tre"],
+        schema="nexus",
+        taxon_namespace=taxa):
+        d = treecompare.symmetric_difference(target_tree, sup_tree)
+        if d == 0:
+            count += 1
+    print(count)
+
+For this application, it is simpler just to let the calculations take place in the background. But, for example, if for some reason you wanted to do something more complicated, such as calculating the counts with respect to multiple trees of interest, you should try and avoid the redundant recalculation of the bitmasks::
+
+    import dendropy
+
+    from dendropy.calculate import treecompare
+    taxa = dendropy.TaxonNamespace()
+    tree1 = dendropy.Tree.get_from_path(
+        "mle1.tre",
+        "nexus",
+        taxon_namespace=taxa)
+    tree1.encode_bipartitions()
+    tree2 = dendropy.Tree.get_from_path(
+        "mle2.tre",
+        "nexus",
+        taxon_namespace=taxa)
+    tree2.encode_bipartitions()
+    count1 = 0
+    count2 = 0
+    for sup_tree in dendropy.Tree.yield_from_files(
+        files=["boots1.tre", "boots2.tre", "boostraps3.tre"],
+        schema="nexus",
+        taxon_namespace=taxa):
+        sup_tree.encode_bipartitions()
+        if treecompare.symmetric_difference(
+                tree1, sup_tree, is_bipartitions_updated=True) == 0:
+            count1 += 1
+        if treecompare.symmetric_difference(
+                tree2, sup_tree, is_bipartitions_updated=True) == 0:
+            count2 += 1
+    print(count1, count2)
+
+Note that in all cases, for bipartitions to be meaningfully compared two conditions must hold:
+
+    1. The trees must reference the *same* operational taxonomic unit namespace
+       object, :class:`TaxonNamespace`.
+    2. The trees must have the same rooting state (i.e., all rooted or all
+       unrooted).
+
diff --git a/doc/source/primer/chars.rst b/doc/source/primer/chars.rst
new file mode 100644
index 0000000..c712ed9
--- /dev/null
+++ b/doc/source/primer/chars.rst
@@ -0,0 +1,181 @@
+******************
+Character Matrices
+******************
+
+Types of Character Matrices
+===========================
+
+The |CharacterMatrix| object represents character data in DendroPy.
+In most cases, you will not deal with objects of the |CharacterMatrix| class directly, but rather with objects of one of the classes specialized to handle specific data types:
+
+    - |DnaCharacterMatrix|, for DNA nucleotide sequence data
+    - |RnaCharacterMatrix|, for RNA nucleotide sequence data
+    - |ProteinCharacterMatrix|, for amino acid sequence data
+    - |StandardCharacterMatrix|, for discrete-value data
+    - |ContinuousCharacterMatrix|, for continuous-valued data
+
+The |ContinuousCharacterMatrix| class represents its character values directly.
+Typically, all other classes represent their character values as special :class:`~dendropy.datamodel.charstatemodel.StateIdentity` instances, *not* as strings.
+So, for example, the DNA character "A" is modeled by a special :class:`~dendropy.datamodel.charstatemodel.StateIdentity` instance (created by the DendroPy library).
+While it is represented by the string "A", and can be converted to the string and back again, it is not the same as the string "A".
+Each discrete |CharacterMatrix| instance has one or more :class:`~dendropy.datamodel.charstatemodel.StateAlphabet` instances associated with it that manage the collection of letters that make up the character data.
+In the case of, e.g. DNA, RNA, protein and other specialized discrete data, these are pre-defined by DendroPy: ``dendropy.DNA_STATE_ALPHABET``, ``dendropy.RNA_STATE_ALPHABET``, etc.
+In the case of "standard" character data, these are created for each matrix separately. Facilities are provided for the creation of custom state alphabets and for the sharing of state alphabets between different |StandardCharacterMatrix| instances.
+
+Reading and Writing Character Data
+==================================
+
+As with most other phylogenetic data objects, objects of the |CharacterMatrix|-derived classes support the "|get|" factory method to populate objects from a data source.
+This method takes a data source as the first keyword argument and a :ref:`schema specification string <Specifying_the_Data_Source_Format>` ("``nexus``", "``newick``", "``nexml``", "``fasta``", or "``phylip``", etc.) as the second::
+
+    import dendropy
+    dna1 = dendropy.DnaCharacterMatrix.get(file=open("pythonidae.fasta"), schema="fasta")
+    dna2 = dendropy.DnaCharacterMatrix.get(url="http://purl.org/phylo/treebase/phylows/matrix/TB2:M2610?format=nexus", schema="nexus")
+    aa1 = dendropy.ProteinCharacterMatrix.get(file=open("pythonidae.dat"), schema="phylip")
+    std1 = dendropy.StandardCharacterMatrix.get(path="python_morph.nex", schema="nexus")
+    std2 = dendropy.StandardCharacterMatrix.get(data=">t1\n01011\n\n>t2\n11100", schema="fasta")
+
+
+The "|write|" method allows you to write the data of a |CharacterMatrix| to a file-like object or a file path::
+
+    dna1 = dendropy.DnaCharacterMatrix.get(file=open("pythonidae.nex"), schema="nexus")
+    dna1.write(path="out.nexml", schema="nexml")
+    dna1.write(file=open("out.fasta", "w"), schema="fasta")
+
+You can also represent the data as a string using the :meth:`as_string` method::
+
+    dna1 = dendropy.DnaCharacterMatrix.get(file=open("pythonidae.nex"), schema="nexus")
+    s = dna1.as_string(schema="fasta")
+    print(s)
+
+
+In addition, fine-grained control over the reading and writing of data is available through various :ref:`keyword arguments <Customizing_Data_Creation_and_Reading>`.
+
+More information on reading operations is available in the :doc:`/primer/reading_and_writing` section.
+
+Creating a Character Data Matrix from a Dictionary of Strings
+=============================================================
+
+The :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.from_dict` factory method creates a new |CharacterMatrix| from a dictionary mapping taxon labels to sequences represented as strings::
+
+    import dendropy
+    d = {
+            "s1" : "TCCAA",
+            "s2" : "TGCAA",
+            "s3" : "TG-AA",
+    }
+    dna = dendropy.DnaCharacterMatrix.from_dict(d)
+
+Taxon Management with Character Matrices
+========================================
+
+Taxon management with |CharacterMatrix|-derived objects works very much the same as it does with |Tree| or |TreeList| objects: every time a |CharacterMatrix|-derived object is independently created or read, a new |TaxonNamespace| is created, unless an existing one is specified.
+Thus, again, if you are creating multiple character matrices that refer to the same set of taxa, you will want to make sure to pass each of them a common |TaxonNamespace| reference::
+
+    import dendropy
+    taxa = dendropy.TaxonNamespace()
+    dna1 = dendropy.DnaCharacterMatrix.get(
+        path="pythonidae_cytb.fasta",
+        schema="fasta",
+        taxon_namespace=taxa)
+    prot1 = dendropy.ProteinCharacterMatrix.get(
+        path="pythonidae_morph.nex",
+        schema="nexus",
+        taxon_namespace=taxa)
+    trees = dendropy.TreeList.get(
+        path="pythonidae.trees.nex",
+        schema="nexus",
+        taxon_namespace=taxa)
+
+Concatenating Multiple Data Matrices
+====================================
+
+A new |CharacterMatrix| can be created from multiple existing matrices using the :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.concatenate()` factory method, which takes a list or an iterable of |CharacterMatrix| instances as an argument.
+All the |CharacterMatrix| objects in the list must be of the same type, and
+share the same |TaxonNamespace| reference. All taxa must be present in all
+alignments, and all alignments must be of the same length. Component parts
+will be recorded as character subsets.
+
+For example:
+
+.. literalinclude:: /examples/char_mat_concat.py
+
+
+results in ::
+
+    d1: 12 sequences, 231 characters
+    d2: 12 sequences, 231 characters
+    d3: 12 sequences, 231 characters
+    d_all: 12 sequences, 693 characters
+    Subsets: {'locus002': <dendropy.datamodel.charmatrixmodel.CharacterSubset object at 0x101d792d0>, 'locus000': <dendropy.datamodel.charmatrixmodel.CharacterSubset object at 0x101d79250>, 'locus001': <dendropy.datamodel.charmatrixmodel.CharacterSubset object at 0x101d79290>}
+
+You can instantiate a concatenated matrix from multiple sources using the :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.concatenate_from_paths()` or :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.concatenate_from_streams()` factory methods:
+
+.. literalinclude:: /examples/char_mat_concat2.py
+
+Sequence Management
+===================
+
+A range of methods also exist for importing data from another matrix object.
+These vary depending on how "new" and "existing" are treated.  A "new"
+sequence is a sequence in the other matrix associated with a |Taxon|
+object for which there is no sequence defined in the current matrix.  An
+"existing" sequence is a sequence in the other matrix associated with a
+|Taxon| object for which there *is* a sequence defined in the
+current matrix.
+
++---------------------------------+---------------------------------------------------------------------------------+--------------------------------------------------------------------------------+
+|                                 | New Sequences: IGNORED                                                          | New Sequences: ADDED                                                           |
++=================================+=================================================================================+================================================================================+
+| Existing Sequences: IGNORED     | [NO-OP]                                                                         | :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.add_sequences()`    |
++---------------------------------+---------------------------------------------------------------------------------+--------------------------------------------------------------------------------+
+| Existing Sequences: OVERWRITTEN | :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.replace_sequences()` | :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.update_sequences()` |
++---------------------------------+---------------------------------------------------------------------------------+--------------------------------------------------------------------------------+
+| Existing Sequences: EXTENDED    | :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.extend_sequences()`  | :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.extend_matrix()`    |
++---------------------------------+---------------------------------------------------------------------------------+--------------------------------------------------------------------------------+
+
+More information can be found in the source documentation:
+
+-   :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.add_sequences()`
+-   :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.replace_sequences()`
+-   :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.update_sequences()`
+-   :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.extend_sequences()`
+-   :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.extend_matrix()`
+
+In addition there are methods for selecting and removing sequences:
+
+-   :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.remove_sequences()`
+-   :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.discard_sequences()`
+-   :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.keep_sequences()`
+
+As well as "filling out" a matrix by adding columns or rows:
+
+-   :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.fill_taxa()`
+-   :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.fill()`
+-   :meth:`~dendropy.datamodel.charmatrixmodel.CharacterMatrix.pack()`
+
+Accessing Data
+==============
+
+A |CharacterMatrix| behaves very much like a dictionary, where the "keys" are |Taxon| instances, which can be dereferenced using the instance itself, the taxon label, or the index of the taxon in the collection (note: this is *not* necessarily the same as the accession index, which is the basis for bipartition collection).
+
+For example:
+
+.. literalinclude:: /examples/chars_access1.py
+
+You can also iterate over the matrix in a number of ways:
+
+.. literalinclude:: /examples/chars_access2.py
+
+
+The "values" returned by dereferencing the "keys" of a |CharacterMatrix| object are |CharacterDataSequence| objects.
+Objects of this class behave very much like lists, where the elements are either numeric values for |ContinuousCharacterMatrix| matrices:
+
+.. literalinclude:: /examples/chars_access3.py
+
+or |StateIdentity| instances for all other types of matrices:
+
+.. literalinclude:: /examples/chars_access4.py
+
+As can be seen, you can use :meth:`~dendropy.datamodel.charmatrixmodel.CharacterDataSequence.values()` to get a list of the values of the sequence directly, :meth:`~dendropy.datamodel.charmatrixmodel.CharacterDataSequence.symbols_as_list()` to get a list of the values represented as strings, and :meth:`~dendropy.datamodel.charmatrixmodel.CharacterDataSequence.symbols_as_string()` to get the string representation of the whole sequence.
diff --git a/doc/source/primer/converting.rst b/doc/source/primer/converting.rst
new file mode 100644
index 0000000..05561ff
--- /dev/null
+++ b/doc/source/primer/converting.rst
@@ -0,0 +1,86 @@
+*******************************
+Converting Between Data Formats
+*******************************
+
+Any data in a schema that can be read by DendroPy, can be saved to files in any schema that can be written by DendroPy.
+Converting data between formats is simply a matter of calling readers and writers of the appropriate type.
+
+Converting from FASTA schema to NEXUS::
+
+    >>> import dendropy
+    >>> cytb = dendropy.DnaCharacterMatrix.get(path="pythonidae_cytb.fasta", schema="fasta")
+    >>> cytb.write(path="pythonidae_cytb.nexus", schema="nexus")
+
+Converting a collection of trees from NEXUS schema to Newick::
+
+    >>> import dendropy
+    >>> post_trees = dendropy.TreeList()
+    >>> post_trees.read(
    ...         file=open("pythonidae.nex.run1.t", "r"),
+    ...         schema="nexus",
+    ...         tree_offset=200)
+    >>> post_trees.read(
+    ...         path="pythonidae.nex.run2.t",
+    ...         schema="nexus",
+    ...         tree_offset=200)
+    >>> post_trees.write(
+    ...     path="pythonidae.mcmc.newick",
+    ...     schema="newick")
+
+Converting a single tree from Newick schema to NEXUS::
+
+    >>> import dendropy
+    >>> mle = dendropy.Tree.get(path="pythonidae.mle.newick", schema="newick")
+    >>> mle.write(path="pythonidae.mle.nex", schema="nexus")
+
+Collecting data from multiple sources and writing to a NEXUS-formatted file::
+
+    >>> import dendropy
+    >>> ds = dendropy.DataSet()
+    >>> tns = ds.new_taxon_namespace()
+    >>> ds.attach_taxon_namespace(tns)
+    >>> ds.read(
+    ...     path="pythonidae_cytb.fasta",
+    ...     schema="fasta",
+    ...     data_type="dna")
+    >>> ds.read(
+    ...     path="pythonidae_aa.nex",
+    ...     schema="nexus")
+    >>> ds.read(
+    ...     path="pythonidae_morph.nex",
+    ...     schema="nexus")
+    >>> ds.read(
+    ...     path="pythonidae_trees.tre",
+    ...     schema="newick")
+    >>> ds.write(
+    ...     path="pythonidae_combined.nex",
+    ...     schema="nexus")
+
+Note how we create a new |TaxonNamespace| instance using the :meth:`~dendropy.datamodel.DataSet.new_taxon_namespace` method, and then "bind" or attach it to the |DataSet| instance using the :meth:`~dendropy.datamodel.DataSet.attach_taxon_namespace()` method.
+This ensures that all new data parsed by the |DataSet| instance will reference the same |TaxonNamespace| instance, i.e., all taxon labels will be mapped to the same set of |Taxon| objects.
+Alternatively, we could also have explicitly passed in the |TaxonNamespace| instance to use for each reading operation::
+
+    >>> import dendropy
+    >>> ds = dendropy.DataSet()
+    >>> tns = ds.new_taxon_namespace()
+    >>> ds.read(
+    ...     path="pythonidae_cytb.fasta",
+    ...     schema="fasta",
+    ...     data_type="dna",
+    ...     taxon_namespace=tns)
+    >>> ds.read(
+    ...     path="pythonidae_aa.nex",
+    ...     schema="nexus",
+    ...     taxon_namespace=tns)
+    >>> ds.read(
+    ...     path="pythonidae_morph.nex",
+    ...     schema="nexus",
+    ...     taxon_namespace=tns)
+    >>> ds.read(
+    ...     path="pythonidae_trees.tre",
+    ...     schema="newick",
+    ...     taxon_namespace=tns)
+    >>> ds.write(
+    ...     path="pythonidae_combined.nex",
+    ...     schema="nexus",
+    ...     taxon_namespace=tns)
diff --git a/doc/source/primer/dataobjects.rst b/doc/source/primer/dataobjects.rst
new file mode 100644
index 0000000..4c7f45d
--- /dev/null
+++ b/doc/source/primer/dataobjects.rst
@@ -0,0 +1,89 @@
+*****************************************
+Introduction to Phylogenetic Data Objects
+*****************************************
+
+Primary Phylogenetic Data Objects
+==================================
+
+Phylogenetic data in DendroPy is represented by one or more objects of the following classes:
+
+    |Taxon|
+        A representation of an operational taxonomic unit, with an attribute, :attr:`~dendropy.datamodel.taxonmodel.Taxon.label`, corresponding to the taxon label.
+
+    |TaxonNamespace|
+        A collection of |Taxon| objects representing a distinct definition of taxa (for example, as specified explicitly in a NEXUS "TAXA" block, or implicitly in the set of all taxon labels used across a Newick tree file).
+
+    |Tree|
+        A collection of |Node| and |Edge| objects representing a phylogenetic tree.
+        Each |Tree| object maintains a reference to a |TaxonNamespace| object in its attribute, :attr:`~dendropy.datamodel.treemodel.Tree.taxon_namespace`, which specifies the set of taxa that are referenced by the tree and its nodes. Each |Node| object has a :attr:`~dendropy.datamodel.treemodel.Node.taxon` attribute (which points to a particular |Taxon| object if there is an operational taxonomic unit associated with this node, or is |None| if not), a :attr:`~dendropy.datamodel.treemode [...]
+        In addition, advanced operations with tree data often make use of a |Bipartition| object associated with each |Edge| on a |Tree| (see ":doc:`/primer/bipartitions`" for more information).
+
+
+    |TreeList|
+        A :class:`list` of |Tree| objects. A |TreeList| object has an attribute, :attr:`~dendropy.datamodel.treemodel.TreeList.taxon_namespace`, which specifies the set of taxa that are referenced by all member |Tree| elements. This is enforced when a |Tree| object is added to a |TreeList|, with the |TaxonNamespace| of the |Tree| object and all |Taxon| references of the |Node| objects in the |Tree| mapped to the |TaxonNamespace| of the |TreeList|.
+
+    |CharacterMatrix|
+        Representation of character data, with specializations for different data types: |DnaCharacterMatrix|, |RnaCharacterMatrix|, |ProteinCharacterMatrix|, |StandardCharacterMatrix|, |ContinuousCharacterMatrix|, etc. A |CharacterMatrix| can be treated very much like a :class:`dict` object, with
+        |Taxon| objects as keys and character data as values associated with those keys.
+
+    |DataSet|
+        A meta-collection of phylogenetic data, consisting of lists of multiple |TaxonNamespace| objects (:attr:`DataSet.taxon_namespaces`), |TreeList| objects (:attr:`DataSet.tree_lists`), and |CharacterMatrix| objects (:attr:`DataSet.char_matrices`).
+
+
+    |TreeArray|
+        A high-performance container designed to efficiently store and manage (potentially) large collections of structures of (potentially) large trees for processing.
+
+
+Creating New (Empty) Objects
+============================
+
+All of the above names are imported into the :mod:`dendropy` namespace, and so to instantiate new, empty objects of these classes, you would need to import :mod:`dendropy`::
+
+    >>> import dendropy
+    >>> tree1 = dendropy.Tree()
+    >>> tree_list11 = dendropy.TreeList()
+    >>> dna1 = dendropy.DnaCharacterMatrix()
+    >>> dataset1 = dendropy.DataSet()
+
+Or import the names directly::
+
+    >>> from dendropy import Tree, TreeList, DnaCharacterMatrix, DataSet
+    >>> tree1 = Tree()
+    >>> tree_list1 = TreeList()
+    >>> dna1 = DnaCharacterMatrix()
+    >>> dataset1 = DataSet()
+
+More details on how to create and populate new objects of various kinds programmatically are given in later chapters (e.g., ":doc:`trees`", ":doc:`chars`", ":doc:`datasets`").
+
+Reading, Writing, and Annotating Phylogenetic Data
+==================================================
+
+DendroPy provides a rich set of tools for reading and writing phylogenetic data
+in various formats, such as NEXUS, Newick, PHYLIP, etc., with *many* options to
+customize and control how the data is ingested and parsed, as well as formatted
+and written-out.
+For example::
+
+    >>> import dendropy
+    >>> tree_list1 = dendropy.TreeList()
+    >>> tree_list1.read_from_path("pythonidae.mcmc1.nex",
+    ...     schema="nexus",
+    ...     collection_offset=0,
+    ...     tree_offset=100)
+    >>> tree_list1.read_from_path("pythonidae.mcmc2.nex",
+    ...     schema="nexus",
+    ...     collection_offset=0,
+    ...     tree_offset=100)
+    >>> tree_list1.write_to_path("combined.newick",
+    ...     suppress_edge_lengths=True,
+    ...     schema="newick")
+
+These are covered in detail in the next chapter, ":doc:`/primer/reading_and_writing`".
+
+
+Support is also available for adding, accessing, and managing rich and
+expressive metadata annotations to many of the above objects and components of
+those objects. This is covered in detail in the
+":doc:`/primer/working_with_metadata_annotations`" chapter.
+
+
diff --git a/doc/source/primer/datasets.rst b/doc/source/primer/datasets.rst
new file mode 100644
index 0000000..8fb17e1
--- /dev/null
+++ b/doc/source/primer/datasets.rst
@@ -0,0 +1,178 @@
+*********
+Data Sets
+*********
+
+The |DataSet| class provides for objects that allow you to manage multiple types of phylogenetic data.
+
+It has three primary attributes:
+
+    :attr:`~dendropy.datamodel.datasetmodel.DataSet.taxon_namespaces`
+        A list of all |TaxonNamespace| objects in the |DataSet|, in the order that they were added or read, include |TaxonNamespace| objects added implicitly through being associated with added |TreeList| or |CharacterMatrix| objects.
+
+    :attr:`~dendropy.datamodel.datasetmodel.DataSet.tree_lists`
+        A list of all |TreeList| objects in the |DataSet|, in the order that they were added or read.
+
+    :attr:`~dendropy.datamodel.datasetmodel.DataSet.char_matrices`
+        A list of all |CharacterMatrix| objects in the |DataSet|, in the order that they were added or read.
+
+|DataSet| Creation and Reading
+===============================
+
+Reading and Writing |DataSet| Objects
+-------------------------------------
+
+You can use the :meth:`~dendropy.datamodel.datasetmodel.DataSet.get` factory class method for simultaneously instantiating and populating a |DataSet| object, taking a data source as the first argument and a :ref:`schema specification string <Specifying_the_Data_Source_Format>` ("``nexus``", "``newick``", "``nexml``", "``fasta``", "``phylip``", etc.) as the second::
+
+    >>> import dendropy
+    >>> ds = dendropy.DataSet.get(
+        path='pythonidae.nex',
+        schema='nexus')
+
+The :meth:`~dendropy.datamodel.datasetmodel.DataSet.read` instance method for reading additional data into existing objects is also supported, taking the same arguments (i.e., a data source, a :ref:`schema specification string <Specifying_the_Data_Source_Format>`, as well as optional keyword arguments to customize the parse behavior):
+
+.. literalinclude:: /examples/ds1.py
+
+.. Note::
+
+    Note how the :meth:`~dendropy.datamodel.datasetmodel.DataSet.attach_taxon_namespace()` method is called before invoking any ":meth:`~dendropy.datamodel.datasetmodel.DataSet.read`" statements, to ensure that all the taxon references in the data sources get mapped to the same |TaxonNamespace| instance.
+    It is **HIGHLY** recommended that you do this, i.e., manage all data with the same |DataSet| instance under the same taxonomic namespace, unless you have a special reason to include multiple independent taxon "domains" in the same data set.
+
+The ":meth:`~dendropy.datamodel.datasetmodel.DataSet.write`" method allows you to write the data of a |DataSet| to a file-like object or a file path.
+The following example aggregates the post-burn in MCMC samples from a series of NEXUS-formatted tree files into a single |TreeList|, then, adds the |TreeList| as well as the original character data into a single |DataSet| object, which is then written out as NEXUS-formatted file:
+
+.. literalinclude:: /examples/dsrw1.py
+
+If you do not want to actually write to a file, but instead simply need a string representing the data in a particular format, you can call the instance method :meth:`as_string()`, passing a :ref:`schema specification string <Specifying_the_Data_Writing_Format>` as the first argument::
+
+    import dendropy
+    ds = dendropy.DataSet()
+    ds.read_from_path('pythonidae.cytb.fasta', 'dnafasta')
+    s = ds.as_string('nexus')
+
+or::
+
+    dna1 = dendropy.DataSet.get(file=open("pythonidae.nex"), schema="nexus")
+    s = dna1.as_string(schema="fasta")
+    print(s)
+
+
+In addition, fine-grained control over the reading and writing of data is available through various keyword arguments.
+More information on reading operations is available in the :doc:`/primer/reading_and_writing` section.
+
+.. Cloning an Existing |DataSet|
+.. -----------------------------
+..
+.. You can also clone an existing |DataSet| object by passing it as an argument to the |DataSet| constructor::
+..
+..     >>> import dendropy
+..     >>> ds1 = dendropy.DataSet.get(
+..     ... path='pythonidae.cytb.fasta',
+..     ... schema='dnafasta')
+..     >>> ds2 = dendropy.DataSet(ds1)
+..
+.. Following this, ``ds2`` will be a *full* deep-copy clone of ``ds1``, with distinct and independent, but identical, |Taxon|, |TaxonNamespace|, |TreeList|, |Tree| and |CharacterMatrix| objects.
+.. Note that, in distinction to the similar cloning methods of |Tree| and |TreeList|, even the |Taxon| and |TaxonNamespace| objects are cloned, meaning that you manipulate the |Taxon| and |TaxonNamespace| objects of ``ds2`` without in any way effecting those of ``ds1``.
+
+Creating a New |DataSet| from Existing |TreeList| and |CharacterMatrix| Objects
+-------------------------------------------------------------------------------
+
+You can add independently created or parsed data objects to a |DataSet| by passing them as unnamed arguments to the constructor:
+
+.. literalinclude:: /examples/ds4.py
+
+Note how we call the instance method :meth:`~dendropy.datamodel.datasetmodel.DataSet.unify_taxon_namespaces()` after the creation of the |DataSet| object.
+This method will remove all existing |TaxonNamespace| objects from the |DataSet|, create and add a new one, and then map all taxon references in all contained |TreeList| and |CharacterMatrix| objects to this new, unified |TaxonNamespace|.
+
+Adding Data to an Existing |DataSet|
+-------------------------------------
+
+You can add independently created or parsed data objects to a |DataSet| using the :meth:`~dendropy.datamodel.datasetmodel.DataSet.add()` method:
+
+.. literalinclude:: /examples/ds4.py
+
+Here, again, we call the :meth:`~dendropy.datamodel.datasetmodel.DataSet.unify_taxon_namespaces()` to map all taxon references to the same, common, unified |TaxonNamespace|.
+
+Taxon Management with Data Sets
+===============================
+
+The |DataSet| object, representing a meta-collection of phylogenetic data, differs in one important way from all the other phylogenetic data objects discussed so far with respect to taxon management, in that it is not associated with any particular |TaxonNamespace| object.
+Rather, it maintains a list (in the property :attr:`~dendropy.datamodel.datasetmodel.DataSet.taxon_namespaces`) of *all* the |TaxonNamespace| objects referenced by its contained |TreeList| objects (in the property :attr:`~dendropy.datamodel.datasetmodel.DataSet.tree_lists`) and |CharacterMatrix| objects (in the property :attr:`~dendropy.datamodel.datasetmodel.DataSet.char_matrices`).
+
+With respect to taxon management, |DataSet| objects operate in one of two modes: "detached taxon set" mode and "attached taxon set" mode.
+
+Detached (Multiple) Taxon Set Mode
+----------------------------------
+
+In the "detached taxon set" mode, which is the default, |DataSet| object tracks all |TaxonNamespace| references of their other data members in the property :attr:`~dendropy.datamodel.datasetmodel.DataSet.taxon_namespaces`, but no effort is made at taxon management as such.
+Thus, every time a data source is read with a "detached taxon set" mode |DataSet| object, by default, a new |TaxonNamespace| object will be created and associated with the |Tree|, |TreeList|, or |CharacterMatrix| objects created from each data source, resulting in multiple independent |TaxonNamespace| references.
+As such, "detached taxon set" mode |DataSet| objects are suitable for handling data with multiple distinct sets of taxa.
+
+For example::
+
+    >>> import dendropy
+    >>> ds = dendropy.DataSet()
+    >>> ds.read(path="primates.nex", schema="nexus")
+    >>> ds.read(path="snakes.nex", schema="nexus")
+
+The dataset, ``ds``, will now contain two distinct sets of |TaxonNamespace| objects, one for the taxa defined in "primates.nex", and the other for the taxa defined for "snakes.nex".
+In this case, this behavior is correct, as the two files do indeed refer to different sets of taxa.
+
+However, consider the following::
+
+    >>> import dendropy
+    >>> ds = dendropy.DataSet()
+    >>> ds.read(path="pythonidae_cytb.fasta", schema="fasta", data_type="dna")
+    >>> ds.read(path="pythonidae_aa.nex", schema="nexus")
+    >>> ds.read(path="pythonidae_morphological.nex", schema="nexus")
+    >>> ds.read(path="pythonidae.mle.tre", schema="nexus")
+
+Here, even though all the data files refer to the same set of taxa, the resulting  |DataSet| object will actually have 4 distinct  |TaxonNamespace| objects, one for each of the independent reads, and a taxon with a particular label in the first file (e.g., "Python regius" of "pythonidae_cytb.fasta") will map to a completely distinct |Taxon| object than a taxon with the same label in the second file (e.g., "Python regius" of "pythonidae_aa.nex").
+This is incorrect behavior, and to achieve the correct behavior with a multiple taxon set mode |DataSet| object, we need to explicitly pass a |TaxonNamespace| object to each of the :meth:`~dendropy.datamodel.datasetmodel.DataSet.read_from_path()` statements::
+
+    >>> import dendropy
+    >>> ds = dendropy.DataSet()
+    >>> ds.read(path="pythonidae_cytb.fasta", schema="fasta", data_type="dna")
    >>> ds.read(path="pythonidae_aa.nex", schema="nexus", taxon_namespace=ds.taxon_namespaces[0])
    >>> ds.read(path="pythonidae_morphological.nex", schema="nexus", taxon_namespace=ds.taxon_namespaces[0])
    >>> ds.read(path="pythonidae.mle.tre", schema="nexus", taxon_namespace=ds.taxon_namespaces[0])
+    >>> ds.write_to_path("pythonidae_combined.nex", "nexus")
+
+In the previous example, the first :meth:`~dendropy.datamodel.datasetmodel.DataSet.read()` statement results in a new |TaxonNamespace| object, which is added to the :attr:`~dendropy.datamodel.datasetmodel.DataSet.taxon_namespaces` property of the |DataSet| object ``ds``.
+This |TaxonNamespace| object gets passed via the ``taxon_namespace`` keyword to subsequent :meth:`~dendropy.datamodel.datasetmodel.DataSet.read_from_path()` statements, and thus as each of the data sources are processed, the taxon references get mapped to |Taxon| objects in the same, single, |TaxonNamespace| object.
+
+While this approach works to ensure correct taxon mapping across multiple data object reads and instantiation, in this context, it is probably more convenient to use the |DataSet| in "attached taxon set" mode.
+In fact, it is highly recommended that |DataSet| instances *always* use the "attached taxon set" mode, as, conceptually there are very few cases where a collection of data should span multiple independent taxon namespaces.
+
+Attached (Single) Taxon Set Mode
+--------------------------------
+In the "attached taxon set" mode, |DataSet| objects ensure that the taxon references of all data objects that are added to them are mapped to the same |TaxonNamespace| object (at least one for each independent read or creation operation).
+The "attached taxon set" mode is activated by calling the :meth:`~dendropy.datamodel.datasetmodel.DataSet.attach_taxon_namespace` method on a |DataSet| and passing in the |TaxonNamespace| to use::
+
+    >>> import dendropy
+    >>> ds = dendropy.DataSet()
+    >>> taxa = dendropy.TaxonNamespace(label="global")
+    >>> ds.attach_taxon_namespace(taxa)
+    >>> ds.read_from_path("pythonidae_cytb.fasta", "dnafasta")
+    >>> ds.read_from_path("pythonidae_aa.nex", "nexus")
+    >>> ds.read_from_path("pythonidae_morphological.nex", "nexus")
+    >>> ds.read_from_path("pythonidae.mle.tre", "nexus")
+
+Switching Between Attached and Detached Taxon Set Modes
+-------------------------------------------------------
+As noted above, you can use the :meth:`~dendropy.datamodel.datasetmodel.DataSet.attach_taxon_namespace()` method to switch a |DataSet| object to attached taxon set mode.
+To restore it to multiple taxon set mode, you would use the :meth:`~dendropy.datamodel.datasetmodel.DataSet.detach_taxon_namespace()` method::
+
+    >>> import dendropy
+    >>> ds = dendropy.DataSet()
+    >>> taxa = dendropy.TaxonNamespace(label="global")
+    >>> ds.attach_taxon_namespace(taxa)
+    >>> ds.read_from_path("pythonidae_cytb.fasta", "dnafasta")
+    >>> ds.read_from_path("pythonidae_aa.nex", "nexus")
+    >>> ds.read_from_path("pythonidae_morphological.nex", "nexus")
+    >>> ds.read_from_path("pythonidae.mle.tre", "nexus")
+    >>> ds.detach_taxon_namespace()
+    >>> ds.read_from_path("primates.nex", "nexus")
+
+Here, the same |TaxonNamespace| object is used to manage taxon references for data parsed from the first four files, while the data from the fifth and final file gets its own, distinct, |TaxonNamespace| object and associated |Taxon| object references.
+
+
diff --git a/doc/source/primer/genbank.rst b/doc/source/primer/genbank.rst
new file mode 100644
index 0000000..0b8a828
--- /dev/null
+++ b/doc/source/primer/genbank.rst
@@ -0,0 +1,565 @@
+*************************************************
+Working with GenBank Molecular Sequence Databases
+*************************************************
+
+The :mod:`~dendropy.interop.genbank` module provides the classes and methods to download sequences from |GenBank| and instantiate them into |DendroPy| phylogenetic data objects.
+Three classes are provided, all of which have an identical interface, varying only in the type of data retrieved:
+
+   :class:`~dendropy.interop.genbank.GenBankDna`
+
+        Acquire and manage DNA sequence data from the |GenBank| Nucleotide database.
+
+   :class:`~dendropy.interop.genbank.GenBankRna`
+
+        Acquire and manage RNA sequence data from the |GenBank| Nucleotide database.
+
+   :class:`~dendropy.interop.genbank.GenBankProtein`
+
+        Acquire and manage AA sequence data from the |GenBank| Protein database.
+
+
+Quick Start
+===========
+
+The basic way to retrieve sequence data is to create a
+:class:`~dendropy.interop.genbank.GenBankDna`,
+:class:`~dendropy.interop.genbank.GenBankRna`, or
+:class:`~dendropy.interop.genbank.GenBankProtein` object, and pass in a list of
+identifiers to be retrieved using the "``ids``"  argument.
+The value of this argument should be a container with either GenBank accession identifiers or GI numbers::
+
+
+    >>> from dendropy.interop import genbank
+    >>> gb_dna = genbank.GenBankDna(ids=['EU105474', 'EU105475'])
+    >>> for gb in gb_dna:
+    ...     print gb
+    gi|158930545|gb|EU105474.1| Homo sapiens Ache non-coding region T864 genomic sequence
+    gi|158930546|gb|EU105475.1| Homo sapiens Arara non-coding region T864 genomic sequence
+
+The records are stored as :class:`~dendropy.interop.genbank.GenBankAccessionRecord` objects.
+These records store the *full* information available in a |GenBank| record, including the references, feature table, qualifiers, and other details, and these are available as attributes of the :class:`~dendropy.interop.genbank.GenBankAccessionRecord` objects (e.g., "``primary_accession``", "``taxonomy``", "``feature_table``" and so on).
+
+To generate a |CharacterMatrix| object from the collection of sequences, call the :meth:`~dendropy.interop.genbank.GenBankDna.generate_char_matrix`  method::
+
+    >>> from dendropy.interop import genbank
+    >>> gb_dna = genbank.GenBankDna(ids=['EU105474', 'EU105475'])
+    >>> char_matrix = gb_dna.generate_char_matrix()
+    >>> print(char_matrix.as_string("nexus"))
+    #NEXUS
+    BEGIN TAXA;
+
+        DIMENSIONS NTAX=2;
+        TAXLABELS
+            EU105474
+            EU105475
+    ;
+    END;
+    BEGIN CHARACTERS;
+        DIMENSIONS  NCHAR=494;
+        FORMAT DATATYPE=DNA GAP=- MISSING=? MATCHCHAR=.;
+        MATRIX
+    EU105474    TCTCTTATCA...
+    EU105475    TCTCTTATCA...
+    ;
+    END;
+
+As can be seen, by default the taxon labels assigned to the sequences are set to the identifier used to request the sequences. This, and many other aspects of the character matrix generation, including annotation of taxa and sequences, can be customized, as discussed in detail below.
+
+Acquiring Data from GenBank
+============================
+
+The :class:`~dendropy.interop.genbank.GenBankDna`, :class:`~dendropy.interop.genbank.GenBankRna`, and :class:`~dendropy.interop.genbank.GenBankProtein` classes provide for the downloading and management of DNA, RNA, and protein (AA) sequences from |GenBank|.
+The first two of these query the "nucleotide" or "nuccore" database, while the last queries the "protein" database.
+The constructors of these classes accept the following arguments:
+
+    ``ids``
+
+        A list of accession identifiers or GI numbers of the records to be downloaded. E.g. "``ids=['EU105474', 'EU105475']``",  "``ids=['158930545', 'EU105475']``", or  "``ids=['158930545', '158930546']``".
+        If "``prefix``" is specified, this string will be pre-pended to all values in the list.
+
+    ``id_range``
+        A tuple of *integers* that specify the first and last values (inclusive) of accession or GI numbers of the records to be downloaded. If "``prefix``" is specified, this string will be prepended to all numbers in this range.
+        Thus specifying "``id_range=(158930545, 158930550)``" is exactly equivalent to specifying "``ids=[158930545, 158930546, 158930547, 158930548, 158930549, 158930550]``", while specifying "``id_range=(105474, 105479), prefix="EU"``" is exactly equivalent to specifying "``ids=["EU105474", "EU105475", "EU105476", "EU105477", "EU105478", "EU105479"]``".
+
+
+    ``prefix``
+        This string will be prepended to all values resulting from the "``ids``" and "``id_range``".
+
+
+    ``verify``
+        By default, the results of the download are checked to make sure there is a one-to-one correspondence between requested id's and retrieved records. Setting "``verify=False``" skips this checking.
+
+So, for example, the following are all different ways of instantiating |GenBank| resource data store::
+
+    >>> from dendropy.interop import genbank
+    >>> gb_dna = genbank.GenBankDna(ids=['EU105474', 'EU105475'])
+    >>> gb_dna = genbank.GenBankDna(ids=['158930545', 'EU105475'])
+    >>> gb_dna = genbank.GenBankDna(ids=['158930545', '158930546'])
+    >>> gb_dna = genbank.GenBankDna(ids=['105474', '105475'], prefix="EU")
+    >>> gb_dna = genbank.GenBankDna(id_range=(105474, 105478), prefix="EU")
+    >>> gb_dna = genbank.GenBankDna(id_range=(158930545, 158930546))
+
+You can add more records to an existing instance of :class:`~dendropy.interop.genbank.GenBankDna`, :class:`~dendropy.interop.genbank.GenBankRna`, or :class:`~dendropy.interop.genbank.GenBankProtein` objects by using the "``acquire``" or "``acquire_range``" methods.
+The "``acquire``" method takes a sequence of accession identifiers or GI numbers for the first argument ("``ids``"), and, in addition, an optional string prefix to be prepended can be supplied using the second argument, "``prefix``", while verification can be disabled by specifying |False| for the third argument, "``verify``".
+The "``acquire_range``" method takes two mandatory *integer* arguments: the first and last value of the range of accession or GI numbers of the records to be downloaded.
+As with the other method, a string prefix to be prepended can be optionally supplied using the argument "``prefix``", while verification can be disabled by specifying "``verify=False``".
+For example::
+
+
+    >>> from dendropy.interop import genbank
+    >>> gb_dna = genbank.GenBankDna(['EU105474', 'EU105475'])
+    >>> print len(gb_dna)
+    >>> gb_dna.acquire([158930547, 158930548])
+    >>> print len(gb_dna)
+    >>> gb_dna.acquire_range(105479, 105480, prefix="EU")
+    >>> print len(gb_dna)
+    2
+    4
+    6
+
+Accessing GenBank Records
+=========================
+
+The |GenBank| records accumulated in :class:`~dendropy.interop.genbank.GenBankDna`, :class:`~dendropy.interop.genbank.GenBankRna`, and :class:`~dendropy.interop.genbank.GenBankProtein` objects are represented by collections of :class:`~dendropy.interop.genbank.GenBankAccessionRecord` objects.
+Each of these :class:`~dendropy.interop.genbank.GenBankAccessionRecord` objects represent the full information from the |GenBank| source as a rich Python object.
+
+    >>> from dendropy.interop import genbank
+    >>> gb_dna = genbank.GenBankDna(['EU105474', 'EU105475'])
+    >>> for gb_rec in gb_dna:
+    ...    print gb_rec.gi
+    ...    print gb_rec.locus
+    ...    print gb_rec.length
+    ...    print gb_rec.moltype
+    ...    print gb_rec.topology
+    ...    print gb_rec.strandedness
+    ...    print gb_rec.division
+    ...    print gb_rec.update_date
+    ...    print gb_rec.create_date
+    ...    print gb_rec.definition
+    ...    print gb_rec.primary_accession
+    ...    print gb_rec.accession_version
+    ...    print "(other seq ids)"
+    ...    for osi_key, osi_value in gb_rec.other_seq_ids.items():
+    ...        print "    ", osi_key, osi_value
+    ...    print gb_rec.source
+    ...    print gb_rec.organism
+    ...    print gb_rec.taxonomy
+    ...    print "(references)"
+    ...    for ref in gb_rec.references:
+    ...        print "    ", ref.number , ref.position , ref.authors , ref.consrtm , ref.title , ref.journal , ref.medline_id , ref.pubmed_id , ref.remark
+    ...    print "(feature_table)"
+    ...    for feature in gb_rec.feature_table:
+    ...        print "    ", feature.key, feature.location
+    ...        for qualifier in feature.qualifiers:
+    ...            print "        ", qualifier.name, qualifier.value
+    ...
+    158930545
+    EU105474
+    494
+    DNA
+    linear
+    double
+    PRI
+    27-NOV-2007
+    27-NOV-2007
+    Homo sapiens Ache non-coding region T864 genomic sequence
+    EU105474
+    EU105474.1
+    (other seq ids)
+        gb EU105474.1
+        gi 158930545
+    Homo sapiens (human)
+    Homo sapiens
+    Eukaryota; Metazoa; Chordata; Craniata; Vertebrata; Eutel...
+    (references)
+        1 1..494 [] None Statistical evaluation of alternativ...
+        2 1..494 [] None Direct Submission Submitted (17-AUG-...
+    (feature_table)
+        source 1..494
+            organism Homo sapiens
+            mol_type genomic DNA
+            db_xref taxon:9606
+            chromosome 18
+            note Ache
+        misc_feature 1..494
+            note non-coding region T864
+    .
+    .
+    .
+    (etc.)
+
+Generating Character Matrix Objects from GenBank Data
+=====================================================
+
+The "``generate_char_matrix()``" method of :class:`~dendropy.interop.genbank.GenBankDna`, :class:`~dendropy.interop.genbank.GenBankRna`, and :class:`~dendropy.interop.genbank.GenBankProtein` objects creates and returns a |CharacterMatrix| object of the appropriate type out of the data collected in them.
+When called without any arguments, it generates a new |TaxonNamespace| block, creating one new |Taxon| object for every sequence in the collection with a label corresponding to the identifier used to request the sequence::
+
+    >>> from dendropy.interop import genbank
+    >>> gb_dna = genbank.GenBankDna(ids=[158930545, 'EU105475'])
+    >>> char_matrix = gb_dna.generate_char_matrix()
+    >>> print char_matrix.as_string("nexus")
+    #NEXUS
+
+    BEGIN TAXA;
+
+        DIMENSIONS NTAX=2;
+        TAXLABELS
+            158930545
+            EU105475
+    ;
+    END;
+
+    BEGIN CHARACTERS;
+        DIMENSIONS  NCHAR=494;
+        FORMAT DATATYPE=DNA GAP=- MISSING=? MATCHCHAR=.;
+        MATRIX
+    158930545    TCTCTTATCAAACTA...
+    EU105475     TCTCTTATCAAACTA...
+        ;
+    END;
+
+
+    BEGIN SETS;
+    END;
+
+
+Customizing/Controlling Sequence Taxa
+-------------------------------------
+
+The taxon assignment can be controlled in one of two ways:
+
+    1. Using the "``label_components``" and optionally the "``label_component_separator``" arguments.
+    2. Specifying a custom function using the "``gb_to_taxon_func``" argument that takes a :class:`~dendropy.interop.genbank.GenBankAccessionRecord` object and returns the |Taxon| object to be assigned to the sequence; this approach requires specification of a |TaxonNamespace| object passed using the "``taxon_namespace``" argument.
+
+Specifying a Custom Label for Sequence Taxa
+...........................................
+
+The "``label_components``" and the "``label_component_separator``" arguments allow for customization of the taxon labels of the |Taxon| objects created for each sequence.
+The "``label_components``" argument should be assigned an ordered container (e.g., a list) of strings that correspond to attributes of objects of the :class:`~dendropy.interop.genbank.GenBankAccessionRecord` class.
+The values of these attributes will be concatenated to compose the |Taxon| object label.
+By default, the components will be separated by spaces, but you can override this by passing the string to be used by the "``label_component_separator``" argument.
+For example::
+
+
+    >>> from dendropy.interop import genbank
+    >>> gb_dna = genbank.GenBankDna(ids=[158930545, 'EU105475'])
+    >>> char_matrix = gb_dna.generate_char_matrix(
+    ... label_components=["accession", "organism", ],
+    ... label_component_separator="_")
+    >>> print [t.label for t in char_matrix.taxon_namespace]
+    ['EU105474_Homo_sapiens', 'EU105475_Homo_sapiens']
+    >>> char_matrix = gb_dna.generate_char_matrix(
+    ... label_components=["organism", "moltype", "gi"],
+    ... label_component_separator=".")
+    >>> print [t.label for t in char_matrix.taxon_namespace]
+    ['Homo.sapiens.DNA.158930545', 'Homo.sapiens.DNA.158930546']
+
+Specifying a Custom Taxon-Discovery Function
+............................................
+
+Full control over the |Taxon| object assignment process is given by using the "``gb_to_taxon_func``" argument.
+This should be used to specify a function that takes a :class:`~dendropy.interop.genbank.GenBankAccessionRecord` object and returns the |Taxon| object to be assigned to the sequence.
+The specification of a |TaxonNamespace| object passed using the "``taxon_namespace``" argument is also required, so that this can be assigned to the |CharacterMatrix| object.
+
+A simple example that illustrates the usage of the "``gb_to_taxon_func``" argument by creating a custom label::
+
+    #! /usr/bin/env python
+
+    import dendropy
+    from dendropy.interop import genbank
+
+    def gb_to_taxon(gb):
+        locality = gb.feature_table.find("source").qualifiers.find("note").value
+        label = "GI" + gb.gi + "." + locality
+        taxon = dendropy.Taxon(label=label)
+        return taxon
+
+    taxon_namespace = dendropy.TaxonNamespace()
+
+    gb_dna = genbank.GenBankDna(ids=[158930545, 'EU105475'])
+    char_matrix = gb_dna.generate_char_matrix(
+        taxon_namespace=taxon_namespace,
+        gb_to_taxon_func=gb_to_taxon)
+    print [t.label for t in char_matrix.taxon_namespace]
+
+which results in::
+
+    ['GI158930545.Ache', 'GI158930546.Arara']
+
+A more complex case might be where you may already have a |TaxonNamespace| with existing |Taxon| objects that you may want to associate with the sequences.
+The following illustrates how to do this::
+
+
+    #! /usr/bin/env python
+
+    import dendropy
+    from dendropy.interop import genbank
+
+    tree = dendropy.Tree.get_from_string(
+        "(Ache, (Arara, (Bribri, (Guatuso, Guaymi))))",
+        "newick")
+    def gb_to_taxon(gb):
+        locality = gb.feature_table.find("source").qualifiers.find("note").value
+        taxon = tree.taxon_namespace.get_taxon(label=locality)
+        assert taxon is not None
+        return taxon
+
+    gb_ids = [158930545, 158930546, 158930547, 158930548, 158930549]
+
+    gb_dna = genbank.GenBankDna(ids=gb_ids)
+    char_matrix = gb_dna.generate_char_matrix(
+        taxon_namespace=tree.taxon_namespace,
+        gb_to_taxon_func=gb_to_taxon)
+    print [t.label for t in char_matrix.taxon_namespace]
+    print tree.taxon_namespace is char_matrix.taxon_namespace
+    for taxon in tree.taxon_namespace:
+        print "{}: {}".format(
+            taxon.label,
+            char_matrix[taxon].symbols_as_string()[:10])
+
+which results in::
+
+    ['Ache', 'Arara', 'Bribri', 'Guatuso', 'Guaymi']
+    True
+    Ache: TCTCTTATCA
+    Arara: TCTCTTATCA
+    Bribri: TCTCTTATCA
+    Guatuso: TCTCTTATCA
+    Guaymi: TCTCTTATCA
+
+The important thing to note here is that the |Taxon| objects in the |DnaCharacterMatrix| do not just have the same labels as the |Taxon| objects in the |Tree|, "``tree``", but actually *are* the same objects (i.e., reference the same operational taxonomic units within |DendroPy|).
+
+Adding the GenBank Record as an Attribute
+-----------------------------------------
+
+It is sometimes useful to maintain a handle on the original GenBank record in the |CharacterMatrix| resulting from "``generate_char_matrix()``".
+The "``set_taxon_attr``"  and "``set_seq_attr``" arguments of the "``generate_char_matrix()``" method allow you to do this.
+The values supplied to these arguments should be strings that specify the name of the attribute that will be created on the |Taxon| or |CharacterDataSequence| objects, respectively.
+The value of this attribute will be the :class:`~dendropy.interop.genbank.GenBankAccessionRecord` that underlies the |Taxon| or |CharacterDataSequence| sequence.
+For example::
+
+    #! /usr/bin/env python
+
+    import dendropy
+    from dendropy.interop import genbank
+    gb_dna = genbank.GenBankDna(ids=[158930545, 'EU105475'])
+    char_matrix = gb_dna.generate_char_matrix(set_taxon_attr="gb_rec")
+    for taxon in char_matrix.taxon_namespace:
+        print "Data for taxon '{}' is based on GenBank record: {}".format(
+            taxon.label,
+            taxon.gb_rec.definition)
+
+will result in::
+
+    Data for taxon '158930545' is based on GenBank record: Homo sapiens Ache non-coding region T864 genomic sequence
+    Data for taxon 'EU105475' is based on GenBank record: Homo sapiens Arara non-coding region T864 genomic sequence
+
+Alternatively, the following::
+
+    #! /usr/bin/env python
+
+    import dendropy
+    from dendropy.interop import genbank
+    gb_dna = genbank.GenBankDna(ids=[158930545, 'EU105475'])
+    char_matrix = gb_dna.generate_char_matrix(set_seq_attr="gb_rec")
+    for sidx, sequence in enumerate(char_matrix.vectors()):
+        print "Sequence {} ('{}') is based on GenBank record: {}".format(
+            sidx+1,
+            char_matrix.taxon_namespace[sidx].label,
+            sequence.gb_rec.defline)
+
+will result in::
+
+    Sequence 1 ('158930545') is based on GenBank record: gi|158930545|gb|EU105474.1| Homo sapiens Ache non-coding region T864 genomic sequence
+    Sequence 2 ('EU105475') is based on GenBank record: gi|158930546|gb|EU105475.1| Homo sapiens Arara non-coding region T864 genomic sequence
+
+Annotating with GenBank Data and Metadata
+-----------------------------------------
+
+To persist the information in the :class:`~dendropy.interop.genbank.GenBankAccessionRecord` object through serialization and deserialization, you can request that this information gets added as an  :class:`~dendropy.datamodel.basemodel.Annotation` (see ":doc:`Working with Metadata Annotations </primer/working_with_metadata_annotations>`") to the corresponding |Taxon| or |CharacterDataSequence| object.
+
+Reference Annotation
+....................
+
+Specifying "`add_ref_annotation_to_taxa=True`" will result in a reference-style metadata annotation added to the |Taxon| object, while specifying "`add_ref_annotation_to_seqs=True`" will result in a reference-style metadata annotation added to the sequence.
+The reference-style annotation is a brief, single annotation that points to the URL of the original record.
+As with metadata annotations in general, you really need to be using the NeXML format for full functionality.
+
+So, for example::
+
+    #! /usr/bin/env python
+
+    import dendropy
+    from dendropy.interop import genbank
+    gb_dna = genbank.GenBankDna(ids=[158930545, 'EU105475'])
+    char_matrix = gb_dna.generate_char_matrix(add_ref_annotation_to_taxa=True)
+    print char_matrix.as_string("nexml")
+
+
+will result in::
+
+    <?xml version="1.0" encoding="ISO-8859-1"?>
+    <nex:nexml
+        version="0.9"
+        xsi:schemaLocation="http://www.nexml.org/2009 ../xsd/nexml.xsd"
+        xmlns:dcterms="http://purl.org/dc/terms/"
+        xmlns="http://www.nexml.org/2009"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xmlns:xml="http://www.w3.org/XML/1998/namespace"
+        xmlns:nex="http://www.nexml.org/2009"
+        xmlns:xsd="http://www.w3.org/2001/XMLSchema#"
+    >
+        <otus id="d4320533416">
+            <otu id="d4323884688" label="158930545">
+                <meta xsi:type="nex:ResourceMeta" rel="dcterms:source" href="http://www.ncbi.nlm.nih.gov/nucleotide/158930545" id="d4323884752" />
+            </otu>
+            <otu id="d4323884816" label="EU105475">
+                <meta xsi:type="nex:ResourceMeta" rel="dcterms:source" href="http://www.ncbi.nlm.nih.gov/nucleotide/EU105475" id="d4323990736" />
+            </otu>
+        </otus>
+        .
+        .
+        .
+    </nex:nexml>
+
+Alternatively::
+
+    #! /usr/bin/env python
+
+    import dendropy
+    from dendropy.interop import genbank
+    gb_dna = genbank.GenBankDna(ids=[158930545, 'EU105475'])
+    char_matrix = gb_dna.generate_char_matrix(add_ref_annotation_to_seqs=True)
+    print char_matrix.as_string("nexml")
+
+will result in::
+
+    <?xml version="1.0" encoding="ISO-8859-1"?>
+    <nex:nexml
+        version="0.9"
+        xsi:schemaLocation="http://www.nexml.org/2009 ../xsd/nexml.xsd"
+        xmlns:dcterms="http://purl.org/dc/terms/"
+        xmlns="http://www.nexml.org/2009"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xmlns:xml="http://www.w3.org/XML/1998/namespace"
+        xmlns:nex="http://www.nexml.org/2009"
+        xmlns:xsd="http://www.w3.org/2001/XMLSchema#"
+    >
+            <matrix>
+                <row id="d4320533856" otu="d4322811536">
+                    <meta xsi:type="nex:ResourceMeta" rel="dcterms:source" href="http://www.ncbi.nlm.nih.gov/nucleotide/158930545" id="d4322811600" />
+                    <seq>TCTCTTATCAAAC...</seq>
+                </row>
+                <row id="d4320534384" otu="d4322811664">
+                    <meta xsi:type="nex:ResourceMeta" rel="dcterms:source" href="http://www.ncbi.nlm.nih.gov/nucleotide/EU105475" id="d4322917584" />
+                    <seq>TCTCTTATCAAAC...</seq>
+                </row>
+            </matrix>
+        </characters>
+    </nex:nexml>
+
+Full Annotation
+...............
+
+Specifying "`add_full_annotation_to_taxa=True`" or "`add_full_annotation_to_seqs=True`" will result in the entire GenBank record being added as a set of annotations to the |Taxon| or |CharacterDataSequence| object, respectively.
+
+For example::
+
+    #! /usr/bin/env python
+
+    import dendropy
+    from dendropy.interop import genbank
+    gb_dna = genbank.GenBankDna(ids=[158930545, 'EU105475'])
+    char_matrix = gb_dna.generate_char_matrix(add_full_annotation_to_taxa=True)
+    print char_matrix.as_string("nexml")
+
+will result in the following::
+
+    <?xml version="1.0" encoding="ISO-8859-1"?>
+    <nex:nexml
+        version="0.9"
+        xsi:schemaLocation="http://www.nexml.org/2009 ../xsd/nexml.xsd"
+        xmlns:genbank="http://www.ncbi.nlm.nih.gov/dtd/INSD_INSDSeq.mod.dtd"
+        xmlns:dcterms="http://purl.org/dc/terms/"
+        xmlns="http://www.nexml.org/2009"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xmlns:xml="http://www.w3.org/XML/1998/namespace"
+        xmlns:nex="http://www.nexml.org/2009"
+        xmlns:xsd="http://www.w3.org/2001/XMLSchema#"
+    >
+        <otus id="d4320533416">
+            <otu id="d4323884688" label="158930545">
+                <meta xsi:type="nex:ResourceMeta" rel="dcterms:source" href="http://www.ncbi.nlm.nih.gov/nucleotide/158930545" id="d4323884752" >
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_locus" content="EU105474" id="d4323884880" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_length" content="494" id="d4323884944" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_moltype" content="DNA" id="d4323885008" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_topology" content="linear" id="d4323901520" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_strandedness" content="double" id="d4323901584" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_division" content="PRI" id="d4323901648" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_update-date" content="27-NOV-2007" id="d4323901712" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_create-date" content="27-NOV-2007" id="d4323901776" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_definition" content="Homo sapiens Ache non-coding region T864 genomic sequence" id="d4323901840" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_primary-accesison" content="EU105474" id="d4323901904" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_accession-version" content="EU105474.1" id="d4323901968" />
+                    <meta xsi:type="nex:ResourceMeta" rel="genbank:otherSeqIds" id="d4323902032" >
+                        <meta xsi:type="nex:LiteralMeta" property="genbank:gb" content="EU105474.1" id="d4323902160" />
+                        <meta xsi:type="nex:LiteralMeta" property="genbank:gi" content="158930545" id="d4323902224" />
+                    </meta>
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_source" content="Homo sapiens (human)" id="d4323902096" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_organism" content="Homo sapiens" id="d4323902288" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_taxonomy" content="Eukaryota; Metazoa; Chordata; Craniata; Vertebrata; Euteleostomi; Mammalia; Eutheria; Euarchontoglires; Primates; Haplorrhini; Catarrhini; Hominidae; Homo" id="d4323902352" />
+                    <meta xsi:type="nex:ResourceMeta" rel="genbank:INSDSeq_references" id="d4323902416" >
+                        <meta xsi:type="nex:ResourceMeta" rel="genbank:INSDReference_reference" id="d4323902544" >
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDReference_reference" content="1" id="d4323902672" />
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDReference_position" content="1..494" id="d4323902736" />
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDReference_title" content="Statistical evaluation of alternative models of human evolution" id="d4323902800" />
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDReference_journal" content="Proc. Natl. Acad. Sci. U.S.A. 104 (45), 17614-17619 (2007)" id="d4323902864" />
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDReference_pubmed" content="17978179" id="d4323902928" />
+                        </meta>
+                        <meta xsi:type="nex:ResourceMeta" rel="genbank:INSDReference_reference" id="d4323902608" >
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDReference_reference" content="2" id="d4323903056" />
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDReference_position" content="1..494" id="d4323903120" />
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDReference_title" content="Direct Submission" id="d4323903184" />
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDReference_journal" content="Submitted (17-AUG-2007) Laboratorio de Biologia Genomica e Molecular, Pontificia Universidade Catolica do Rio Grande do Sul, Av Ipiranga 6681, Predio 12C, Sala 172, Porto Alegre, RS 90619-900, Brazil" id="d4323903248" />
+                        </meta>
+                    </meta>
+                    <meta xsi:type="nex:ResourceMeta" rel="genbank:INSDSeq_feature-table" id="d4323902480" >
+                        <meta xsi:type="nex:ResourceMeta" rel="genbank:INSDSeq_feature" id="d4323903312" >
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDFeature_key" content="source" id="d4323903440" />
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDFeature_location" content="1..494" id="d4323903504" />
+                            <meta xsi:type="nex:ResourceMeta" rel="genbank:INSDFeature_quals" id="d4323903376" >
+                                <meta xsi:type="nex:LiteralMeta" property="genbank:organism" content="Homo sapiens" id="d4323903632" />
+                                <meta xsi:type="nex:LiteralMeta" property="genbank:mol_type" content="genomic DNA" id="d4323903696" />
+                                <meta xsi:type="nex:LiteralMeta" property="genbank:db_xref" content="taxon:9606" id="d4323903760" />
+                                <meta xsi:type="nex:LiteralMeta" property="genbank:chromosome" content="18" id="d4323903824" />
+                                <meta xsi:type="nex:LiteralMeta" property="genbank:note" content="Ache" id="d4323903888" />
+                            </meta>
+                        </meta>
+                        <meta xsi:type="nex:ResourceMeta" rel="genbank:INSDSeq_feature" id="d4323903568" >
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDFeature_key" content="misc_feature" id="d4323904016" />
+                            <meta xsi:type="nex:LiteralMeta" property="genbank:INSDFeature_location" content="1..494" id="d4323904080" />
+                            <meta xsi:type="nex:ResourceMeta" rel="genbank:INSDFeature_quals" id="d4323903952" >
+                                <meta xsi:type="nex:LiteralMeta" property="genbank:note" content="non-coding region T864" id="d4323904208" />
+                            </meta>
+                        </meta>
+                    </meta>
+                </meta>
+            </otu>
+            <otu id="d4323884816" label="EU105475">
+                <meta xsi:type="nex:ResourceMeta" rel="dcterms:source" href="http://www.ncbi.nlm.nih.gov/nucleotide/EU105475" id="d4324005904" >
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_locus" content="EU105475" id="d4324006032" />
+                    <meta xsi:type="nex:LiteralMeta" property="genbank:INSDSeq_length" content="494" id="d4324006096" />
+                    .
+                    .
+                    .
+                    (etc.)
+                </meta>
+            </otu>
+        </otus>
+        .
+        .
+        .
+        (etc.)
+    </nex:nexml>
+
+
diff --git a/doc/source/primer/index.rst b/doc/source/primer/index.rst
new file mode 100644
index 0000000..946f6c5
--- /dev/null
+++ b/doc/source/primer/index.rst
@@ -0,0 +1,78 @@
+###################
+The DendroPy Primer
+###################
+
+
+.. contents::
+    :local:
+    :backlinks: none
+
+The DendroPy Data Model: Phylogenetic Data Objects
+==================================================
+
+.. toctree::
+    :maxdepth: 2
+
+    dataobjects.rst
+    reading_and_writing.rst
+    converting.rst
+
+Working with Taxa: Operational Taxonomic Units and Taxon Namespaces
+===================================================================
+
+.. toctree::
+    :maxdepth: 2
+
+    taxa.rst
+    taxa_partitions.rst
+
+Working with Trees and Tree Lists
+=================================
+
+.. toctree::
+    :maxdepth: 2
+
+    trees.rst
+    treecollections.rst
+    treestats.rst
+    treemanips.rst
+    bipartitions.rst
+    treesims.rst
+
+Working with Character Data: Sequences and Matrices
+===================================================
+
+.. toctree::
+    :maxdepth: 2
+
+    chars.rst
+    phylogenetic_character_analyses.rst
+    popgenstats.rst
+
+Working with Data Sets
+======================
+
+.. toctree::
+    :maxdepth: 2
+
+    datasets.rst
+
+
+Working with Metadata Annotations
+=================================
+
+.. toctree::
+    :maxdepth: 2
+
+    working_with_metadata_annotations.rst
+
+Interoperability with Other Programs, Libraries and Applications
+================================================================
+
+.. toctree::
+    :maxdepth: 2
+
+    genbank.rst
+    seqgen.rst
+    paup.rst
+    raxml.rst
diff --git a/doc/source/primer/paup.rst b/doc/source/primer/paup.rst
new file mode 100644
index 0000000..7e9c15a
--- /dev/null
+++ b/doc/source/primer/paup.rst
@@ -0,0 +1,17 @@
+****
+PAUP
+****
+
+The :mod:`~dendropy.interop.paup` module provides functions to estimate a tree given a data matrix, or a substitution model given a tree and a data model.
+
+Trees can be estimated using likelihood:
+
+.. literalinclude:: /examples/paup_estimate_tree_ml.py
+
+Or neighbor-joining:
+
+.. literalinclude:: /examples/paup_estimate_tree_nj.py
+
+Estimating a substitution model parameters requires both a tree and a data matrix:
+
+.. literalinclude:: /examples/paup_estimate_model.py
diff --git a/doc/source/primer/phylogenetic_character_analyses.rst b/doc/source/primer/phylogenetic_character_analyses.rst
new file mode 100644
index 0000000..222116f
--- /dev/null
+++ b/doc/source/primer/phylogenetic_character_analyses.rst
@@ -0,0 +1,139 @@
+*******************************
+Phylogenetic Character Analyses
+*******************************
+
+Phylogenetic Independent Contrasts (PIC)
+========================================
+
+Basic Analysis
+--------------
+
+A phylogenetic independent contrasts analysis (Felsenstein 1985; Garland et al. 2005) can be carried out using the :class:`~dendropy.model.continuous.PhylogeneticIndependentContrasts` class.
+This requires you to have a |Tree| and a |ContinuousCharacterMatrix| which reference the same |TaxonNamespace|.
+Thus, if your data is in the same file:
+
+.. literalinclude:: /examples/pic2.py
+
+While if you have the tree and characters in a different file:
+
+.. literalinclude:: /examples/pic3.py
+
+In either case, we have a |Tree| object, ``tree`` and a |ContinuousCharacterMatrix| object, ``chars``, that both reference the same |TaxonNamespace|.
+
+Once the data is loaded, we create the :class:`~dendropy.model.continuous.PhylogeneticIndependentContrasts` object::
+
+    >>> from dendropy import continuous
+    >>> pic = continuous.PhylogeneticIndependentContrasts(tree=tree, char_matrix=chars)
+
+At this point, the data is ready for analysis. Typically, we want to map the contrasts onto a tree. The :meth:`~dendropy.model.continuous.PhylogeneticIndependentContrasts.contrasts_tree` method takes a single mandatory argument, the 0-based index of the character (or column) to be analyzed, and returns a |Tree| object that is a clone of the original input |Tree|, but with the following attributes added to each |Node|:
+
+        - ``pic_state_value``
+        - ``pic_state_variance``
+        - ``pic_contrast_raw``
+        - ``pic_contrast_variance``
+        - ``pic_contrast_standardized``
+        - ``pic_edge_length_error``
+        - ``pic_corrected_edge_length``
+
+In addition to the 0-based index first argument, ``character_index``, the :meth:`~dendropy.model.continuous.PhylogeneticIndependentContrasts.contrasts_tree` method takes the following optional arguments:
+
+    ``annotate_pic_statistics``
+        If |True| then the PIC statistics attributes will be *annotated* (i.e., serialized or persisted when the tree is written out or saved). Defaults to |False|.
+    ``state_values_as_node_labels``
+        If |True| then the :attr:`~dendropy.datamodel.treemodel.Node.label` attribute of each |Node| object will be set to the value of the character.
+    ``corrected_edge_lengths``
+        If |True| then the |Tree| returned will have its edge lengths adjusted to the corrected edge lengths as yielded by the PIC analysis.
+
+Results as a Table
+------------------
+
+So the following retrieves the contrasts tree for the first character (index=0), and prints a table of the various statistics::
+
+    >>> ctree1 = pic.contrasts_tree(character_index=0,
+    ...     annotate_pic_statistics=True,
+    ...     state_values_as_node_labels=False,
+    ...     corrected_edge_lengths=False)
+    >>> for nd in ctree1.postorder_internal_node_iter():
+    ...     row = [nd.pic_state_value,
+    ...             nd.pic_state_variance,
+    ...             nd.pic_contrast_raw,
+    ...             nd.pic_edge_length_error]
+    ...     row_str = [(("%10.8f") % i) for i in row]
+    ...     row_str = "    ".join(row_str)
+    ...     label = nd.label.ljust(6)
+    ...     print "%s %s" % (label, row_str)
+    HP     3.85263000    0.38500000    0.48342000    0.10500000
+    HPM    3.20037840    0.34560000    1.48239000    0.21560000
+    HPMA   2.78082358    0.60190555    1.17222840    0.22190555
+    Root   1.18372461    0.37574347    4.25050358    0.37574347
+
+Results as a Newick String with State Values as Node Labels
+-----------------------------------------------------------
+
+Alternatively, you might want to visualize the results as a tree showing the numeric values of the states. The following produces this for each character in the matrix by first requesting that :meth:`~dendropy.model.continuous.PhylogeneticIndependentContrasts.contrasts_tree` replace existing node labels with the state values for that node, and then, when writing out in Newick format, suppressing taxon labels and printing node labels in their place:
+
+.. literalinclude:: /examples/pic1.py
+
+This results in::
+
+    [&R] ((((4.09434:0.21,3.61092:0.21)3.85263:0.28,2.37024:0.49)3.2003784:0.13,2.02815:0.62)2.78082357912:0.38,'-1.46968':1.0)1.1837246134:0.0;
+
+    [&R] ((((4.74493:0.21,3.3322:0.21)4.038565:0.28,3.3673:0.49)3.7432084:0.13,2.89037:0.62)3.43796714996:0.38,2.30259:1.0)3.01135659943:0.0;
+
+Results as a NEXUS Document with Analysis Statistics as Node Metadata
+---------------------------------------------------------------------
+
+However, probably the best way to visualize the results would be as a tree marked up with metadata that can be viewed in |FigTree|_ (by checking "Node Labels" and selecting the appropriate statistics from the drop-down menu). This is, in fact, even easier to do than the above, as it will result from the default options. The following illustrates this. It collects the metadata-annotated contrast analysis trees produced by :meth:`~dendropy.model.continuous.PhylogeneticIndependentContrasts [...]
+
+.. literalinclude:: /examples/pic_annotated.py
+
+Thus, we get::
+
+    #NEXUS
+
+
+    BEGIN TAXA;
+        DIMENSIONS NTAX=5;
+        TAXLABELS
+            Homo
+            Pongo
+            Macaca
+            Ateles
+            Galago
+    ;
+    END;
+
+    BEGIN TREES;
+        TREE PIC_1 = [&R] ((((Homo:0.21[&pic_contrast_variance=None,pic_edge_length_error=0.0,pic_state_variance=None,pic_corrected_edge_length=0.21,pic_state_value=4.09434,pic_contrast_standardized=None,pic_contrast_raw=None],Pongo:0.21[&pic_contrast_variance=None,pic_edge_length_error=0.0,pic_state_variance=None,pic_corrected_edge_length=0.21,pic_state_value=3.61092,pic_contrast_standardized=None,pic_contrast_raw=None])HP:0.28[&pic_contrast_variance=0.42,pic_edge_length_error=0.105,pic [...]
+
+        TREE PIC_2 = [&R] ((((Homo:0.21[&pic_contrast_variance=None,pic_edge_length_error=0.0,pic_state_variance=None,pic_corrected_edge_length=0.21,pic_state_value=4.74493,pic_contrast_standardized=None,pic_contrast_raw=None],Pongo:0.21[&pic_contrast_variance=None,pic_edge_length_error=0.0,pic_state_variance=None,pic_corrected_edge_length=0.21,pic_state_value=3.3322,pic_contrast_standardized=None,pic_contrast_raw=None])HP:0.28[&pic_contrast_variance=0.42,pic_edge_length_error=0.105,pic_ [...]
+
+    END;
+
+
+Multifurcating Trees and Polytomies
+-----------------------------------
+
+By default, the :class:`~dendropy.model.continuous.PhylogeneticIndependentContrasts` class only handles fully-bifurcating trees, and throws an exception if the input tree has polytomies.
+You can change this behavior by specifying one of the following strings to the "``polytomy_strategy``" argument of the class constructor:
+
+    "``ignore``"
+        Polytomies will be handled without complaint::
+
+            >>> pic = dendropy.model.continuous.PhylogeneticIndependentContrasts(tree=tree,
+            ...        char_matrix=chars,
+            ...        polytomy_strategy='ignore')
+
+        Note that in this case the raw contrast and the raw contrast variance calculated for nodes that have more than two children will be invalid. The reconstructed state values should be still valid, though.
+
+    "``resolve``"
+        Polytomies will be arbitrarily resolved with 0-length branches::
+
+            >>> pic = dendropy.model.continuous.PhylogeneticIndependentContrasts(tree=tree,
+            ...        char_matrix=chars,
+            ...        polytomy_strategy='resolve')
+
+        In this case the validity of the analysis for nodes with (originally) more than two children is dubious, as the resulting contrasts are non-independent.
+
+
+
diff --git a/doc/source/primer/popgenstats.rst b/doc/source/primer/popgenstats.rst
new file mode 100644
index 0000000..0a32b53
--- /dev/null
+++ b/doc/source/primer/popgenstats.rst
@@ -0,0 +1,46 @@
+*************************************
+Population Genetic Summary Statistics
+*************************************
+
+The :mod:`dendropy.calculate.popgenstat` module provides functions that calculate some common population genetic summary statistics.
+
+For example, given a |DnaCharacterMatrix| as an argument, the :func:`~dendropy.calculate.popgenstat.num_segregating_sites()` function returns the raw number of segregating sites, :func:`~dendropy.calculate.popgenstat.average_number_of_pairwise_differences()` returns the average number of pairwise differences, and :func:`~dendropy.calculate.popgenstat.nucleotide_diversity()` returns the nucleotide diversity.
+
+More complex statistics are provided by the :class:`~dendropy.calculate.popgenstat.PopulationPairSummaryStatistics` class.
+Objects of this class are instantiated with two lists of |DnaCharacterDataSequence| objects as arguments, each representing a sample of DNA sequences drawn from two distinct but related populations.
+Once instantiated, the following attributes of the :class:`~dendropy.calculate.popgenstat.PopulationPairSummaryStatistics` object are available:
+
+        :attr:`~dendropy.calculate.popgenstat.PopulationPairSummaryStatistics.average_number_of_pairwise_differences`
+            The average number of pairwise differences between every sequence across both populations.
+
+        :attr:`~dendropy.calculate.popgenstat.PopulationPairSummaryStatistics.average_number_of_pairwise_differences_between`
+            The average number of pairwise differences between every sequence between both populations.
+
+        :attr:`~dendropy.calculate.popgenstat.PopulationPairSummaryStatistics.average_number_of_pairwise_differences_within`
+            The average number of pairwise differences between every sequence within each population.
+
+        :attr:`~dendropy.calculate.popgenstat.PopulationPairSummaryStatistics.average_number_of_pairwise_differences_net`
+            The net number of pairwise differences.
+
+        :attr:`~dendropy.calculate.popgenstat.PopulationPairSummaryStatistics.num_segregating_sites`
+            The number of segregating sites.
+
+        :attr:`~dendropy.calculate.popgenstat.PopulationPairSummaryStatistics.wattersons_theta`
+            Watterson's theta.
+
+        :attr:`~dendropy.calculate.popgenstat.PopulationPairSummaryStatistics.wakeleys_psi`
+            Wakeley's psi.
+
+        :attr:`~dendropy.calculate.popgenstat.PopulationPairSummaryStatistics.tajimas_d`
+            Tajima's D.
+
+The following example calculates the suite of population genetic summary statistics for sequences drawn from two populations of sticklebacks.
+The original data consists of 23 sequences, with individuals from Eastern Pacific populations identified by their taxon labels beginning with "``EPAC``" and individuals from Western Pacific populations identified by their taxon labels beginning with "``WPAC``".
+The taxon labels thus are used as the basis for sorting the sequences into the required lists of |DnaCharacterDataSequence| objects, ``p1`` and ``p2``.
+
+.. literalinclude:: /examples/pgstats1.py
+    :linenos:
+
+Lines 6-12 build up the two lists of |DnaCharacterDataSequence| objects by sorting the original sequences into their source populations based on the taxon label (with operational taxonomic units with labels beginning with "``EPAC``" coming from the Eastern Pacific, and assigned to the list ``p1``, while those that begin with "``WPAC``" coming from the Western Pacific, and assigned to the list ``p2``).
+These lists are then passed as the instantiation arguments to the :class:`~dendropy.calculate.popgenstat.PopulationPairSummaryStatistics` constructor in line 14.
+The calculations are performed immediately, and the results reported in the following lines.
diff --git a/doc/source/primer/raxml.rst b/doc/source/primer/raxml.rst
new file mode 100644
index 0000000..f9adf54
--- /dev/null
+++ b/doc/source/primer/raxml.rst
@@ -0,0 +1,20 @@
+*****
+RAxML
+*****
+
+The :mod:`~dendropy.interop.raxml` module provides the :class:`~dendropy.interop.raxml.RaxmlRunner` class, which is a lightweight (i.e., mostly "pass-through") wrapper to the |RAxML| maximum-likelihood tree estimation program. |RAxML| needs to be installed in the system for this class to be used.
+
+The class handles the exporting of the |DendroPy| dataset in a format suitable for |RAxML| analysis, and re-reading the resulting tree back into a |DendroPy| object.
+
+The basic call assumes nucleotide data and estimates a tree under the ``GTRCAT`` model:
+
+
+.. literalinclude:: /examples/raxml_estimate_tree.py
+
+
+Currently, the only way to customize the call to the underlying |RAxML| program using :meth:`~dendropy.interop.raxml.RaxmlRunner.estimate_tree` is to pass it a list of command-line arguments, with each argument token a separate list element. So, for example, to include invariant sites in the substitution model and run 250 independent tree searches::
+
+
+    >>> tree = rx.estimate_tree(data, ['-m', 'GTRCATI', '-N', '250'])
+
+Obviously, while this works, it is neither ideal nor very Pythonic. Future releases will polish up the interface.
diff --git a/doc/source/primer/reading_and_writing.rst b/doc/source/primer/reading_and_writing.rst
new file mode 100644
index 0000000..068c62a
--- /dev/null
+++ b/doc/source/primer/reading_and_writing.rst
@@ -0,0 +1,250 @@
+*************************************
+Reading and Writing Phylogenetic Data
+*************************************
+
+Creating New Objects From an External Data Source
+=================================================
+
+The |Tree|, |TreeList|, |CharacterMatrix|-derived (i.e., |DnaCharacterMatrix|,
+|ProteinCharacterMatrix|, |StandardCharacterMatrix|, etc.), and |DataSet|
+classes all support a "|get|" factory class-method that instantiates an object
+of the given class from a data source. This method takes, at a minimum, two
+keyword arguments that specify the *source* of the data and the *schema* (or
+format) of the data.
+
+The source must be specified using one and exactly one of the following:
+
+    -   a path to a file (specified using the keyword argument "``path``")
+    -   a file or a file-like object opened for reading (specified using the keyword argument ``"file"``)
+    -   a string value giving the data directly (specified using the keyword argument ``"data"``)
+    -   or a URL (specified using the keyword argument ``"url"``)
+
+The schema is specified using the keyword argument ``"schema"``, and takes a string value that identifies the format of data.
+This ":ref:`schema specification string <Specifying_the_Data_Source_Format>`" can be one of: ":doc:`fasta </schemas/fasta>`", ":doc:`newick </schemas/newick>`", ":doc:`nexus </schemas/nexus>`", ":doc:`nexml </schemas/nexml>`", or ":doc:`phylip </schemas/phylip>`".
+Not all formats are supported for reading, and not all formats make sense for particular objects (for example, it would not make sense to try and instantiate a |Tree| or |TreeList| object from a |FASTA|-formatted data source).
+
+.. A ":term:`schema`" is DendroPy-speak for "format" (we cannot use the argument or variable name "format" for this in library, because this is a Python built-in, and hence we use "schema" and adopted this terminology for consistency), and is specified using one of a set of predefined string values.
+
+For example:
+
+.. code-block:: python
+
+    import dendropy
+
+    tree1 = dendropy.Tree.get(path="mle.tre", schema="newick")
+    tree2 = dendropy.Tree.get(file=open("mle.nex", "r"), schema="nexus")
+    tree3 = dendropy.Tree.get(data="((A,B),(C,D));", schema="newick")
+    tree4 = dendropy.Tree.get(url="http://api.opentreeoflife.org/v2/study/pg_1144/tree/tree2324.nex", schema="nexus")
+
+    tree_list1 = dendropy.TreeList.get(path="pythonidae.mcmc.nex", schema="nexus")
+    tree_list2 = dendropy.TreeList.get(file=open("pythonidae.mcmc.nex", "r"), schema="nexus")
+    tree_list3 = dendropy.TreeList.get(data="(A,(B,C));((A,B),C);", schema="newick")
+
+    dna1 = dendropy.DnaCharacterMatrix.get(file=open("pythonidae.fasta"), schema="fasta")
+    dna2 = dendropy.DnaCharacterMatrix.get(url="http://purl.org/phylo/treebase/phylows/matrix/TB2:M2610?format=nexus", schema="nexus")
+    aa1 = dendropy.ProteinCharacterMatrix.get(file=open("pythonidae.dat"), schema="phylip")
+    std1 = dendropy.StandardCharacterMatrix.get(path="python_morph.nex", schema="nexus")
+    std2 = dendropy.StandardCharacterMatrix.get(data=">t1\n01011\n\n>t2\n11100", schema="fasta")
+
+    dataset1 = dendropy.DataSet.get(path="pythonidae.chars_and_trees.nex", schema="nexus")
+    dataset2 = dendropy.DataSet.get(url="http://purl.org/phylo/treebase/phylows/study/TB2:S1925?format=nexml", schema="nexml")
+
+The "|get|" method takes a number of other optional keyword arguments that provide control over how the data is interpreted and processed.
+Some are general to all classes (e.g., the "``label``" or "``taxon_namespace``" arguments), while others specific to a given class (e.g. the "``exclude_trees``" argument when instantiating data into a |DataSet| object, or the "``tree_offset``" argument when instantiating data into a |Tree| or |TreeList| object).
+These are all covered in detail in the documentation of the respective methods for each class:
+
+    -   :meth:`Tree.get <dendropy.datamodel.treemodel.Tree.get>`
+    -   :meth:`TreeList.get <dendropy.datamodel.treecollectionmodel.TreeList.get>`
+    -   :meth:`DnaCharacterMatrix.get <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.get>`
+    -   :meth:`RnaCharacterMatrix.get <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.get>`
+    -   :meth:`ProteinCharacterMatrix.get <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.get>`
+    -   :meth:`RestrictionSitesCharacterMatrix.get <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.get>`
+    -   :meth:`InfiniteSitesCharacterMatrix.get <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.get>`
+    -   :meth:`StandardCharacterMatrix.get <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.get>`
+    -   :meth:`DataSet.get <dendropy.datamodel.datasetmodel.DataSet.get>`
+
+Other optional keyword arguments are :ref:`specific to the schema or format <Schema_Specific_Keyword_Arguments>` (e.g., the "``preserve_underscores``" argument when reading |Newick| or |Nexus| data).
+These are covered in detail in the :doc:`DendroPy Schema Guide </schemas/index>`.
+
+.. note::
+
+    The |Tree|, |TreeList|, |CharacterMatrix|-derived, and |DataSet| classes
+    also support a "|get_from_methods|" family of factory class-methods that
+    can be seen as specializations of the "|get|" method for various types of
+    sources (in fact, the "|get|" method is actually a dispatcher that calls on
+    one of these methods below for implementation of the functionality):
+
+        :meth:`get_from_stream(src, schema, \*\*kwargs)`
+            Takes a file or file-like object opened for reading the data source as the first argument, and a :ref:`schema specification string <Specifying_the_Data_Source_Format>` as the second.
+            Optional :term:`schema`-specific keyword arguments can be used to control the parsing and other options.
+            This is equivalent to calling ":meth:`get(file=src, schema=schema, ...)`".
+
+        :meth:`get_from_path(src, schema, \*\*kwargs)`
+            Takes a string specifying the path to the data source file as the first argument, and a :ref:`schema specification string <Specifying_the_Data_Source_Format>` as the second.
+            Optional :term:`schema`-specific keyword arguments can be used to control the parsing and other options.
+            This is equivalent to calling ":meth:`get(path=src, schema=schema, ...)`".
+
+        :meth:`get_from_string(src, schema, \*\*kwargs)`
+            Takes a string containing the source data as the first argument, and a :ref:`schema specification string <Specifying_the_Data_Source_Format>` as the second.
+            Optional :term:`schema`-specific keyword arguments can be used to control the parsing and other options.
+            This is equivalent to calling ":meth:`get(data=src, schema=schema, ...)`".
+
+        :meth:`get_from_url(src, schema, \*\*kwargs)`
+            Takes a string containing the URL of the data as the first argument, and a :ref:`schema specification string <Specifying_the_Data_Source_Format>` as the second.
+            Optional :term:`schema`-specific keyword arguments can be used to control the parsing and other options.
+            This is equivalent to calling ":meth:`get(url=src, schema=schema, ...)`".
+
+    As with the "|get|" method, the additional keyword arguments are specific to the given class or schema type.
+
+Adding Data to Existing Objects from an External Data Source
+============================================================
+
+In addition to the "|get|" class factory method, the collection classes (|TreeList|, |TreeArray| and |DataSet|) each support a "|read|" *instance* method that *add* data from external sources to an existing object (as opposed to creating and returning a new object based on an external data source).
+This "|read|" instance method has a signature that parallels the "|get|" factory method described above, requiring:
+
+    -   A specification of a source using one and exactly one of the following keyword arguments: "``path``", "``file``", "``data``", "``url``".
+    -   A specification of the :ref:`schema <Specifying_the_Data_Source_Format>` or format of the data.
+    -   Optional keyword arguments to customize/control the parsing and interpretation of the data.
+
+As with the "|get|" method, the "|read|" method takes a number of other optional keyword arguments that provide control over how the data is interpreted and processed, which are covered in more detail in the documentation of the respective methods for each class:
+
+    -   :meth:`TreeList.read <dendropy.datamodel.treecollectionmodel.TreeList.read>`
+    -   :meth:`TreeArray.read <dendropy.datamodel.treecollectionmodel.TreeArray.read>`
+    -   :meth:`DataSet.read <dendropy.datamodel.datasetmodel.DataSet.read>`
+
+as well as :ref:`schema-specific keyword arguments <Schema_Specific_Keyword_Arguments>` which are covered in detail in the :doc:`DendroPy Schema Guide </schemas/index>`.
+
+For example, the following accumulates post-burn-in trees from several different files into a single |TreeList| object::
+
+    >>> import dendropy
+    >>> post_trees = dendropy.TreeList()
+    >>> post_trees.read(
+    ...         file=open("pythonidae.nex.run1.t", "r"),
+    ...         schema="nexus",
+    ...         tree_offset=200)
+    >>> print(len(post_trees))
+    800
+    >>> post_trees.read(
+    ...         path="pythonidae.nex.run2.t",
+    ...         schema="nexus",
+    ...         tree_offset=200)
+    >>> print(len(post_trees))
+    1600
+    >>> s = open("pythonidae.nex.run3.t", "r").read()
+    >>> post_trees.read(
+    ...         data=s,
+    ...         schema="nexus",
+    ...         tree_offset=200)
+    >>> print(len(post_trees))
+    2400
+
+.. The |TreeList| object automatically handles taxon management, and ensures that all appended |Tree| objects share the same |TaxonNamespace| reference. Thus all the |Tree| objects created and aggregated from the data sources in the example will all share the same |TaxonNamespace| and |Taxon| objects, which is important if you are going to be carrying comparisons or operations between multiple |Tree| objects.
+.. As with the "|get|" method, keyword arguments can be used to provide :ref:`control on the data source parsing <Customizing_Data_Creation_and_Reading>`.
+
+while the following accumulates data from a variety of sources into a single |DataSet| object under the same |TaxonNamespace| to ensure that they all reference the same set of |Taxon| objects::
+
+    >>> import dendropy
+    >>> ds = dendropy.DataSet()
+    >>> tns = ds.new_taxon_namespace()
+    >>> ds.attach_taxon_namespace(tns)
+    >>> ds.read(url="http://api.opentreeoflife.org/v2/study/pg_1144/tree/tree2324.nex",
+    ...     schema="nexus")
+    >>> ds.read(file=open("pythonidae.fasta"), schema="fasta")
+    >>> ds.read(url="http://purl.org/phylo/treebase/phylows/matrix/TB2:M2610?format=nexus",
+    ...     schema="nexus")
+    >>> ds.read(file=open("pythonidae.dat"), schema="phylip")
+    >>> ds.read(path="python_morph.nex", schema="nexus")
+    >>> ds.read(data=">t1\n01011\n\n>t2\n11100", schema="fasta")
+
+.. note:: DendroPy 3.xx supported "|read_from_methods|" methods on |Tree| and |CharacterMatrix|-derived classes. This is no longer supported in DendroPy 4 and above. Instead of trying to re-populate an existing |Tree| or |CharacterMatrix|-derived object by using "|read_from_methods|"::
+
+            x = dendropy.Tree()
+            x.read_from_path("tree1.nex", "nexus")
+            .
+            .
+            .
+            x.read_from_path("tree2.nex", "nexus")
+
+        simply rebind the new object returned by "|get|"::
+
+            x = dendropy.Tree.get(path="tree1.nex", schema="nexus")
+            .
+            .
+            .
+            x = dendropy.Tree.get(path="tree2.nex", schema="nexus")
+
+.. note:: The |TreeList|, |TreeArray|, and |DataSet| classes
+    also support a "|read_from_methods|" family of instance methods that
+    can be seen as specializations of the "|read|" method for various types of
+    sources (in fact, the "|read|" method is actually a dispatcher that calls on
+    one of these methods below for implementation of the functionality):
+
+        :meth:`read_from_stream(src, schema, \*\*kwargs)`
+            Takes a file or file-like object opened for reading the data source as the first argument, and a :ref:`schema specification string <Specifying_the_Data_Source_Format>` as the second.
+            Optional :term:`schema`-specific keyword arguments can be used to control the parsing and other options.
+            This is equivalent to calling ":meth:`read(file=src, schema=schema, ...)`".
+
+        :meth:`read_from_path(src, schema, \*\*kwargs)`
+            Takes a string specifying the path to the data source file as the first argument, and a :ref:`schema specification string <Specifying_the_Data_Source_Format>` as the second.
+            Optional :term:`schema`-specific keyword arguments can be used to control the parsing and other options.
+            This is equivalent to calling ":meth:`read(path=src, schema=schema, ...)`".
+
+        :meth:`read_from_string(src, schema, \*\*kwargs)`
+            Takes a string containing the source data as the first argument, and a :ref:`schema specification string <Specifying_the_Data_Source_Format>` as the second.
+            Optional :term:`schema`-specific keyword arguments can be used to control the parsing and other options.
+            This is equivalent to calling ":meth:`read(data=src, schema=schema, ...)`".
+
+        :meth:`read_from_url(src, schema, \*\*kwargs)`
+            Takes a string containing the URL of the data as the first argument, and a :ref:`schema specification string <Specifying_the_Data_Source_Format>` as the second.
+            Optional :term:`schema`-specific keyword arguments can be used to control the parsing and other options.
+            This is equivalent to calling ":meth:`read(url=src, schema=schema, ...)`".
+
+    As with the "|read|" method, the additional keyword arguments are specific to the given class or schema type.
+
+
+Writing Out Phylogenetic Data
+=============================
+
+The |Tree|, |TreeList|, |CharacterMatrix|-derived (i.e., |DnaCharacterMatrix|,
+|ProteinCharacterMatrix|, |StandardCharacterMatrix|, etc.), and |DataSet|
+classes all support a "|write|" instance method for serialization of data to an
+external data source.
+This method takes two mandatory keyword arguments:
+
+    -   One and exactly one of the following to specify the *destination*:
+        -   a path to a file (specified using the keyword argument "``path``")
+        -   a file or a file-like object opened for writing (specified using the keyword argument ``"file"``)
+
+    -   A ":ref:`schema specification string <Specifying_the_Data_Source_Format>`" given by the keyword argument "``schema``", to identify the schema or format for the output.
+
+Alternatively, the |Tree|, |TreeList|, |CharacterMatrix|-derived, or |DataSet| objects may also be represented as a string by calling the "``as_string()``" method, which requires a single mandatory argument, "``schema``", giving the ":ref:`schema specification string <Specifying_the_Data_Source_Format>`" to identify the format of the output.
+
+In either case, the ":ref:`schema specification string <Specifying_the_Data_Source_Format>`" can be one of: ":doc:`fasta </schemas/fasta>`", ":doc:`newick </schemas/newick>`", ":doc:`nexus </schemas/nexus>`", ":doc:`nexml </schemas/nexml>`", or ":doc:`phylip </schemas/phylip>`".
+
+For example:
+
+.. code-block:: python
+
+
+    tree.write(path="output.tre", schema="newick")
+    dest = open("output.xml", "w")
+    tree_list.write(file=dest, schema="nexml")
+    print(dna_character_matrix.as_string(schema="fasta"))
+
+
+As with the "|get|" and "|read|" methods, further keyword arguments can be specified to control behavior.
+These are covered in detail in the ":doc:`/schemas/index`" section.
+
+.. note::
+
+    The |Tree|, |TreeList|, |CharacterMatrix|-derived, and |DataSet| classes also support a "|write_to_methods|" family of instance methods that can be seen as specializations of the "|write|" method for various types of destinations:
+
+        :meth:`write_to_stream(dest, schema, \*\*kwargs)`
+            Takes a file or file-like object opened for writing the data as the first argument, and a string specifying the schema as the second.
+
+        :meth:`write_to_path(dest, schema, \*\*kwargs)`
+            Takes a string specifying the path to the file as the first argument, and a string specifying the schema as the second.
+
+        :meth:`as_string(schema, \*\*kwargs)`
+            Takes a string specifying the schema as the first argument, and returns a string containing the formatted-representation of the data.
+
diff --git a/doc/source/primer/seqgen.rst b/doc/source/primer/seqgen.rst
new file mode 100644
index 0000000..4e94ce3
--- /dev/null
+++ b/doc/source/primer/seqgen.rst
@@ -0,0 +1,13 @@
+*******
+Seq-Gen
+*******
+
+|DendroPy| includes native infrastructure for phylogenetic sequence simulation
+on DendroPy trees under the HKY model. Being pure-Python, however, it is a
+little slow. If |SeqGen| is installed on your system, though, you can take
+advantage of the :class:`dendropy.interop.seqgen.SeqGen` wrapper to efficiently
+simulate sequences under a wide variety of models.  The following examples
+should be enough to get started, and the class is simple and straightforward
+enough so that all options should be pretty much self-documented.
+
+.. literalinclude:: /examples/seqgen.py
diff --git a/doc/source/primer/taxa.rst b/doc/source/primer/taxa.rst
new file mode 100644
index 0000000..58af92e
--- /dev/null
+++ b/doc/source/primer/taxa.rst
@@ -0,0 +1,277 @@
+*************************************
+Taxon Namespaces and Taxon Management
+*************************************
+
+Conceptual Background
+=====================
+
+Many elements of phylogenetic and, more generally, evo- or bioinformatic data
+are associated with some element in the real world.
+For example, a leaf :term:`node` on a :term:`tree` or a sequence in a character
+matrix is typically associated with an individual biological organism, a
+population or deme, a species or higher-level taxonomic group, and so on.
+In classical phylogenetic literature, this referent is termed an "operational
+taxonomic unit" or OTU.
+In DendroPy, we use the term "taxon".
+Regardless of whether the referent of the data represents an
+individual organism (or even a less distinct subunit, e.g., a fragment from a
+shotgun assay) or an actual taxonomic group, we apply the term "taxon".
+We assign a (string) label to the concept of the entity (individual,
+sub-individual, or group) represented by a "taxon", which allows us to relate
+different elements of data to the same or different real-world referent.
+These collections of labels representing taxon concepts are organized into
+"taxon namespaces".
+
+The concept of a "taxon namespace" is fundamental to managing data in DendroPy.
+A "taxon namespace" represents a self-contained universe of *names* that
+map to operational taxonomic unit *concepts*.
+Operational taxonomic unit concepts are essentially names for groups of
+organisms in the "real world". Operational taxonomic unit concepts are
+organized into taxonomic namespaces. A taxonomic namespace is a self-contained
+and functionally-complete collection of mutually-distinct operational taxonomic
+unit concepts, and provide the semantic context in which operational taxonomic
+units from across various data sources of different formats and provenances can
+be related through correct interpretation of their taxon labels.
+
+    * Operational taxonomic units are modeled by a |Taxon| object.
+
+    * Taxonomic namespaces, in which operational taxonomic units are organized,
+      are modeled by a |TaxonNamespace| object.
+
+    * A |TaxonNamespace| manages a collection of |Taxon| objects, where each
+      object represents a distinct operational taxonomic unit concept within
+      the taxonomic namespace represented by that |TaxonNamespace| object.
+
+    * Each |Taxon| object can belong to one and only one |TaxonNamespace|:
+      |Taxon| objects are not shared across |TaxonNamespace| objects.
+
+    * Each |Taxon| object has an attribute, ``label``, whose (string) value
+      is the name of the operational taxon unit concept that it represents.
+
+    * Different |Taxon| objects represent different operational taxonomic
+      unit concepts, even if they have the same label value.
+
+    * All client objects (`TaxonNamespaceAssociated` objects) that reference
+      the same |TaxonNamespace| reference the same "universe" or domain of
+      operational taxonomic unit concepts.
+
+    * Operational taxonomic units from across different data sources are mapped
+      to distinct |Taxon| objects within a particular |TaxonNamespace| based on
+      matching the string values of labels of the |Taxon| object.
+
+    * A particular taxonomic unit concept in one data source will only be
+      correctly related to the same taxonomic unit concept (i.e, the same
+      |Taxon| object) in another data source only if they have both
+      been parsed with reference to the same taxonomic namespace (i.e., the
+      same |TaxonNamespace| has been used).
+
+    * A |TaxonNamespace| assigns an "accession index" to every |Taxon| object
+      added to it. This is a stable and unique number within the context of any
+      given |TaxonNamespace| object (though a |Taxon| object may have different
+      accession indexes in different |TaxonNamespace| objects if it
+      belongs to multiple namespaces). This number will be used to
+      calculate the "split bitmask" hash of the trivial split or external edge
+      subtending the node to which this |Taxon| object is assigned on a tree.
+      The concept of a "split bitmask" hash is fundamental to DendroPy's tree
+      operations. The split bitmask is a hash that uniquely identifies every
+      split on a tree.  It is calculated by OR'ing the split bitmask of all the
+      child splits of the given split. Terminal edges, of course, do not have
+      child edges, and their split bitmask is given by the accession index of
+      the |Taxon| object at their head or target nodes.
+
+Management of Shared Taxon Namespaces
+=====================================
+
+Operational taxonomic units in DendroPy are represented by |Taxon| objects, and distinct collections of operational taxonomic units are represented by |TaxonNamespace| objects.
+Two distinct |Taxon| objects are considered distinct entities, *even if they share the same label*.
+Understanding this is crucial to understanding management of data in DendroPy.
+Many operations in DendroPy are based on the identity of the |Taxon| objects (e.g., counting of splits on trees).
+Many errors by novices using DendroPy come from inadvertently creating and using multiple |Taxon| objects to refer to the same taxon concept.
+
+Every time a definition of taxa is encountered in a data source, for example, a "TAXA" block in a NEXUS file, a new |TaxonNamespace| object is created and populated with |Taxon| objects corresponding to the taxa defined in the data source.
+Some data formats do not have explicit definition of taxa, e.g. a Newick tree source.
+These nonetheless can be considered to have an implicit definition of a collection of operational taxonomic units given by the aggregate of all operational taxonomic units referenced in the data (i.e., the set of all distinct labels on trees in a Newick file).
+
+Every time a reference to a taxon is encountered in a data source, such as a taxon label in a tree or matrix statement in a NEXUS file, the current |TaxonNamespace| object is searched for corresponding |Taxon| object with a matching label (see below for details on how the match is made).
+If found, the |Taxon| object is used to represent the taxon.
+If not, a new |Taxon| object is created, added to the |TaxonNamespace| object, and used to represent the taxon.
+
+If multiple data sources are read, then with |TreeList| or |TreeArray| the |TaxonNamespace| instance associated with the collection through the ``taxon_namespace`` attribute will always be used to manage the |Taxon| objects, resulting in correct association of labels with |Taxon| objects across multiple reads. So, for example, the following:
+
+.. literalinclude:: /examples/taxa_mgmt1.py
+
+results in::
+
+    ['A', 'B', 'C']
+    ['A', 'B', 'C']
+
+Note how the total number of taxa is three, and there is full correspondence between the taxa.
+That is, the taxa referenced by "A", "B", and "C" in the second read operation were correctly mapped to the taxa from the first read operation.
+
+With |DataSet| instances, however, each independent read operation will, by default, be managed under a *new* (i.e., independent and different) |TaxonNamespace|.
+
+.. literalinclude:: /examples/taxa_mgmt2.py
+
+So, if reading data from multiple data sources using a |DataSet| instance that should all be managed under the same taxon namespace, then the |TaxonNamespace| instance to use should be explicitly passed in using the "``taxon_namespace``" keyword argument:
+
+.. literalinclude:: /examples/taxa_mgmt3.py
+
+
+While each |TreeList| manages all its member |Tree| objects under the same |TaxonNamespace| reference, if two different |TreeList| instances have different |TaxonNamespace| references, then the |Taxon| objects read/managed by them *will* be necessarily different from each other, even if the labels are the same.
+
+.. literalinclude:: /examples/taxa_mgmt4.py
+
+Again, this can be addressed by ensuring that the |TaxonNamespace| reference is the same for |TreeList| instances that need to interact:
+
+.. literalinclude:: /examples/taxa_mgmt5.py
+
+The same obtains for |Tree| and |CharacterMatrix|-derived instances: if the associated |TaxonNamespace| references are different, then the associated |Taxon| objects will be different, even if the labels are the same. This will make comparison or any operation between them impossible:
+
+.. literalinclude:: /examples/taxa_mgmt1a.py
+
+So, if taxa are shared, then the |TaxonNamespace| to use should be passed in explicitly to ensure that each |Tree| or |CharacterMatrix|-derived instance also share the same |TaxonNamespace|:
+
+.. literalinclude:: /examples/taxa_mgmt1b.py
+
+Managing Taxon Name Mapping Within a Taxon Namespace
+====================================================
+
+DendroPy maps taxon definitions encountered in a data source to |Taxon| objects by the taxon label.
+The labels have to match **exactly** for the taxa to be correctly mapped.
+By default, this matching is case-insensitive, though case-sensitivity can be set by specifying "``case_sensitive_taxon_labels=True``".
+
+Some quirks may arise due to some schema-specific idiosyncrasies.
+For example, the NEXUS standard dictates that an underscore ("_") should be substituted for a space in all labels.
+Thus, when reading a NEXUS or Newick source, the taxon labels "Python_regius" and "Python regius" are exactly equivalent, and will be mapped to the same |Taxon| object.
+
+However, this underscore-to-space mapping does **not** take place when reading, for example, a FASTA schema file.
+Here, underscores are preserved, and thus "Python_regius" does not map to "Python regius".
+This means that if you were to read a NEXUS file with the taxon label, "Python_regius", and later read a FASTA file with the same taxon label, i.e., "Python_regius", these would map to different taxa!
+This is illustrated by the following:
+
+.. literalinclude:: /examples/taxon_labels1.py
+
+Which produces the following, almost certainly incorrect, result::
+
+    TaxonNamespace object at 0x43b4e0 (TaxonNamespace4437216): 4 Taxa
+        [0] Taxon object at 0x22867b0 (Taxon36202416): 'Python regius'
+        [1] Taxon object at 0x2286810 (Taxon36202512): 'Python sebae'
+        [2] Taxon object at 0x22867d0 (Taxon36202448): 'Python_regius'
+        [3] Taxon object at 0x2286830 (Taxon36202544): 'Python_sebae'
+
+Even more confusingly, if this file is written out in NEXUS schema, it would result in the space/underscore substitution taking place, resulting in two pairs of taxa with the same labels.
+
+If you plan on mixing sources from different formats, it is important to keep in mind the space/underscore substitution that takes place by default with NEXUS/Newick formats, but does not take place with other formats.
+
+You could simply avoid underscores and use only spaces instead:
+
+.. literalinclude:: /examples/taxon_labels2.py
+
+Which results in::
+
+    TaxonNamespace object at 0x43b4e0 (TaxonNamespace4437216): 2 Taxa
+        [0] Taxon object at 0x22867b0 (Taxon36202416): 'Python_regius'
+        [1] Taxon object at 0x2286810 (Taxon36202512): 'Python_sebae'
+
+Or use underscores in the NEXUS-formatted data, but spaces in the non-NEXUS data:
+
+.. literalinclude:: /examples/taxon_labels2b.py
+
+Which results in the same as the preceding example::
+
+    TaxonNamespace object at 0x43b4e0 (TaxonNamespace4437216): 2 Taxa
+        [0] Taxon object at 0x22867b0 (Taxon36202416): 'Python regius'
+        [1] Taxon object at 0x2286810 (Taxon36202512): 'Python sebae'
+
+You can also wrap the underscore-bearing labels in the NEXUS/Newick source in quotes, which preserves them from being substituted for spaces:
+
+.. literalinclude:: /examples/taxon_labels3.py
+
+Which will result in::
+
+    TaxonNamespace object at 0x43c780 (TaxonNamespace4441984): 2 Taxa
+        [0] Taxon object at 0x2386770 (Taxon37250928): 'Python_regius'
+        [1] Taxon object at 0x2386790 (Taxon37250960): 'Python_sebae'
+
+Finally, you can also override the default behavior of DendroPy's NEXUS/Newick parser by passing the keyword argument ``preserve_underscores=True`` to any "|read_from_methods|" or "|get_from_methods|" method. For example:
+
+.. literalinclude:: /examples/taxon_labels4.py
+
+will result in::
+
+    TaxonNamespace object at 0x43c780 (TaxonNamespace4441984): 2 Taxa
+        [0] Taxon object at 0x2386770 (Taxon37250928): 'Python_regius'
+        [1] Taxon object at 0x2386790 (Taxon37250960): 'Python_sebae'
+
+This may seem the simplest solution, in so far as it means that you need not maintain lexically-different taxon labels across files of different formats, but a gotcha here is that if writing to NEXUS/Newick schema, any label with underscores will be automatically quoted to preserve the underscores (again, as dictated by the NEXUS standard), which will mean that: (a) your output file will have quotes, and, as a result, (b) the underscores in the labels will be "hard" underscores if the fi [...]
+
+    >>> print(d.as_string('nexus'))
+    #NEXUS
+
+    BEGIN TAXA;
+        TITLE TaxonNamespace5736800;
+        DIMENSIONS NTAX=2;
+        TAXLABELS
+            'Python_regius'
+            'Python_sebae'
+      ;
+    END;
+
+    BEGIN CHARACTERS;
+        TITLE DnaCharacterMatrix37505040;
+        LINK TAXA = TaxonNamespace5736800;
+        DIMENSIONS  NCHAR=5;
+        FORMAT DATATYPE=DNA GAP=- MISSING=? MATCHCHAR=.;
+        MATRIX
+    'Python_regius'    ACGTA
+    'Python_sebae'      ACGTA
+        ;
+    END;
+
+    BEGIN CHARACTERS;
+        TITLE DnaCharacterMatrix37504848;
+        LINK TAXA = TaxonNamespace5736800;
+        DIMENSIONS  NCHAR=4;
+        FORMAT DATATYPE=DNA GAP=- MISSING=? MATCHCHAR=.;
+        MATRIX
+    'Python_regius'    AAAA
+    'Python_sebae'      ACGT
+        ;
+    END;
+
+Note that the taxon labels have changed semantically between the input and the NEXUS output, as, according to the NEXUS standard, "Python_regius", while equivalent to "Python regius", is **not** equivalent to "'Python_regius'".
+To control this, you can pass the keyword argument ``quote_underscores=False`` to any :meth:`write_to_*`, or :meth:`as_string()` method, which will omit the quotes even if the labels contain underscores::
+
+    >>> print(d.as_string('nexus', quote_underscores=False))
+    #NEXUS
+
+    BEGIN TAXA;
+        TITLE TaxonNamespace5736800;
+        DIMENSIONS NTAX=2;
+        TAXLABELS
+            Python_regius
+            Python_sebae
+      ;
+    END;
+
+    BEGIN CHARACTERS;
+        TITLE DnaCharacterMatrix37505040;
+        LINK TAXA = TaxonNamespace5736800;
+        DIMENSIONS  NCHAR=5;
+        FORMAT DATATYPE=DNA GAP=- MISSING=? MATCHCHAR=.;
+        MATRIX
+    Python_regius    ACGTA
+    Python_sebae      ACGTA
+        ;
+    END;
+
+    BEGIN CHARACTERS;
+        TITLE DnaCharacterMatrix37504848;
+        LINK TAXA = TaxonNamespace5736800;
+        DIMENSIONS  NCHAR=4;
+        FORMAT DATATYPE=DNA GAP=- MISSING=? MATCHCHAR=.;
+        MATRIX
+    Python_regius    AAAA
+    Python_sebae      ACGT
+        ;
+    END;
diff --git a/doc/source/primer/taxa_partitions.rst b/doc/source/primer/taxa_partitions.rst
new file mode 100644
index 0000000..696092d
--- /dev/null
+++ b/doc/source/primer/taxa_partitions.rst
@@ -0,0 +1,142 @@
+************************
+Partitions of Taxon Sets
+************************
+
+A number of different applications require a specification of a partition of a set of taxa.
+For example, when calculating population genetic summary statistics for a multi-population sample or numbers of deep coalescences given particular monophyletic groupings of taxa.
+The :class:`~dendropy.datamodel.taxonmodel.TaxonNamespacePartition` object describes a partitioning of a :class:`~dendropy.datamodel.taxonmodel.TaxonNamespace` into an exhaustive set of mutually-exclusive :class:`~dendropy.datamodel.taxonmodel.TaxonNamespace` subsets.
+
+There are four different ways to specify a partitioning scheme: by using a function, attribute, dictionary or list.
+The first three of these rely on providing a mapping of a :class:`~dendropy.datamodel.taxonmodel.Taxon` object to a subset membership identifier, i.e., a string, integer or some other type of value that identifies the grouping. The last explicitly describes the grouping as a list of lists.
+
+For example, consider the following::
+
+    >>> seqstr = """\
+    ... #NEXUS
+    ...
+    ... BEGIN TAXA;
+    ...     DIMENSIONS NTAX=13;
+    ...     TAXLABELS a1 a2 a3 b1 b2 b3 c1 c2 c3 c4 c5 d1 d2;
+    ... END;
+    ... BEGIN CHARACTERS;
+    ...     DIMENSIONS NCHAR=7;
+    ...     FORMAT DATATYPE=DNA MISSING=? GAP=- MATCHCHAR=.;
+    ...     MATRIX
+    ...         a1 ACCTTTG
+    ...         a2 ACCTTTG
+    ...         a3 ACCTTTG
+    ...         b1 ATCTTTG
+    ...         b2 ATCTTTG
+    ...         b3 ACCTTTG
+    ...         c1 ACCCTTG
+    ...         c2 ACCCTTG
+    ...         c3 ACCCTTG
+    ...         c4 ACCCTTG
+    ...         c5 ACCCTTG
+    ...         d1 ACAAAAG
+    ...         d2 ACCAAAG
+    ...     ;
+    ... END;
+    ... """
+    >>> seqs = DnaCharacterMatrix.get_from_string(seqstr, 'nexus')
+    >>> taxon_namespace = seqs.taxon_namespace
+
+Here we have sequences sampled from four populations, with the population identified by the first character of the taxon label. To create a partition of the |TaxonNamespace| resulting from parsing the file, we call the :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespace.partition` method. This method takes one of the following four keyword arguments:
+
+        ``membership_func``
+            A function that takes a ``Taxon`` object as an argument and
+            returns a population membership identifier or flag
+            (e.g., a string, an integer).
+
+        ``membership_attr_name``
+            Name of an attribute of ``Taxon`` objects that serves as an
+            identifier for subset membership.
+
+        ``membership_dict``
+            A dictionary with ``Taxon`` objects as keys and population
+            membership identifier or flag as values (e.g., a string,
+            an integer).
+
+        ``membership_lists``
+            A container of containers of ``Taxon`` objects, with every
+            ``Taxon`` object in ``taxon_namespace`` represented once and only
+            once in the sub-containers.
+
+For example, using the membership function approach, we define a function that returns the first character of the taxon label, and pass it to the :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespace.partition` using the ``membership_func`` keyword argument::
+
+    >>> def mf(t):
+    ...     return t.label[0]
+    ...
+    >>> tax_parts = taxon_namespace.partition(membership_func=mf)
+
+Or, as would probably be done with such a simple membership function in practice::
+
+    >>> tax_parts = taxon_namespace.partition(membership_func=lambda x: x.label[0])
+
+Either way, we would get the following results::
+
+    >>> for s in tax_parts.subsets():
+    ...     print(s.description())
+    ...
+    TaxonNamespace object at 0x101116838 (TaxonNamespace4312885304: 'a'): 3 Taxa
+    TaxonNamespace object at 0x101116788 (TaxonNamespace4312885128: 'c'): 5 Taxa
+    TaxonNamespace object at 0x101116730 (TaxonNamespace4312885040: 'd'): 2 Taxa
+    TaxonNamespace object at 0x1011167e0 (TaxonNamespace4312885216: 'b'): 3 Taxa
+
+We could also add a population identification attribute to each |Taxon| object, and use the ``membership_attr_name`` keyword to specify that subsets should be created based on the value of this attribute::
+
+    >>> for t in taxon_namespace:
+    ...     t.population = t.label[0]
+    ...
+    >>> tax_parts = taxon_namespace.partition(membership_attr_name='population')
+
+The results are identical to that above::
+
+    >>> for s in tax_parts.subsets():
+    ...     print(s.description())
+    ...
+    TaxonNamespace object at 0x1011166d8 (TaxonNamespace4312884952: 'a'): 3 Taxa
+    TaxonNamespace object at 0x1011165d0 (TaxonNamespace4312884688: 'c'): 5 Taxa
+    TaxonNamespace object at 0x1011169f0 (TaxonNamespace4312885744: 'd'): 2 Taxa
+    TaxonNamespace object at 0x101116680 (TaxonNamespace4312884864: 'b'): 3 Taxa
+
+The third approach involves constructing a dictionary that maps |Taxon| objects to their identification label and passing this using the ``membership_dict`` keyword argument::
+
+    >>> tax_pop_label_map = {}
+    >>> for t in taxon_namespace:
+    ...     tax_pop_label_map[t] = t.label[0]
+    ...
+    >>> tax_parts = taxon_namespace.partition(membership_dict=tax_pop_label_map)
+
+Again, the results are the same::
+
+    >>> for s in tax_parts.subsets():
+    ...     print(s.description())
+    ...
+    TaxonNamespace object at 0x1011166e8 (TaxonNamespace4312884952: 'a'): 3 Taxa
+    TaxonNamespace object at 0x1011165f0 (TaxonNamespace4312884688: 'c'): 5 Taxa
+    TaxonNamespace object at 0x1011169f1 (TaxonNamespace4312885744: 'd'): 2 Taxa
+    TaxonNamespace object at 0x101116620 (TaxonNamespace4312884864: 'b'): 3 Taxa
+
+Finally, a list of lists can be constructed and passed using the ``membership_lists`` argument::
+
+    >>> pops = []
+    >>> pops.append(taxon_namespace[0:3])
+    >>> pops.append(taxon_namespace[3:6])
+    >>> pops.append(taxon_namespace[6:11])
+    >>> pops.append(taxon_namespace[11:13])
+    >>> tax_parts = taxon_namespace.partition(membership_lists=pops)
+
+Again, a :class:`~dendropy.datamodel.taxonmodel.TaxonNamespacePartition` object with four |TaxonNamespace| subsets is the result, only this time the subset labels are based on the list indices::
+
+    >>> subsets = tax_parts.subsets()
+    >>> print(subsets)
+    set([<TaxonNamespace object at 0x10069f838>, <TaxonNamespace object at 0x10069fba8>, <TaxonNamespace object at 0x101116520>, <TaxonNamespace object at 0x1011164c8>])
+    >>> for s in subsets:
+    ...     print(s.description())
+    ...
+    TaxonNamespace object at 0x10069f838 (TaxonNamespace4301912120: '0'): 3 Taxa
+    TaxonNamespace object at 0x10069fba8 (TaxonNamespace4301913000: '1'): 3 Taxa
+    TaxonNamespace object at 0x101116520 (TaxonNamespace4312884512: '3'): 2 Taxa
+    TaxonNamespace object at 0x1011164c8 (TaxonNamespace4312884424: '2'): 5 Taxa
+
diff --git a/doc/source/primer/treecollections.rst b/doc/source/primer/treecollections.rst
new file mode 100644
index 0000000..7a6e421
--- /dev/null
+++ b/doc/source/primer/treecollections.rst
@@ -0,0 +1,108 @@
+********************
+Collections of Trees
+********************
+
+Collections of Trees: The |TreeList| Class
+==========================================
+
+|TreeList| objects are collections of |Tree| objects constrained to sharing the same |TaxonNamespace|.
+Any |Tree| object added to a |TreeList| will have its :attr:`~dendropy.datamodel.treemodel.Tree.taxon_namespace` attribute assigned to the |TaxonNamespace| object of the |TreeList|, and all referenced |Taxon| objects will be mapped to the same or corresponding |Taxon| objects of this new |TaxonNamespace|, with new |Taxon| objects created if no suitable match is found.
+Objects of the |TreeList| class have an "``annotations``" attribute, which is a :class:`~dendropy.datamodel.basemodel.AnnotationSet` object, i.e. a collection of :class:`~dendropy.datamodel.basemodel.Annotation` instances tracking metadata.
+More information on working with metadata can be found in the ":doc:`/primer/working_with_metadata_annotations`" section.
+
+Reading and Writing |TreeList| Instances
+----------------------------------------
+
+The |TreeList| class supports the ":meth:`~dendropy.datamodel.treecollectionmodel.TreeList.get`" factory class method for simultaneously instantiating and populating |TreeList| instances, taking a data source as the first argument and a :ref:`schema specification string <Specifying_the_Data_Source_Format>` ("``nexus``", "``newick``", "``nexml``", "``fasta``", or "``phylip``", etc.) as the second::
+
+    import dendropy
+    treelist = dendropy.TreeList.get(path='pythonidae.mcmc.nex', schema='nexus')
+
+The ":meth:`~dendropy.datamodel.treecollectionmodel.TreeList.read`" instance method can be used to add trees from a data source to an existing |TreeList| instance:
+
+.. literalinclude:: /examples/tree_list_add1.py
+
+A |TreeList| object can be written to an external resource using the ":meth:`~dendropy.datamodel.treecollectionmodel.TreeList.write`" method::
+
+    import dendropy
+    treelist = dendropy.TreeList.get(
+        path="trees1.nex",
+        schema="nexus",
+        )
+    treelist.write(
+        path="trees1.newick",
+        schema="newick",
+        )
+
+
+It can also be represented as a string using the ":meth:`~dendropy.datamodel.treecollectionmodel.TreeList.as_string`" method::
+
+    import dendropy
+    treelist = dendropy.TreeList.get(
+        path="trees1.nex",
+        schema="nexus",
+        )
+    print(treelist.as_string(schema="newick"))
+
+More information on reading operations is available in the :doc:`/primer/reading_and_writing` section.
+
+Using and Managing the Collections of Trees
+-------------------------------------------
+
+A |TreeList| behaves very much like a list, supporting iteration, indexing, slices, removal, sorting, etc.:
+
+.. literalinclude:: /examples/tree_list_ops1.py
+
+
+The |TreeList| class supports the native Python ``list`` interface methods of adding individual |Tree| instances through
+        :meth:`~dendropy.datamodel.treecollectionmodel.TreeList.append`,
+        :meth:`~dendropy.datamodel.treecollectionmodel.TreeList.extend`,
+        :meth:`~dendropy.datamodel.treecollectionmodel.TreeList.insert`,
+        and other methods, but with the added aspect of :doc:`taxon namespace </primer/taxa>` migration:
+
+.. literalinclude:: /examples/tree_list_ops2.py
+
+
+Cloning/Copying a |TreeList|
+----------------------------
+
+You can make a *shallow*-copy of a |TreeList| calling :meth:`dendropy.datamodel.treecollectionmodel.TreeList.clone` with a "``depth``" argument value of 0 or by slicing:
+
+.. literalinclude:: /examples/tree_list_copy1.py
+
+With a shallow-copy, the actual |Tree| instances are shared between lists (as is the |TaxonNamespace|).
+
+For a taxon namespace-scoped *deep*-copy, on the other hand, i.e., where the |Tree| instances are also cloned but the |Taxon| and |TaxonNamespace| references are preserved, you can call :meth:`dendropy.datamodel.treecollectionmodel.TreeList.clone` with a "``depth``" argument value of 1 or by copy construction:
+
+.. literalinclude:: /examples/tree_list_copy2.py
+
+Finally, for a true and complete deep-copy, where even the |Taxon| and |TaxonNamespace| references are copied, call :func:`copy.deepcopy`:
+
+.. literalinclude:: /examples/tree_list_copy3.py
+
+Efficiently Iterating Over Trees in a File
+==========================================
+
+If you need to process a collection of trees defined in a file source, you can, of course, read the trees into a |TreeList| object and iterate over the resulting collection::
+
+    import dendropy
+    trees = dendropy.TreeList.get(path='pythonidae.beast-mcmc.trees', schema='nexus')
+    for tree in trees:
+        print(tree.as_string('newick'))
+
+In the above, the entire data source is parsed and stored in the ``trees`` object before being processed in the subsequent lines.
+In some cases, you might not need to maintain all the trees in memory at the same time.
+For example, you might be interested in calculating the distribution of a statistic over a collection of trees, but have no need to refer to any of the trees after the statistic has been calculated.
+In this case, it will be more efficient to use the :meth:`~dendropy.datamodel.treemodel.Tree.yield_from_files` function.
+This takes a *list* or any other iterable of file-like objects or strings (giving filepaths) as the first argument ("``files``") and a mandatory :ref:`schema specification string <Specifying_the_Data_Source_Format>` as the second argument ("``schema``").
+Additional keyword arguments to customize the parsing are the same as that for the general "|get|" and "|read|" methods.
+For example, the following script reads a model tree from a file, and then iterates over a collection of MCMC trees in a set of files, calculating and storing the symmetric distance between the model tree and each of the MCMC trees one at time:
+
+.. literalinclude:: /examples/tree_iter1.py
+
+Note how a |TaxonNamespace| object is created and passed to both the :meth:`~dendropy.datamodel.treemodel.Tree.get` and the :meth:`~dendropy.datamodel.treemodel.Tree.yield_from_files` functions using the ``taxon_namespace`` keyword argument.
+This is to ensure that the corresponding taxa in both sources get mapped to the same |Taxon| objects in DendroPy object space, so as to enable comparisons of the trees.
+If this was not done, then each tree would have its own distinct |TaxonNamespace| object (and associated |Taxon| objects), making comparisons impossible.
+
+When the number of trees are large or the trees themselves are large or both, iterating over trees in files using :meth:`~dendropy.datamodel.treemodel.Tree.yield_from_files` is almost always going to give the best performance, sometimes orders of magnitude faster.
+This is because it avoids the slow-down of the Python virtual machine that results from high memory usage.
diff --git a/doc/source/primer/treemanips.rst b/doc/source/primer/treemanips.rst
new file mode 100644
index 0000000..5f0c009
--- /dev/null
+++ b/doc/source/primer/treemanips.rst
@@ -0,0 +1,452 @@
+***********************************
+Tree Manipulation and Restructuring
+***********************************
+
+The |Tree| class provides both low-level and high-level methods for manipulating tree structure.
+
+Low-level methods are associated with |Node| objects, and allow you to restructure the relationships between nodes at a fine level: :meth:`~dendropy.datamodel.treemodel.Node.add_child`, :meth:`~dendropy.datamodel.treemodel.Node.new_child`, :meth:`~dendropy.datamodel.treemodel.Node.remove_child`, etc.
+
+In most cases, however, you will be using high-level methods to restructure |Tree| objects.
+
+In all cases, if any part of the |Tree| object's structural relations change, *and* you are interested in calculating any metrics or statistics on the tree or comparing the tree to another tree, you need to call :meth:`~dendropy.datamodel.treemodel.Tree.update_bipartitions()` on the object to update the internal splits hash representation.
+This is not done for you automatically because there is a computational cost associated with the operation, and the splits hashes are not always needed. Furthermore, even when needed, if there are a number of structural changes to be made to a |Tree| object before calculations/comparisons, it makes sense to postpone the splits rehashing until all the tree manipulations are completed.
+Most methods that affect the tree structure that require the splits hashes to be updated take an ``update_bipartitions`` argument. By specifying |True| for this, the |Tree| object will recalculate the splits hashes after the changes have been made.
+
+Rooting, Derooting and Rerooting
+================================
+
+The Rooting of Tree(s) Read from External Sources
+-------------------------------------------------
+
+|Newick| and |Nexus| formats have a convention where the rooting of the tree is specified by a special comment token preceding the tree statement: "``[&R]``" to indicate a rooted tree::
+
+    [&R] ((A,B),(C,D));
+
+and : "``[&U]``" to indicate an unrooted tree::
+
+    [&U] ((A,B),(C,D));
+
+These rooting comment tokens are respected when tree data is read. If no such comment token is given, then the tree is assumed to be **unrooted** by default.
+
+You can control the behavior of trees read by using the "``rooting``" keyword argument when using the "|get|" or "|read|" methods of the |Tree| or |TreeList| classes. This takes one of four string values which determines how the rooting states of the tree(s) will be set:
+
+    "default-unrooted" [default]
+        All trees are interpreted as unrooted unless a "[&R]"
+        comment token explicitly specifies them as rooted.
+    "default-rooted"
+        All trees are interpreted as rooted unless a "[&U]"
+        comment token explicitly specifies them as unrooted.
+    "force-unrooted"
+        All trees are unconditionally interpreted as unrooted.
+    "force-rooted"
+        All trees are unconditionally interpreted as rooted.
+
+The behavior of this can be summarized by the following:
+
++-----------------------------------+----------------------------+----------------------------+------------------------------------+
+| Keyword Argument                  | ``[&U]`` in Tree Statement | ``[&R]`` in Tree Statement | No Rooting Given in Tree Statement |
++-----------------------------------+----------------------------+----------------------------+------------------------------------+
+| ``rooting=None`` (or unspecified) | unrooted                   | rooted                     | None                               |
++-----------------------------------+----------------------------+----------------------------+------------------------------------+
+| ``rooting="default-unrooted"``    | unrooted                   | rooted                     | unrooted                           |
++-----------------------------------+----------------------------+----------------------------+------------------------------------+
+| ``rooting="default-rooted"``      | unrooted                   | rooted                     | rooted                             |
++-----------------------------------+----------------------------+----------------------------+------------------------------------+
+| ``rooting="force-unrooted"``      | unrooted                   | unrooted                   | unrooted                           |
++-----------------------------------+----------------------------+----------------------------+------------------------------------+
+| ``rooting="force-rooted"``        | rooted                     | rooted                     | rooted                             |
++-----------------------------------+----------------------------+----------------------------+------------------------------------+
+
+As an example:
+
+.. literalinclude:: /examples/readroot2.py
+
+Setting the Rooting State
+-------------------------
+
+All |Tree| objects have a boolean property, :attr:`~dendropy.datamodel.treemodel.Tree.is_rooted` that DendroPy uses to track whether or not the tree should be treated as rooted. The property :attr:`~dendropy.datamodel.treemodel.Tree.is_unrooted` is also defined, and these two properties are synchronized. Thus setting :attr:`~dendropy.datamodel.treemodel.Tree.is_rooted` to |True| will result in :attr:`~dendropy.datamodel.treemodel.Tree.is_unrooted` being set to |False| and vice versa.
+
+The state of a |Tree| object's rootedness flag does not modify any internal structural relationship between nodes. It simply determines how its splits hashes are calculated, which in turn affects a broad range of comparison and metric operations. Thus you need to update the splits hashes after modifying the :attr:`~dendropy.datamodel.treemodel.Tree.is_rooted` property by calling the :meth:`~dendropy.datamodel.treemodel.Tree.update_bipartitions()` before carrying out any calculations on o [...]
+
+For example, the following:
+
+.. literalinclude:: /examples/setroot1.py
+
+
+will result in::
+
+
+    Original:
+    /---------------------------------------------------- A
+    +
+    |            /--------------------------------------- B
+    \------------+
+                 |            /-------------------------- C
+                 \------------+
+                              |            /------------- D
+                              \------------+
+                                           \------------- E
+
+
+    After `is_rooted=False`:
+    /---------------------------------------------------- A
+    +
+    |            /--------------------------------------- B
+    \------------+
+                 |            /-------------------------- C
+                 \------------+
+                              |            /------------- D
+                              \------------+
+                                           \------------- E
+
+
+    After `update_bipartitions()`:
+    /---------------------------------------------------- A
+    |
+    +---------------------------------------------------- B
+    |
+    |                /----------------------------------- C
+    \----------------+
+                     |                 /----------------- D
+                     \-----------------+
+                                       \----------------- E
+
+
+    After `update_bipartitions(suppress_unifurcations=False)`:
+    /---------------------------------------------------- A
+    +
+    |            /--------------------------------------- B
+    \------------+
+                 |            /-------------------------- C
+                 \------------+
+                              |            /------------- D
+                              \------------+
+                                           \------------- E
+
+Derooting
+---------
+
+To deroot a rooted |Tree|, you can also call the :meth:`~dendropy.datamodel.treemodel.Tree.deroot()` method, which collapses the root to a trifurcation if it is a bifurcation *and* sets the :attr:`~dendropy.datamodel.treemodel.Tree.is_rooted` to |False|. The :meth:`~dendropy.datamodel.treemodel.Tree.deroot()` method has the same structural and semantic effect as setting :attr:`~dendropy.datamodel.treemodel.Tree.is_rooted` to |False| and then calling :meth:`~dendropy.datamodel.treemodel.Tree.update [...]
+
+Rerooting
+---------
+
+To reroot a |Tree| along an existing edge, you can use the :meth:`~dendropy.datamodel.treemodel.Tree.reroot_at_edge()` method. This method takes an |Edge| object as its first argument. This rerooting is a structural change that will require the splits hashes to be updated before performing any tree comparisons or calculating tree metrics. If needed, you can do this yourself by calling :meth:`~dendropy.datamodel.treemodel.Tree.update_bipartitions()` later, or you can pass in |True| as  [...]
+
+As an example, the following reroots the tree along an internal edge (note that we do not recalculate the splits hashes, as we are not carrying out any calculations or comparisons with the |Tree|):
+
+.. literalinclude:: /examples/reroot_at_internal_edge.py
+
+and results in::
+
+    Before:
+    [&R] (A,(B,(C,(D,E))));
+
+    /---------------------------------------------------- A
+    +
+    |            /--------------------------------------- B
+    \------------+
+                 |            /-------------------------- C
+                 \------------+
+                              |            /------------- D
+                              \------------+
+                                           \------------- E
+
+
+    After:
+    [&R] ((D,E),(C,(B,A)));
+
+                                       /----------------- D
+    /----------------------------------+
+    |                                  \----------------- E
+    +
+    |                /----------------------------------- C
+    \----------------+
+                     |                 /----------------- B
+                     \-----------------+
+                                       \----------------- A
+
+Another example, this time rerooting along an edge subtending a tip instead of an internal edge:
+
+.. literalinclude:: /examples/reroot_at_external_edge.py
+
+which results in::
+
+    Before:
+    [&R] (A,(B,(C,(D,E))));
+
+    /---------------------------------------------------- A
+    +
+    |            /--------------------------------------- B
+    \------------+
+                 |            /-------------------------- C
+                 \------------+
+                              |            /------------- D
+                              \------------+
+                                           \------------- E
+
+
+    After:
+    [&R] (D,(E,(C,(B,A))));
+
+    /---------------------------------------------------- D
+    +
+    |            /--------------------------------------- E
+    \------------+
+                 |            /-------------------------- C
+                 \------------+
+                              |            /------------- B
+                              \------------+
+                                           \------------- A
+
+To reroot a |Tree| at a node instead, you can use the :meth:`~dendropy.datamodel.treemodel.Tree.reroot_at_node()` method:
+
+.. literalinclude:: /examples/reroot_at_node.py
+
+which results in::
+
+    Before:
+    [&R] (A,(B,(C,(D,E))));
+
+    /---------------------------------------------------- A
+    +
+    |            /--------------------------------------- B
+    \------------+
+                 |            /-------------------------- C
+                 \------------+
+                              |            /------------- D
+                              \------------+
+                                           \------------- E
+
+
+    After:
+    [&R] (D,E,(C,(B,A)));
+
+    /---------------------------------------------------- D
+    |
+    +---------------------------------------------------- E
+    |
+    |                /----------------------------------- C
+    \----------------+
+                     |                 /----------------- B
+                     \-----------------+
+                                       \----------------- A
+
+
+You can also reroot the tree such that a particular node is moved to the outgroup position using the :meth:`~dendropy.datamodel.treemodel.Tree.to_outgroup_position()` method, which takes a |Node| as the first argument. Again, you can update the splits hashes *in situ* by passing |True| as the second argument, and again, here we do not because we are not carrying out any calculations. For example:
+
+.. literalinclude:: /examples/to_outgroup_position.py
+
+which will result in::
+
+    Before:
+    [&R] (A,(B,(C,(D,E))));
+
+    /---------------------------------------------------- A
+    +
+    |            /--------------------------------------- B
+    \------------+
+                 |            /-------------------------- C
+                 \------------+
+                              |            /------------- D
+                              \------------+
+                                           \------------- E
+
+
+    After:
+    [&R] (C,(D,E),(B,A));
+
+    /---------------------------------------------------- C
+    |
+    |                         /-------------------------- D
+    +-------------------------+
+    |                         \-------------------------- E
+    |
+    |                         /-------------------------- B
+    \-------------------------+
+                              \-------------------------- A
+
+If you have a tree with edge lengths specified, you can reroot it at the midpoint, using the :meth:`~dendropy.datamodel.treemodel.Tree.reroot_at_midpoint()` method:
+
+.. literalinclude:: /examples/reroot_at_midpoint.py
+
+which results in::
+
+    Before:
+    [&R] (A:0.55,(B:0.82,(C:0.74,(D:0.42,E:0.64):0.24):0.15):0.2):0.3;
+
+              /------------------- A
+              +
+              |      /---------------------------- B
+              \------+
+                     |    /-------------------------- C
+                     \----+
+                          |        /-------------- D
+                          \--------+
+                                   \---------------------- E
+
+
+    After:
+    [&R] ((C:0.74,(D:0.42,E:0.64):0.24):0.045,(B:0.82,A:0.75):0.105):0.3;
+
+                   /------------------------------- C
+                 /-+
+                 | |         /------------------ D
+                 | \---------+
+                 +           \---------------------------- E
+                 |
+                 |   /------------------------------------ B
+                 \---+
+                     \-------------------------------- A
+
+
+
+Pruning Subtrees and Tips
+=========================
+
+To remove a set of tips from a |Tree|, you can use either the :meth:`~dendropy.datamodel.treemodel.Tree.prune_taxa()` or the :meth:`~dendropy.datamodel.treemodel.Tree.prune_taxa_with_labels()` methods. The first takes a container of |Taxon| objects as an argument, while the second takes a container of strings. In both cases, nodes associated with the specified taxa (as given by the |Taxon| objects directly in the first case, or |Taxon| objects with labels given i [...]
+
+.. literalinclude:: /examples/prune_taxa_with_labels.py
+
+which results in::
+
+    Before:
+    [&R] ((A,(B,(C,(D,E)))),(F,(G,H)));
+
+              /------------------------------------------- A
+    /---------+
+    |         |          /-------------------------------- B
+    |         \----------+
+    |                    |          /--------------------- C
+    |                    \----------+
+    +                               |          /---------- D
+    |                               \----------+
+    |                                          \---------- E
+    |
+    |                               /--------------------- F
+    \-------------------------------+
+                                    |          /---------- G
+                                    \----------+
+                                               \---------- H
+
+
+    After:
+    [&R] ((B,(D,E)),(F,H));
+
+                      /----------------------------------- B
+    /-----------------+
+    |                 |                 /----------------- D
+    |                 \-----------------+
+    +                                   \----------------- E
+    |
+    |                                   /----------------- F
+    \-----------------------------------+
+                                        \----------------- H
+
+Alternatively, the tree can be pruned based on a set of taxa that you want to *keep*. This can be effected through the use of the counterpart "retain" methods, :meth:`~dendropy.datamodel.treemodel.Tree.retain_taxa()` and :meth:`~dendropy.datamodel.treemodel.Tree.retain_taxa_with_labels()`. For example:
+
+.. literalinclude:: /examples/retain_taxa_with_labels.py
+
+which results in::
+
+    Before:
+    [&R] ((A,(B,(C,(D,E)))),(F,(G,H)));
+
+              /------------------------------------------- A
+    /---------+
+    |         |          /-------------------------------- B
+    |         \----------+
+    |                    |          /--------------------- C
+    |                    \----------+
+    +                               |          /---------- D
+    |                               \----------+
+    |                                          \---------- E
+    |
+    |                               /--------------------- F
+    \-------------------------------+
+                                    |          /---------- G
+                                    \----------+
+                                               \---------- H
+
+
+    After:
+    [&R] ((A,C),G);
+
+                               /-------------------------- A
+    /--------------------------+
+    +                          \-------------------------- C
+    |
+    \----------------------------------------------------- G
+
+Again, it should be noted that, as these operations modify the structure of the tree, you need to call :meth:`~dendropy.datamodel.treemodel.Tree.update_bipartitions()` to update the internal splits hashes, before carrying out any calculations, comparisons, or metrics.
+
+Rotating
+========
+
+You can ladderize trees (sort the child nodes in order of the number of their children) by calling the :meth:`~dendropy.datamodel.treemodel.Tree.ladderize()` method. This method takes one argument, ``ascending``. If ``ascending=True``, which is the default, then the nodes are sorted in ascending order (i.e., nodes with fewer children sort before nodes with more children). If ``ascending=False``, then the nodes are sorted in descending order (i.e., nodes with more children sorting before  [...]
+
+.. literalinclude:: /examples/ladderize.py
+
+
+results in::
+
+    Before:
+    [&R] ((A,(B,(C,(D,E)))),(F,(G,H)));
+
+              /------------------------------------------- A
+    /---------+
+    |         |          /-------------------------------- B
+    |         \----------+
+    |                    |          /--------------------- C
+    |                    \----------+
+    +                               |          /---------- D
+    |                               \----------+
+    |                                          \---------- E
+    |
+    |                               /--------------------- F
+    \-------------------------------+
+                                    |          /---------- G
+                                    \----------+
+                                               \---------- H
+
+
+    Ladderize, ascending=True:
+    [&R] ((F,(G,H)),(A,(B,(C,(D,E)))));
+
+                                    /--------------------- F
+    /-------------------------------+
+    |                               |          /---------- G
+    |                               \----------+
+    +                                          \---------- H
+    |
+    |         /------------------------------------------- A
+    \---------+
+              |          /-------------------------------- B
+              \----------+
+                         |          /--------------------- C
+                         \----------+
+                                    |          /---------- D
+                                    \----------+
+                                               \---------- E
+
+
+    Ladderize, ascending=False:
+    [&R] (((((D,E),C),B),A),((G,H),F));
+
+                                               /---------- D
+                                    /----------+
+                         /----------+          \---------- E
+                         |          |
+              /----------+          \--------------------- C
+              |          |
+    /---------+          \-------------------------------- B
+    |         |
+    |         \------------------------------------------- A
+    +
+    |                                          /---------- G
+    |                               /----------+
+    \-------------------------------+          \---------- H
+                                    |
+                                    \--------------------- F
+
+Tree rotation operations do not actually change the tree structure, at least in so far as splits are concerned, so it is not necessary to update the splits hashes.
diff --git a/doc/source/primer/trees.rst b/doc/source/primer/trees.rst
new file mode 100644
index 0000000..5a038ba
--- /dev/null
+++ b/doc/source/primer/trees.rst
@@ -0,0 +1,220 @@
+*****
+Trees
+*****
+
+The |Tree| Class
+================
+
+:term:`Trees <tree>` in DendroPy are represented by objects of the class |Tree|.
+:term:`Trees <tree>` consist of a collection of :term:`nodes <node>`, represented by objects of the class |Node|, connected to each other in parent-child or ancestor-descendent relationships by objects of the class |Edge|.
+The first or initial :term:`node` of a |Tree| is the head of the data structure, and is represented by the :attr:`seed_node` attribute of a |Tree| object.
+If the tree is :term:`rooted <rooted tree>`, then this is the :term:`root node`.
+If the tree is :term:`unrooted <unrooted tree>`, however, then this is an artificial node that *only* serves as the initial node when iterating over a tree in :term:`preorder <preorder traversal>` or the final node when iterating over the tree in :term:`postorder <postorder traversal>`, but does not have any informational significance by itself: it is an algorithmic artifact.
+
+The :attr:`~dendropy.datamodel.treemodel.Tree.seed_node` attribute of a |Tree| object, like every other node on the tree, is an object of the |Node| class.
+Every |Node| object maintains a list of its immediate child |Node| objects as well as a reference to its parent |Node| object.
+You can iterate over the :term:`child nodes <child node>` of a particular |Node| object using the :meth:`~dendropy.datamodel.treemodel.Node.child_node_iter()` method, get a shallow-copy list of child |Node| objects using the :meth:`~dendropy.datamodel.treemodel.Node.child_nodes()` method, or access the :term:`parent <parent node>` |Node| object directly through the :attr:`~dendropy.datamodel.treemodel.Node.parent_node` attribute of the |Node|.
+By definition, the :attr:`~dendropy.datamodel.treemodel.Tree.seed_node` has no :term:`parent node`, :term:`leaf nodes <leaf node>` have no :term:`child nodes <child node>`, and :term:`internal nodes <internal node>` have both :term:`parent nodes <parent node>` and :term:`child nodes <child node>`.
+
+Every |Node| object has an attribute, :attr:`~dendropy.datamodel.treemodel.Node.edge`, which is an |Edge| object representing the :term:`edge` that is :term:`incident to or subtends <incident edge>` the :term:`node` represented by that |Node| object.
+Each |Edge|, in turn, has an attribute, :attr:`~dendropy.datamodel.treemodel.Edge.head_node`, which is the |Node| object representing the :term:`node` that the edge subtends.
+
+The |Tree|, |Node|, and |Edge| classes all have "``annotations``" as an attribute, which is a :class:`~dendropy.datamodel.basemodel.AnnotationSet` object, i.e. a collection of :class:`~dendropy.datamodel.basemodel.Annotation` instances tracking metadata.
+More information on working with metadata can be found in the ":doc:`/primer/working_with_metadata_annotations`" section.
+
+Reading and Writing |Tree| Instances
+====================================
+
+The |Tree| class supports the ":meth:`~dendropy.datamodel.treemodel.Tree.get`" factory class method for simultaneously instantiating and populating a |Tree| instance, taking a data source as the first argument and a :ref:`schema specification string <Specifying_the_Data_Source_Format>` ("``nexus``", "``newick``", "``nexml``", "``fasta``", or "``phylip``", etc.) as the second::
+
+    import dendropy
+    tree = dendropy.Tree.get(
+        path='pythonidae.mcmc.nex',
+        schema='nexus')
+
+A |Tree| object can be written to an external resource using the ":meth:`~dendropy.datamodel.treemodel.Tree.write`" method::
+
+    import dendropy
+    tree = dendropy.Tree.get(
+        path="trees1.nex",
+        schema="nexus",
+        tree_offset=2,
+        )
+    tree.write(
+        path="trees1.newick",
+        schema="newick",
+        )
+
+It can also be represented as a string using the ":meth:`~dendropy.datamodel.treemodel.Tree.as_string`" method::
+
+    import dendropy
+    tree = dendropy.Tree.get(
+        path="trees1.nex",
+        schema="nexus",
+        )
+    print(tree.as_string(schema="newick"))
+
+More information on reading operations is available in the :doc:`/primer/reading_and_writing` section.
+
+Cloning/Copying a |Tree|
+========================
+
+To make a "taxon namespace-scoped" copy of a |Tree| instance, i.e., one in which all |Node| and associated |Edge| instances of a |Tree| are cloned but references to |Taxon| objects are preserved, you can call :meth:`dendropy.datamodel.treemodel.Tree.clone` with a "``depth``" argument value of 1 or use copy construction:
+
+.. literalinclude:: /examples/tree_copy1.py
+
+Finally, for a true and complete deep-copy, where even the |Taxon| and |TaxonNamespace| references are copied, call :func:`copy.deepcopy`:
+
+.. literalinclude:: /examples/tree_copy2.py
+
+Tree Traversal
+==============
+
+Iterating Over Nodes
+--------------------
+
+The following example shows how you might evolve a continuous character on a tree by recursively visting each node, and setting the value of the character to one drawn from a normal distribution centered on the value of the character of the node's ancestor and standard deviation given by the length of the edge subtending the node:
+
+.. literalinclude:: /examples/tree_evolve_char1.py
+    :linenos:
+
+While the previous example works, it is probably clearer and more efficient to use one of the pre-defined node iterator methods:
+
+    :meth:`~dendropy.datamodel.treemodel.Tree.preorder_node_iter()`
+        Iterates over nodes in a |Tree| object in a `depth-first <http://en.wikipedia.org/wiki/Depth-first_traversal>`_ search pattern, i.e., "visiting" a node before visiting the children of the node. This is the same traversal order as the previous example. This traversal order is useful if you require ancestral nodes to be processed before descendent nodes, as, for example, when evolving sequences over a tree.
+
+    :meth:`~dendropy.datamodel.treemodel.Tree.postorder_node_iter()`
+        Iterates over nodes in a |Tree| object in a `postorder <http://en.wikipedia.org/wiki/Tree_traversal>`_ search pattern, i.e., visiting the children of the node before visiting the node itself. This traversal order is useful if you require descendent nodes to be processed before ancestor nodes, as, for example, when calculating ages of nodes.
+
+    :meth:`~dendropy.datamodel.treemodel.Tree.level_order_node_iter()`
+        Iterates over nodes in a |Tree| object in a  `breadth-first <http://en.wikipedia.org/wiki/Breadth-first_traversal>`_  search pattern, i.e., every node at a particular level is visited before proceeding to the next level.
+
+    :meth:`~dendropy.datamodel.treemodel.Tree.leaf_node_iter()`
+        Iterates over the leaf or tip nodes of a |Tree| object.
+
+The previous example would thus be better implemented as follows:
+
+.. literalinclude:: /examples/tree_evolve_char2.py
+
+The nodes returned by each of these iterators can be filtered if a filter function is passed as a second argument to the iterator.
+This filter function should take a |Node| object as an argument, and return |True| if the node is to be returned or |False| if it is not. For example, the following iterates over all nodes that have more than two children:
+
+.. literalinclude:: /examples/preorder_filtered_node_iteration.py
+    :linenos:
+
+Iterating Over Edges
+--------------------
+
+The |Edge| objects associated with each |Node| can be accessed through the :attr:`~dendropy.datamodel.treemodel.Node.edge` attribute of the |Node| object.
+So it is possible to iterate over every edge on a tree by iterating over the nodes and referencing the :attr:`~dendropy.datamodel.treemodel.Node.edge` attribute of the node when processing the node.
+But it is clearer and probably more convenient to use one of the |Edge| iterators:
+
+    :meth:`~dendropy.datamodel.treemodel.Tree.preorder_edge_iter()`
+        Iterates over edges in a |Tree| object in a `depth-first <http://en.wikipedia.org/wiki/Depth-first_traversal>`_ search pattern, i.e., "visiting" an edge before visiting the edges descending from that edge. This is the same traversal order as the previous example. This traversal order is useful if you require ancestral edges to be processed before descendent edges, as, for example, when calculating the sum of edge lengths from the root.
+
+    :meth:`~dendropy.datamodel.treemodel.Tree.postorder_edge_iter()`
+        Iterates over edges in a |Tree| object in a `postorder <http://en.wikipedia.org/wiki/Tree_traversal>`_ search pattern, i.e., visiting the descendents of the edge before visiting the edge itself. This traversal order is useful if you require descendent edges to be processed before ancestral edges, as, for example, when calculating the sum of edge lengths from the tips.
+
+    :meth:`~dendropy.datamodel.treemodel.Tree.level_order_edge_iter()`
+        Iterates over edges in a |Tree| object in a  `breadth-first <http://en.wikipedia.org/wiki/Breadth-first_traversal>`_  search pattern, i.e., every edge at a particular level is visited before proceeding to the next level.
+
+The following example sets the edge lengths of a tree to the proportions of the total tree length that they represent:
+
+.. literalinclude:: /examples/rescale_tree_length.py
+    :linenos:
+
+While this one removes the edge lengths entirely:
+
+.. literalinclude:: /examples/remove_branch_lengths.py
+    :linenos:
+
+Like the node iterators, the edge iterators also optionally take a filter function as a second argument, except here the filter function should take an |Edge| object as an argument.
+The following example shows how you might iterate over all edges with lengths less than some value:
+
+.. literalinclude:: /examples/preorder_filtered_edge_iteration.py
+    :linenos:
+
+Finding Nodes on Trees
+======================
+
+Nodes with Particular Taxa
+--------------------------
+
+To retrieve a node associated with a particular taxon, we can use the :meth:`~dendropy.datamodel.treemodel.Tree.find_taxon_node()` method, which takes a filter function as an argument.
+The filter function should take a |Taxon| object as an argument and return |True| if the taxon is to be returned.
+For example:
+
+.. literalinclude:: /examples/find_taxon_node1.py
+
+Because we might find it easier to refer to |Taxon| objects by their labels, a convenience method that wraps the retrieval of nodes associated with |Taxon| objects of particular label is provided:
+
+.. literalinclude:: /examples/find_taxon_node2.py
+
+Most Recent Common Ancestors
+----------------------------
+
+The MRCA (most recent common ancestor) of taxa or nodes can be retrieved by the instance method :meth:`~dendropy.datamodel.treemodel.Tree.mrca()`.
+This method takes a list of |Taxon| objects given by the ``taxa`` keyword argument, or a list of taxon labels given by the ``taxon_labels`` keyword argument, and returns a |Node| object that corresponds to the MRCA of the specified taxa.
+For example:
+
+.. literalinclude:: /examples/mrca.py
+
+Note that this method is inefficient when you need to resolve MRCA's for multiple sets or pairs of taxa.
+In this context, the :class:`~dendropy.calculate.treemeasure.PatristicDistanceMatrix` offers a more efficient approach, and should be preferred for applications such as calculating the patristic distances between all pairs of taxa:
+
+.. literalinclude:: /examples/mrca2.py
+
+Viewing and Displaying Trees
+============================
+
+Sometimes it is useful to get a visual representation of a |Tree|.
+
+For quick inspection, the :meth:`~dendropy.datamodel.treemodel.Tree.print_plot()` method will write an ASCII text plot to the standard output stream::
+
+    >>> t = dendropy.Tree.get_from_string("(A,(B,(C,D)))", "newick")
+    >>> t.print_plot()
+    /----------------------------------------------- A
+    +
+    |                /------------------------------ B
+    \----------------+
+                     |          /------------------- C
+                     \----------+
+                                \------------------- D
+
+If you need to store this representation as a string instead, you can use :meth:`~dendropy.datamodel.treemodel.Tree.as_ascii_plot()`::
+
+    >>> s = t.as_ascii_plot()
+    >>> print(s)
+    /----------------------------------------------- A
+    +
+    |                /------------------------------ B
+    \----------------+
+                     |          /------------------- C
+                     \----------+
+                                \------------------- D
+
+You can also, as mentioned above, use the :meth:`~dendropy.datamodel.treemodel.Tree.as_string` method to represent a |Tree| as a string in any format::
+
+    t = dendropy.Tree.get_from_string("(A,(B,(C,D)))", "newick")
+    print(t.as_string(schema="nexus"))
+    print(t.as_string(schema="newick"))
+
+
+Building a Tree Programmatically
+================================
+
+For example:
+
+.. literalinclude:: /examples/build_tree_programmatically.py
+
+produces the following::
+
+    ((A:1,B:2):1,(C:1,D:2):1);
+
+                                       /---------------------------------- A
+    /----------------------------------+
+    |                                  \---------------------------------- B
+    +
+    |                                  /---------------------------------- C
+    \----------------------------------+
+                                       \---------------------------------- D
diff --git a/doc/source/primer/treesims.rst b/doc/source/primer/treesims.rst
new file mode 100644
index 0000000..f23387c
--- /dev/null
+++ b/doc/source/primer/treesims.rst
@@ -0,0 +1,285 @@
+******************************
+Tree Simulation and Generation
+******************************
+
+The :mod:`~dendropy.simulate.treesim` module provides functions for the simulation of trees under a variety of theoretical models.
+This module is actually just a namespace that aggregates functions and classes for tree simulation routines in one convenient place.
+For example, the :func:`~dendropy.model.birthdeath.birth_death_tree()` function
+is actually defined in the :mod:`~dendropy.model.birthdeath` module, but is
+exposed in the :mod:`~dendropy.simulate.treesim` namespace for ease of access.
+
+Birth-Death Process Trees
+=========================
+
+There are two different birth-death process tree simulation routines in DendroPy:
+
+    :func:`~dendropy.simulate.treesim.birth_death_tree()`
+        Returns a tree generated under a continuous-time birth-death process, with branch lengths in arbitrary time units.
+
+    :func:`~dendropy.simulate.treesim.discrete_birth_death_tree()`
+        Returns a tree generated under a discrete-time birth-death process, with branch lengths in generation units.
+
+Both of these functions have identical interfaces, and will grow a tree under a branching process with the specified birth-rate and death-rate until the termination condition (pre-specified number of leaves or maximum amount of time) is met.
+
+For example, to get a continuous-time tree with 10 leaves, generated under a birth rate of 1.0 and death rate of 0.5::
+
+    >>> from dendropy.simulate import treesim
+    >>> t = treesim.birth_death_tree(birth_rate=1.0, death_rate=0.5, ntax=10)
+    >>> t.print_plot()
+                  /-------------------------------------------- T1
+                  |
+    /-------------+                             /-------------- T2
+    |             |              /--------------+
+    |             \--------------+              \-------------- T3
+    |                            |
+    |                            \----------------------------- T4
+    +
+    |                            /----------------------------- T5
+    |             /--------------+
+    |             |              |              /-------------- T6
+    |             |              \--------------+
+    \-------------+                             \-------------- T7
+                  |
+                  |                             /-------------- T8
+                  |              /--------------+
+                  \--------------+              \-------------- T9
+                                 |
+                                 \----------------------------- T10
+
+While to get a continuous time tree generated under the same rates after 6 time units::
+
+    >>> t = treesim.birth_death_tree(birth_rate=1.0, death_rate=0.5, max_time=6.0)
+
+If both conditions are given simultaneously, then tree growth will terminate when
+*any* of the termination conditions (i.e., number of tips == ``ntax``, or number
+of tips == len(taxon_namespace) or maximum time == ``max_time``) are met.
+
+Specifying a |TaxonNamespace|
+-----------------------------
+
+By default, a new |Taxon| object will be created and associated with each leaf (labeled "T1", "T2", etc.),  all belonging to a new |TaxonNamespace| object associated with the resulting tree.
+
+You can pass in an explicit |TaxonNamespace| object using the "``taxon_namespace``" keyword::
+
+    >>> import dendropy
+    >>> from dendropy.simulate import treesim
+    >>> taxa = dendropy.TaxonNamespace(['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'])
+    >>> t = treesim.birth_death_tree(0.4, 0.1, taxon_namespace=taxa)
+    >>> t.print_plot()
+                /-------------------------------------- h
+                |
+    /-----------+                         /------------ c
+    |           |            /------------+
+    |           \------------+            \------------ a
+    |                        |
+    +                        \------------------------- g
+    |
+    |                                     /------------ e
+    |                        /------------+
+    |                        |            \------------ f
+    \------------------------+
+                             |            /------------ d
+                             \------------+
+                                          \------------ b
+
+
+In this case, the branching process underlying the tree generation will terminate when the number of leaves in the tree equals the number of taxa in the |TaxonNamespace| "``taxa``", and the |Taxon| objects in "``taxa``" will be randomly assigned to the leaves.
+
+The "``taxon_namespace``" keyword can be combined with the "``ntax``" keyword.
+If the size of the |TaxonNamespace| object given by the ``taxon_namespace`` argument is greater than the specified target tree taxon number, then a random subset of |Taxon| object in the |TaxonNamespace| will be assigned to the leaves::
+
+    >>> import dendropy
+    >>> from dendropy.simulate import treesim
+    >>> taxa = dendropy.TaxonNamespace(['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'])
+    >>> t = treesim.birth_death_tree(birth_rate=1.0, death_rate=0.5, ntax=5, taxon_namespace=taxa)
+    >>> t.print_plot()
+    /-------------------------------------------------- g
+    |
+    +                        /------------------------- a
+    |           /------------+
+    |           |            |            /------------ d
+    \-----------+            \------------+
+                |                         \------------ c
+                |
+                \-------------------------------------- f
+
+If the size of the |TaxonNamespace| object is less than the target taxon number, then new |Taxon| objects will be created as needed and added to the |TaxonNamespace| object as well as associated with the leaves::
+
+    >>> import dendropy
+    >>> from dendropy.simulate import treesim
+    >>> taxa = dendropy.TaxonNamespace(['a', 'b'])
+    >>> t = treesim.birth_death_tree(birth_rate=1.0, death_rate=0.5, ntax=5, taxon_namespace=taxa)
+    >>> t.print_plot()
+                                     /---------------- a
+    /--------------------------------+
+    |                                \---------------- b
+    +
+    |               /--------------------------------- T3
+    \---------------+
+                    |                /---------------- T4
+                    \----------------+
+                                     \---------------- T5
+
+
+Repeating Failed Branching Processes
+------------------------------------
+
+With a non-zero death rate, it is possible for all lineages of a tree to go extinct before the termination conditions are reached.
+In this case, by default a :class:`~dendropy.simulate.treesim.TreeSimTotalExtinctionException` will be raised::
+
+    >>> t = treesim.birth_death_tree(birth_rate=1.0, death_rate=0.9, ntax=10)
+    Traceback (most recent call last):
+      File "<stdin>", line 1, in <module>
+      File "/Users/jeet/Projects/DendroPy/dendropy/treesim.py", line 188, in birth_death
+        raise TreeSimTotalExtinctionException()
+    dendropy.simulate.treesim.TreeSimTotalExtinctionException
+
+If the keyword argument "``repeat_until_success``" is given, then instead of raising an exception the process starts again and repeats until the termination condition is met::
+
+    >>> t = treesim.birth_death_tree(birth_rate=1.0,
+    ...                         death_rate=0.9,
+    ...                         ntax=10,
+    ...                         repeat_until_success=True)
+    >>> t.print_plot()
+                                           /------------------- T1
+    /--------------------------------------+
+    |                                      |         /--------- T2
+    |                                      \---------+
+    |                                                \--------- T3
+    +
+    |                                                /--------- T4
+    |        /---------------------------------------+
+    |        |                                       \--------- T5
+    |        |
+    \--------+                   /----------------------------- T6
+             |         /---------+
+             |         |         |         /------------------- T7
+             |         |         \---------+
+             \---------+                   |         /--------- T8
+                       |                   \---------+
+                       |                             \--------- T9
+                       |
+                       \--------------------------------------- T10
+
+Suppressing Taxon Assignment
+----------------------------
+You can specify "``assign_taxa``" to be ``False``  to avoid taxa from being automatically assigned to a tree (for example, when you want to build a tree in stages -- see below).
+
+Extending an Existing Tree
+--------------------------
+
+Both these functions also accept a Tree object (with valid branch lengths) as an argument passed using the keyword ``tree``.
+If given, then this tree will be used as the starting point; otherwise a new one will be created.
+
+Evolving Birth and Death Rates
+------------------------------
+
+The same functions can also produce trees generated under variable birth and death rates.
+The "``birth_rate_sd``" keyword argument specifies the standard deviation of the normally-distributed error of birth rates as they evolve from parent to child node, while the "``death_rate_sd``" keyword argument specifies the same for the death rates.
+For example, to get a 10-taxon tree generated under a birth- and death-rate that evolves with a standard deviation of 0.1::
+
+    >>> t = treesim.birth_death_tree(birth_rate=1.0,
+                death_rate=0.5,
+                birth_rate_sd=0.1,
+                death_rate_sd=0.1,
+                ntax=10)
+
+Building a Tree in Multiple Stages under Different Conditions
+-------------------------------------------------------------
+
+You might want to generate a tree under different conditions in different stages.
+To do this, you would start with an empty tree and pass it to the birth-death function as an argument using the "``tree``" keyword argument, and at the same time suppress the automatic taxon assignment using the "``assign_taxa=False``" keyword argument to avoid taxa being assigned to what will eventually become internal nodes.
+When the tree is ready, you will call the :meth:`~dendropy.datamodel.treemodel.Tree.randomly_assign_taxa()` function to assign taxa at random to the leaves.
+
+For example, the following generates a birth-death tree with equal birth and death rates, but both rates shifting for a short while to a temporarily higher (though equal) rates:
+
+.. literalinclude:: /examples/bdtree_multi1.py
+    :linenos:
+
+Another example draws birth and death rates from a normal distribution with the same mean and standard deviation in multiple stages:
+
+.. literalinclude:: /examples/bdtree_multi2.py
+    :linenos:
+
+Star Trees
+==========
+
+The :func:`~dendropy.simulate.treesim.star_tree()` generates a simple polytomy tree, with a single node as the immediate ancestor to a set of leaves, with one leaf per |Taxon| in the |TaxonNamespace| object given by the ``taxon_namespace`` argument.
+For example::
+
+    >>> from dendropy.simulate import treesim
+    >>> taxa = dendropy.TaxonNamespace(['a', 'b', 'c', 'd', 'e'])
+    >>> tree = treesim.star_tree(taxa)
+    >>> print(tree.as_ascii_plot())
+    /-------------------------------------- a
+    |
+    |-------------------------------------- b
+    |
+    +-------------------------------------- c
+    |
+    |-------------------------------------- d
+    |
+    \-------------------------------------- e
+
+
+(Pure Neutral) Coalescent Trees
+===============================
+
+The :func:`~dendropy.simulate.treesim.pure_kingman()` function returns a tree generated under an unconstrained neutral coalescent model. The first argument to this function, ``taxon_namespace``, is a |TaxonNamespace| object, where each member |Taxon| object represents a gene to be coalesced. The second argument, ``pop_size``, specifies the population size in terms of the number of gene copies in the population. This means that for a diploid population of size ``N``, ``pop_size`` should b [...]
+
+.. literalinclude:: /examples/pure_kingman1.py
+
+.. _Simulating_Contained_Coalescent_Trees:
+
+Contained Coalescent Trees
+==========================
+
+The :func:`~dendropy.simulate.treesim.contained_coalescent()` function returns a tree generated under a neutral coalescent model conditioned on population splitting times or events given by a containing species or population tree.
+Such a tree is often referred to as a contained, embedded, censored, truncated, or constrained genealogy/tree.
+At a minimum, this function takes two arguments: a |Tree| object representing the containing (species or population) tree, and a |TaxonNamespaceMapping| object describing how the sampled gene taxa map or are associated with the species/population |Tree| taxa.
+
+The |Tree| object representing the containing species or population tree should be rooted and ultrametric.
+If edge lengths are given in generations, then a meaningful population size needs to be communicated to the :func:`~dendropy.simulate.treesim.contained_coalescent()` function.
+In general, for coalescent operations in DendroPy, unless otherwise specified, population sizes are the *haploid* population size, i.e. the number of genes in the population.
+This is 2N for a diploid population with N individuals, or N for a haploid population of N individuals.
+If edge lengths are given in population units (e.g., N), then the appropriate population size to use is 1.
+
+If the population size is fixed throughout the containing species/population tree, then simply passing in the appropriate value using the ``default_pop_size`` argument to the :func:`~dendropy.simulate.treesim.contained_coalescent()` function is sufficient.
+If, on the other hand, the population size varies, then a special attribute must be added to each edge, "``pop_size``", that specifies the population size for that edge.
+For example::
+
+    tree = dendropy.Tree.get_from_path("sp.tre", "newick")
+    for edge in tree.postorder_edge_iter():
+            edge.pop_size = 100000
+
+The easiest way to get a |TaxonNamespaceMapping| object is to call the special factory function :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespaceMapping.create_contained_taxon_mapping()`.
+This will create a new |TaxonNamespace| to manage the gene taxa, and create the associations between the gene taxa and the containing tree taxa for you.
+It takes two arguments: the |TaxonNamespace| of the containing tree, and the number of genes you want sampled from each species.
+
+The following example shows how to create a |TaxonNamespaceMapping| using :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespaceMapping.create_contained_taxon_mapping()`, and then calls :meth:`~dendropy.simulate.treesim.contained_coalescent()` to produce a contained coalescent tree:
+
+.. literalinclude:: /examples/contained_coalescent1.py
+
+In the above example, the branch lengths were in haploid population units, so we did not specify a population size.
+If the gene-species associations are more complex, e.g., different numbers of genes per species, we can pass in a list of values as the second argument to :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespaceMapping.create_contained_taxon_mapping()`:
+
+
+.. literalinclude:: /examples/contained_coalescent2.py
+
+This approach should be used with caution if we cannot be certain of the order of taxa (as is the case with data read in Newick formats). In these case, and in more complex cases, we might need to directly instantiate the :class:`~dendropy.datamodel.taxonmodel.TaxonNamespaceMapping` object. The API to describe the associations when constructing this object is very similar to that of the :class:`~dendropy.datamodel.taxonmodel.TaxonNamespacePartition` object: you can use a function, attrib [...]
+
+.. _Simulating_and_Counting_Deep_Coalescences:
+
+Simulating the Distribution of Number Deep Coalescences Under Different Phylogeographic History Scenarios
+=========================================================================================================
+
+A typical application for simulating censored coalescent trees is to produce a distribution of trees under different hypotheses of demographic or phylogeographic histories.
+
+For example, imagine we wanted to generate the distribution of the number of deep coalescences under two scenarios: one in which a population underwent sequential or step-wise vicariance, and another when there was simultaneous fragmentation.
+This can be achieved by generating trees under :meth:`~dendropy.simulate.treesim.contained_coalescent()`, and then using a :class:`~dendropy.reconcile.ContainingTree` object to embed the trees and count the number of deep coalescences.
+
+.. literalinclude:: /examples/sim_and_count_deepcoal1.py
+
+Actually, the  :class:`~dendropy.reconcile.ContainingTree` class has its own native contained coalescent simulator, :meth:`~dendropy.reconcile.ContainingTree.embed_contained_kingman()`, which simulates *and* embeds a contained coalescent tree at the same time. So a more practical approach might be:
+
+.. literalinclude:: /examples/sim_and_count_deepcoal2.py
diff --git a/doc/source/primer/treestats.rst b/doc/source/primer/treestats.rst
new file mode 100644
index 0000000..85c3af9
--- /dev/null
+++ b/doc/source/primer/treestats.rst
@@ -0,0 +1,189 @@
+****************************************************************
+Tree Statistics, Metrics, Summarizations, and Other Calculations
+****************************************************************
+
+Some general tree metrics that are calculated without reference to any particular model or data and generally report some tree metadata (e.g., tree length, node ages, etc.) are available as instance methods.
+More specialized tree statistics, however, are available through functions in various other modules:
+
+-   The :mod:`~dendropy.calculate.treemeasure` module provides for calculation of statistics that are typically calculated on a single tree
+-   The :mod:`~dendropy.calculate.treecompare` module provides for calculation of statistics that are typically calculated between trees
+-   The :mod:`~dendropy.calculate.treescore` module provides for statistics that typically score a tree under a model and with reference to some sort of data.
+-   The :mod:`~dendropy.model.coalescent` module provides for calculations on trees under the coalescent model.
+
+Native Tree Statistic and Metric Methods
+========================================
+
+Basic meta-information about tree structure are available as native |Tree| methods.
+
+Tree Length
+-----------
+
+The :meth:`~dendropy.datamodel.treemodel.Tree.length()` method returns the sum of edge lengths of a |Tree| object, with edges that do not have any length assigned being treated as edges with length 0.
+The following example shows how to identify the "critical" value for an `Archie-Faith-Cranston or PTP test <http://hymenoptera.tamu.edu/courses/ento606/Suggested%20Readings/Slowinksi_Crother_1998.pdf>`_ from a sample of |Tree| objects, i.e. a tree length equal to or greater than 95% of the trees in the sample:
+
+.. literalinclude:: /examples/tree_length_crit.py
+
+Node Ages
+---------
+
+The :meth:`~dendropy.datamodel.treemodel.Tree.calc_node_ages()` method calculates the age of a node (i.e., the sum of edge lengths from the node to a tip) and assigns it to the :attr:`~dendropy.datamodel.treemodel.Node.age` attribute. The following example iterates through the post-burnin of an MCMC sample of ultrametric trees, calculating the age of the MRCA of two taxa, and reports the mean age of the node.
+
+.. literalinclude:: /examples/node_ages1.py
+
+Number of Lineages at a Particular Time and Lineage Through Time Plots
+----------------------------------------------------------------------
+
+The :meth:`~dendropy.datamodel.treemodel.Tree.num_lineages_at()` method of the |Tree| class returns the number of lineages at a particular time given in terms of distance from the root.
+The following example extracts the number of lineages at fixed intervals along the length of the tree to use in a Lineage Through Time (LTT) plot:
+
+.. literalinclude:: /examples/ltt.py
+
+
+Unary Tree Statistics and Metrics
+=================================
+
+Numerous specialized statistics and indexes of tree shape and structure (B1, Colless' imbalance, Pybus-Harvey-Gamma, etc.) are available through the :mod:`~dendropy.calculate.treemeasure` module:
+
+
+.. literalinclude:: /examples/treemeasures1.py
+
+Pybus-Harvey Gamma
+------------------
+
+The Pybus-Harvey Gamma statistic is given by the :meth:`~dendropy.datamodel.treemodel.Tree.pybus_harvey_gamma()` instance method. The following example iterates through the post-burn-in of an MCMC sample of trees, reporting the mean Pybus-Harvey Gamma statistic:
+
+.. literalinclude:: /examples/pbhg.py
+
+Patristic Distances
+-------------------
+
+The :class:`~dendropy.treecalc.PatristicDistanceMatrix` is the most efficient way to calculate the patristic distances between taxa or leaves on a tree, when doing multiple such calculations.
+Its constructor takes a |Tree| object as an argument, and the object returned is callable, taking two |Taxon| objects as arguments and returning the sum of edge lengths between the two. The following example reports the pairwise distances between all taxa on the input tree:
+
+.. literalinclude:: /examples/pdm.py
+
+Comparing and Summarizing Trees
+===============================
+
+Distances Between Trees
+-----------------------
+
+Unweighted Robinson-Foulds Distance
+...................................
+
+The *unweighted* Robinson-Foulds distance (often referred to as just the Robinson-Foulds distance) is given by the :func:`dendropy.calculate.treecompare.symmetric_difference` function:
+
+
+.. literalinclude:: /examples/symdiff1.py
+
+Note that the two trees *must* share the same |TaxonNamespace| reference, otherwise an error will be raised::
+
+    >> import dendropy
+    >> from dendropy.calculate import treecompare
+    >> s1 = "(a,(b,(c,d)));"
+    >> s2 = "(a,(d,(b,c)));"
+    >> tree1 = dendropy.Tree.get(data=s1, schema='newick')
+    >> tree2 = dendropy.Tree.get(data=s2, schema='newick')
+    >> print(treecompare.symmetric_difference(tree1, tree2))
+    Traceback (most recent call last):
+        File "<stdin>", line 1, in <module>
+            print(treecompare.symmetric_difference(tree1, tree2))
+        File "/Users/jeet/Documents/Projects/Phyloinformatics/DendroPy/dendropy/dendropy/calculate/treecompare.py", line 85, in symmetric_difference
+            is_bipartitions_updated=is_bipartitions_updated)
+        File "/Users/jeet/Documents/Projects/Phyloinformatics/DendroPy/dendropy/dendropy/calculate/treecompare.py", line 221, in false_positives_and_negatives
+            raise error.TaxonNamespaceIdentityError(reference_tree, comparison_tree)
+        dendropy.utility.error.TaxonNamespaceIdentityError: Non-identical taxon namespace references: <TaxonNamespace object at 0x10052d310> is not <TaxonNamespace object at 0x101572210>
+
+Note, too, that results very much depend on the rooting states of the tree:
+
+.. literalinclude:: /examples/symdiff2.py
+
+Weighted Robinson-Foulds Distance
+.................................
+
+The *weighted* Robinson-Foulds distance takes edge lengths into account, and is given by the :func:`dendropy.calculate.treecompare.weighted_robinson_foulds_distance`:
+
+.. literalinclude:: /examples/weightedrf1.py
+
+Euclidean Distance
+..................
+
+The Euclidean distance, like the weighted Robinson-Foulds distance takes edge lengths into account, but squares the edge lengths instead of taking the absolute distance, and is given by the :func:`dendropy.calculate.treecompare.euclidean_distance`:
+
+.. literalinclude:: /examples/euctree.py
+
+Majority-Rule Consensus Tree from a Collection of Trees
+-------------------------------------------------------
+
+To get the majority-rule consensus tree of a |TreeList| object, you can call the :meth:`~dendropy.datamodel.treecollectionmodel.TreeList.consensus()` instance method.
+You can specify the frequency threshold for the consensus tree by the ``min_freq`` argument, which defaults to 0.5 (i.e., a 50% majority rule tree).
+The following example aggregates the post-burn-in trees from four MCMC samples into a single |TreeList| object, and prints the 95% majority-rule consensus as a Newick string:
+
+.. literalinclude:: /examples/majrule.py
+
+Frequency of a Split in a Collection of Trees
+---------------------------------------------
+
+The :meth:`~dendropy.datamodel.treecollectionmodel.TreeList.frequency_of_split()` method of a |TreeList| object returns the frequency of occurrence of a single split across all the |Tree| objects in the |TreeList|.
+The split can be specified by passing a split bitmask directly using the ``split_bitmask`` keyword argument, as a list of |Taxon| objects using the ``taxa`` keyword argument, or as a list of taxon labels using the ``labels`` keyword argument.
+The following example shows how to calculate the frequency of a split defined by two taxa, "Morelia amethistina" and "Morelia tracyae", from the post-burn-in trees aggregated across four MCMC samples:
+
+.. literalinclude:: /examples/splitfreq.py
+
+The Maximum Clade Credibility Tree: The Tree that Maximizes the Product of Split Support
+----------------------------------------------------------------------------------------
+
+The Maximum Clade Credibility Tree (MCCT) is the one that maximizes the *product* of split support, and is returned for a collection of trees managed in a |TreeList| instance by the :meth:`~dendropy.datamodel.treecollectionmodel.TreeList.maximum_product_of_split_support_tree` method:
+
+.. literalinclude:: /examples/mcct.py
+
+Unfortunately, terminology in usage and literature regarding this type of summary is *very* confusing, and sometimes the term "MCCT" is used to refer to the tree that maximizes the *sum* of split support and "MCT" to the tree that maximizes the product of split support.
+If the tree that maximizes the *sum* of split support is the criteria required, then the :meth:`~dendropy.datamodel.treecollectionmodel.TreeList.maximum_sum_of_split_support_tree` method of the |TreeList| object should be used.
+
+Scoring Trees Under the Coalescent
+==================================
+
+Probability Under the Coalescent Model
+---------------------------------------
+
+The :mod:`~dendropy.model.coalescent` module provides a range of methods for simulations and calculations under Kingman's coalescent framework and related models. For example:
+
+    :func:`~dendropy.model.coalescent.log_probability_of_coalescent_tree`
+        Given a |Tree| object as the first argument, and the haploid population size as the second, returns the log probability of the |Tree| under the neutral coalescent.
+
+Numbers of Deep Coalescences
+----------------------------
+
+    :func:`~dendropy.model.reconcile.reconciliation_discordance`
+        Given two |Tree| objects *sharing the same leaf-set*, this returns the number of deep coalescences resulting from fitting the first tree (e.g., a gene tree) to the second (e.g., a species tree). This is based on the algorithm described `Goodman, et al. <bioinformatics.oxfordjournals.org/cgi/reprint/14/9/819.pdf>`_ (Goodman, et al., 1979. Fitting the gene lineage into its species lineage,a parsimony strategy illustrated by cladograms constructed from globin sequences. Syst. Zool.  [...]
+
+    :func:`~dendropy.model.reconcile.monophyletic_partition_discordance`
+        Given a |Tree| object as the first argument, and a list of lists of
+        |Taxon| objects representing the expected monophyletic partitioning of the |TaxonNamespace| of the |Tree| as the second argument, this returns the number of deep coalescences found in the relationships implied by the |Tree| object, conditional on the taxon groupings given by the second argument. This statistic corresponds to the Slatkin and Maddison (1989) **s** statistic, as described `here <http://mesquiteproject.org/Mesquite_Folder/docs/mesquite/popGen/popGen.html#s>`_.
+
+Number of Deep Coalescences when Embedding One Tree in Another (e.g. Gene/Species Trees)
+----------------------------------------------------------------------------------------
+
+Imagine we wanted to generate the distribution of the number of deep coalescences under two scenarios: one in which a population underwent sequential or step-wise vicariance, and another when there was simultaneous fragmentation.
+In this case, the containing tree and the embedded trees have different leaf sets, and there is a many-to-one mapping of embedded tree taxa to containing tree taxa.
+
+The :class:`~dendropy.model.reconcile.ContainingTree` class is designed to allow for counting deep coalescences in cases like this.
+It requires a |TaxonNamespaceMapping| object, which provides an association between the embedded taxa and the containing taxa.
+The easiest way to get a |TaxonNamespaceMapping| object is to call the special factory function :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespaceMapping.create_contained_taxon_mapping()`.
+This will create a new |TaxonNamespace| to manage the gene taxa, and create the associations between the gene taxa and the containing tree taxa for you.
+It takes two arguments: the |TaxonNamespace| of the containing tree, and the number of genes you want sampled from each species.
+If the gene-species associations are more complex, e.g., different numbers of genes per species, we can pass in a list of values as the second argument to :meth:`~dendropy.datamodel.taxonmodel.TaxonNamespaceMapping.create_contained_taxon_mapping()`.
+This approach should be used with caution if we cannot be certain of the order of taxa (as is the case with data read in Newick formats). In these case, and in more complex cases, we might need to directly instantiate the :class:`~dendropy.datamodel.taxonmodel.TaxonNamespaceMapping` object. The API to describe the associations when constructing this object is very similar to that of the :class:`~dendropy.datamodel.taxonmodel.TaxonNamespacePartition` object: you can use a function, attrib [...]
+
+The :class:`~dendropy.model.reconcile.ContainingTree` class has its own native contained coalescent simulator, :meth:`~dendropy.model.reconcile.ContainingTree.embed_contained_kingman()`, which simulates *and* embeds a contained coalescent tree at the same time.
+
+.. literalinclude:: /examples/sim_and_count_deepcoal2.py
+
+If you have used some other method to simulate your trees, you can use :meth:`~dendropy.model.reconcile.ContainingTree.embed_tree()` to embed the trees and count the number of deep coalescences.
+
+.. literalinclude:: /examples/sim_and_count_deepcoal1.py
+
+For more details on simulating contained coalescent trees and counting numbers of deep coalescences on them, see ":ref:`Simulating_Contained_Coalescent_Trees`" or ":ref:`Simulating_and_Counting_Deep_Coalescences`".
+
+
+
diff --git a/doc/source/primer/working_with_metadata_annotations.rst b/doc/source/primer/working_with_metadata_annotations.rst
new file mode 100644
index 0000000..78d9455
--- /dev/null
+++ b/doc/source/primer/working_with_metadata_annotations.rst
@@ -0,0 +1,898 @@
+*********************************
+Working with Metadata Annotations
+*********************************
+
+|DendroPy| provides a rich infrastructure for decorating most types of phylogenetic objects (e.g., the |DataSet|, |TaxonNamespace|, |Taxon| |TreeList|, |Tree|, and various |CharacterMatrix| classes) with metadata information.
+These phylogenetic objects have an attribute, :attr:`annotations`, that is an instance of the :class:`~dendropy.datamodel.basemodel.AnnotationSet` class, which is an iterable (derived from :class:`dendropy.utility.containers.OrderedSet`) that serves to manage a collection of :class:`~dendropy.datamodel.basemodel.Annotation` objects.
+Each :class:`~dendropy.datamodel.basemodel.Annotation` object tracks a single annotation element.
+These annotations will be rendered as ``meta`` elements when writing to NeXML format or ampersand-prepended comment strings when writing to NEXUS/NEWICK format.
+Note that full and robust expression of metadata annotations, including stable and consistent round-tripping of information, can only be achieved while in the NeXML format.
+
+Overview of the Infrastructure for Metadata Annotation in |DendroPy|
+====================================================================
+
+Each item of metadata is maintained in an object of the :class:`~dendropy.datamodel.basemodel.Annotation` class.
+This class has the following attributes:
+
+    :attr:`~dendropy.datamodel.basemodel.Annotation.name`
+        The name of the metadata item or annotation.
+
+    :attr:`~dendropy.datamodel.basemodel.Annotation.value`
+        The value or content of the metadata item or annotation.
+
+    :attr:`~dendropy.datamodel.basemodel.Annotation.datatype_hint`
+        Custom data type indication for NeXML output (e.g. "xsd:string").
+
+    :attr:`~dendropy.datamodel.basemodel.Annotation.name_prefix`
+        Prefix that represents an abbreviation of the namespace associated with
+        this metadata item.
+
+    :attr:`~dendropy.datamodel.basemodel.Annotation.namespace`
+        The namespace (e.g. "http://www.w3.org/XML/1998/namespace") of this
+        metadata item (NeXML output).
+
+    :attr:`~dendropy.datamodel.basemodel.Annotation.annotate_as_reference`
+        If |True|, indicates that this annotation should not be interpreted semantically as a literal value, but rather as a source to be dereferenced.
+
+    :attr:`~dendropy.datamodel.basemodel.Annotation.is_hidden`
+        If |True|, indicates that this annotation should not be printed or written out.
+
+    :attr:`~dendropy.datamodel.basemodel.Annotation.prefixed_name`
+        Returns the name of this annotation with its namespace prefix (e.g. "dc:subject").
+
+These :class:`~dendropy.datamodel.basemodel.Annotation` objects are typically collected and managed in an "annotations manager" container class, :class:`~dendropy.datamodel.basemodel.AnnotationSet`.
+This is a specialization of :class:`dendropy.utility.containers.OrderedSet` whose elements are instances of :class:`~dendropy.datamodel.basemodel.Annotation`.
+The full set of annotations associated with each object of |DataSet|, |TaxonNamespace|, |Taxon| |TreeList|, |Tree|, various |CharacterMatrix| and other phylogenetic data class types is available through the :attr:`annotations` attribute of those objects, which is an instance of :class:`~dendropy.datamodel.basemodel.AnnotationSet`.
+The :class:`~dendropy.datamodel.basemodel.AnnotationSet` includes the following additional methods to support the creation, access, and management of the :class:`~dendropy.datamodel.basemodel.Annotation` object elements contained within it:
+
+    - :meth:`~dendropy.datamodel.basemodel.AnnotationSet.add_new()`
+    - :meth:`~dendropy.datamodel.basemodel.AnnotationSet.add_bound_attribute()`
+    - :meth:`~dendropy.datamodel.basemodel.AnnotationSet.add_citation()`
+    - :meth:`~dendropy.datamodel.basemodel.AnnotationSet.findall()`
+    - :meth:`~dendropy.datamodel.basemodel.AnnotationSet.find()`
+    - :meth:`~dendropy.datamodel.basemodel.AnnotationSet.drop()`
+    - :meth:`~dendropy.datamodel.basemodel.AnnotationSet.values_as_dict()`
+
+
+..
+    The fundamental unit of metadata in |DendroPy| is the :class:`~dendropy.datamodel.basemodel.Annotation` object.
+    Each :class:`~dendropy.datamodel.basemodel.Annotation` object stores information regarding a single item of metadata, keeping track of, at a minimum, the name and value or content of the metadata item, which is accessible through the attributes ":attr:`~dendropy.datamodel.basemodel.Annotation.name`" and  ":attr:`~dendropy.datamodel.basemodel.Annotation.value`" respectively.
+    These :class:`~dendropy.datamodel.basemodel.Annotation` objects are typically collected and managed in an "annotations manager" container class, :class:`~dendropy.datamodel.basemodel.AnnotationSet`, which is a specialization of ":class:`dendropy.utility.containers.OrderedSet`".
+    Phylogenetic data objects of |DataSet|, |TaxonNamespace|, |Taxon| |TreeList|, |Tree|, various |CharacterMatrix| and other classes all have an attribute, ":attr:`annotations`", that represents an instance of the :class:`~dendropy.datamodel.basemodel.AnnotationSet` class, and whose elements are :class:`~dendropy.datamodel.basemodel.Annotation` objects that collectively make up the full set of annotations or metadata associated with that particular phylogenetic data object.
+        The elements of the ":attr:`annotations`" attribute of phylogenetic data objects are objects of :class:`~dendropy.datamodel.basemodel.Annotation` that collectively make up the full set of annotations or metadata associated with that particular phylogenetic data object.
+
+
+The following code snippet reads in a data file in NeXML format, and dumps out the annotations::
+
+    #! /usr/bin/env python
+
+    import sys
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml", "nexml")
+    print "-- (dataset) ---\n"
+    for a in ds.annotations:
+        print "%s = '%s'" % (a.name, a.value)
+    for tree_list in ds.tree_lists:
+        for tree in tree_list:
+            print "\n-- (tree '%s') --\n" % tree.label
+            for a in tree.annotations:
+                print "%s = '%s'" % (a.name, a.value)
+
+Running the above results in::
+
+    -- (dataset) ---
+
+    bibliographicCitation = 'Wiklund H., Altamira I.V., Glover A., Smith C., Baco A., & Dahlgren T.G. 2012. Systematics and biodiversity of Ophryotrocha (Annelida, Dorvilleidae) with descriptions of six new species from deep-sea whale-fall and wood-fall habitats in the north-east Pacific. Systematics and Biodiversity, .'
+    subject = 'whale-fall'
+    changeNote = 'Generated on Wed Jun 06 11:02:45 EDT 2012'
+    subject = 'wood-fall'
+    title = 'Systematics and biodiversity of Ophryotrocha (Annelida, Dorvilleidae) with descriptions of six new species from deep-sea whale-fall and wood-fall habitats in the north-east Pacific'
+    publicationName = 'Systematics and Biodiversity'
+    creator = 'Wiklund H., Altamira I.V., Glover A., Smith C., Baco A., & Dahlgren T.G.'
+    publisher = 'Systematics and Biodiversity'
+    contributor = 'Wiklund H.'
+    volume = ''
+    contributor = 'Altamira I.V.'
+    number = ''
+    contributor = 'Glover A.'
+    historyNote = 'Mapped from TreeBASE schema using org.cipres.treebase.domain.nexus.nexml.NexmlDocumentWriter at 645f9132 $Rev: 1060 $'
+    contributor = 'Smith C.'
+    modificationDate = '2012-06-04'
+    contributor = 'Baco A.'
+    contributor = 'Dahlgren T.G.'
+    identifier.study.tb1 = 'None'
+    publicationDate = '2012'
+    section = 'Study'
+    doi = ''
+    title.study = 'Systematics and biodiversity of Ophryotrocha (Annelida, Dorvilleidae) with descriptions of six new species from deep-sea whale-fall and wood-fall habitats in the north-east Pacific'
+    subject = 'New species'
+    subject = 'Ophryotrocha'
+    creationDate = '2012-05-09'
+    subject = 'polychaeta'
+    date = '2012-06-04'
+    subject = 'molecular phylogeny'
+    identifier.study = '12713'
+
+    -- (tree 'con 50 majrule') --
+
+    ntax.tree = '41'
+    kind.tree = 'Species Tree'
+    quality.tree = 'Unrated'
+    isDefinedBy = 'http://purl.org/phylo/treebase/phylows/study/TB2:S12713'
+    type.tree = 'Consensus'
+
+
+The following sections discuss these methods and attributes in detail, describing how to create, read, write, search, and manipulate annotations.
+
+Metadata Annotation Creation
+=============================
+
+Reading Data from an External Source
+------------------------------------
+
+When reading data in NeXML format, metadata annotations given in the source are automatically created and associated with the corresponding data objects.
+
+The metadata annotations associated with the phylogenetic data objects are collected in the attribute ``annotations`` of the objects, which is an object of type :class:`~dendropy.datamodel.basemodel.AnnotationSet`.
+Each annotation item is represented as an
+object of type :class:`~dendropy.datamodel.basemodel.Annotation`.
+
+For example::
+
+    #! /usr/bin/env python
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("pythonidae.annotated.nexml",
+    "nexml")
+    for a in ds.annotations:
+        print "Data Set '%s': %s" % (ds.label, a)
+    for taxon_namespace in ds.taxon_namespaces:
+        for a in taxon_namespace.annotations:
+            print "Taxon Set '%s': %s" % (taxon_namespace.label, a)
+        for taxon in taxon_namespace:
+            for a in taxon.annotations:
+                print "Taxon '%s': %s" % (taxon.label, a)
+    for tree_list in ds.tree_lists:
+        for a in tree_list.annotations:
+            print "Tree List '%s': %s" % (tree_list.label, a)
+        for tree in tree_list:
+            for a in tree.annotations:
+                print "Tree '%s': %s" % (tree.label, a)
+
+produces::
+
+    Data Set 'None': description="composite dataset of Pythonid sequences and trees"
+    Data Set 'None': subject="Pythonidae"
+    Taxon Set 'None': subject="Pythonidae"
+    Taxon 'Python regius': closeMatch="http://purl.uniprot.org/taxonomy/51751"
+    Taxon 'Python sebae': closeMatch="http://purl.uniprot.org/taxonomy/51752"
+    Taxon 'Python molurus': closeMatch="http://purl.uniprot.org/taxonomy/51750"
+    Taxon 'Python curtus': closeMatch="http://purl.uniprot.org/taxonomy/143436"
+    Taxon 'Morelia bredli': closeMatch="http://purl.uniprot.org/taxonomy/461327"
+    Taxon 'Morelia spilota': closeMatch="http://purl.uniprot.org/taxonomy/51896"
+    Taxon 'Morelia tracyae': closeMatch="http://purl.uniprot.org/taxonomy/129332"
+    Taxon 'Morelia clastolepis': closeMatch="http://purl.uniprot.org/taxonomy/129329"
+    Taxon 'Morelia kinghorni': closeMatch="http://purl.uniprot.org/taxonomy/129330"
+    Taxon 'Morelia nauta': closeMatch="http://purl.uniprot.org/taxonomy/129331"
+    Taxon 'Morelia amethistina': closeMatch="http://purl.uniprot.org/taxonomy/51895"
+    Taxon 'Morelia oenpelliensis': closeMatch="http://purl.uniprot.org/taxonomy/461329"
+    Taxon 'Antaresia maculosa': closeMatch="http://purl.uniprot.org/taxonomy/51891"
+    Taxon 'Antaresia perthensis': closeMatch="http://purl.uniprot.org/taxonomy/461324"
+    Taxon 'Antaresia stimsoni': closeMatch="http://purl.uniprot.org/taxonomy/461325"
+    Taxon 'Antaresia childreni': closeMatch="http://purl.uniprot.org/taxonomy/51888"
+    Taxon 'Morelia carinata': closeMatch="http://purl.uniprot.org/taxonomy/461328"
+    Taxon 'Morelia viridisN': closeMatch="http://purl.uniprot.org/taxonomy/129333"
+    Taxon 'Morelia viridisS': closeMatch="http://purl.uniprot.org/taxonomy/129333"
+    Taxon 'Apodora papuana': closeMatch="http://purl.uniprot.org/taxonomy/129310"
+    Taxon 'Liasis olivaceus': closeMatch="http://purl.uniprot.org/taxonomy/283338"
+    Taxon 'Liasis fuscus': closeMatch="http://purl.uniprot.org/taxonomy/129327"
+    Taxon 'Liasis mackloti': closeMatch="http://purl.uniprot.org/taxonomy/51889"
+    Taxon 'Antaresia melanocephalus': closeMatch="http://purl.uniprot.org/taxonomy/51883"
+    Taxon 'Antaresia ramsayi': closeMatch="http://purl.uniprot.org/taxonomy/461326"
+    Taxon 'Liasis albertisii': closeMatch="http://purl.uniprot.org/taxonomy/129326"
+    Taxon 'Bothrochilus boa': closeMatch="http://purl.uniprot.org/taxonomy/461341"
+    Taxon 'Morelia boeleni': closeMatch="http://purl.uniprot.org/taxonomy/129328"
+    Taxon 'Python timoriensis': closeMatch="http://purl.uniprot.org/taxonomy/51753"
+    Taxon 'Python reticulatus': closeMatch="http://purl.uniprot.org/taxonomy/37580"
+    Taxon 'Xenopeltis unicolor': closeMatch="http://purl.uniprot.org/taxonomy/196253"
+    Taxon 'Candoia aspera': closeMatch="http://purl.uniprot.org/taxonomy/51853"
+    Taxon 'Loxocemus bicolor': closeMatch="http://purl.uniprot.org/taxonomy/39078"
+    Tree '0': treeEstimator="RAxML"
+    Tree '0': substitutionModel="GTR+G+I"
+
+Metadata annotations in NEXUS and NEWICK must be given in the form of "hot comments" either in BEAST/FigTree syntax::
+
+    [&subject='Pythonidae']
+
+    [&length_hpd95={0.01917252,0.06241567},length_quant_5_95={0.02461821,0.06197141},length_range={0.01570374,0.07787249},length_mean=0.0418470252488,length_median=0.04091105,length_sd=0.0113086027131]
+
+or NHX-like syntax::
+
+    [&&subject='Pythonidae']
+
+    [&&length_hpd95={0.01917252,0.06241567},length_quant_5_95={0.02461821,0.06197141},length_range={0.01570374,0.07787249},length_mean=0.0418470252488,length_median=0.04091105,length_sd=0.0113086027131]
+
+However, by default these annotations are not parsed into |DendroPy| data model
+unless the keyword argument ``extract_comment_metadata=True`` is passed in to the call::
+
+    >>> ds = dendropy.DataSet.get_from_path("data.nex",
+    ... "nexus",
+    ... extract_comment_metadata=True)
+
+In general, support for metadata in NEXUS and NEWICK formats is very basic and lossy, and is limited to a small range of phylogenetic data types (taxa, trees, nodes, edges).
+These issues and limits are fundamental to the NEXUS and NEWICK formats, and thus if metadata is important to you and your work, you should be working with NeXML format.
+The NeXML format provides for rich, flexible and robust metadata annotation for the broad range of phylogenetic data, and |DendroPy| provides full support for metadata reading and writing in NeXML.
+
+
+Direct Composition with Literal Values
+--------------------------------------
+
+The :meth:`~dendropy.datamodel.basemodel.AnnotationSet.add_new` method of the :attr:`annotations` attribute allows for direct adding of metadata. This method has two mandatory arguments, "``name``" and "``value``"::
+
+    >>> import dendropy
+    >>> tree = dendropy.Tree.get_from_path('pythonidae.mle.tree', 'nexus')
+    >>> tree = dendropy.Tree.get_from_path('examples/pythonidae.mle.nex', 'nexus')
+    >>> tree.annotations.add_new(
+    ... name="subject",
+    ... value="Python phylogenetics",
+    ... )
+
+When printing the tree in NeXML, the metadata will be rendered as a "``<meta>``" tag child element of the associated "``<tree>``" element::
+
+    <nex:nexml
+        version="0.9"
+        xsi:schemaLocation="http://www.nexml.org/2009"
+        xmlns:dendropy="http://packages.python.org/DendroPy/"
+        xmlns="http://www.nexml.org/2009"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xmlns:xml="http://www.w3.org/XML/1998/namespace"
+        xmlns:nex="http://www.nexml.org/2009"
+    >
+    .
+    .
+    .
+        <trees id="x4320340992" otus="x4320340552">
+            <tree id="x4320381904" label="0" xsi:type="nex:FloatTree">
+                <meta xsi:type="nex:LiteralMeta" property="dendropy:subject" content="Python phylogenetics" id="meta4320379536" />
+    .
+    .
+    .
+
+As can be seen, by default, the metadata property is mapped to the "``dendropy``" namespace (i.e., '``xmlns:dendropy="http://packages.python.org/DendroPy/"``').
+This can be customized by using the "``name_prefix``" and "``namespace``" arguments to the call to :meth:`~dendropy.datamodel.basemodel.AnnotationSet.add_new`::
+
+    >>> tree.annotations.add_new(
+    ... name="subject",
+    ... value="Python phylogenetics",
+    ... name_prefix="dc",
+    ... namespace="http://purl.org/dc/elements/1.1/",
+    ... )
+
+This will result in the following NeXML fragment::
+
+    <nex:nexml
+        version="0.9"
+        xsi:schemaLocation="http://www.nexml.org/2009"
+        xmlns:dc="http://purl.org/dc/elements/1.1/"
+        xmlns="http://www.nexml.org/2009"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xmlns:xml="http://www.w3.org/XML/1998/namespace"
+        xmlns:nex="http://www.nexml.org/2009"
+        xmlns:dendropy="http://packages.python.org/DendroPy/"
+    >
+    .
+    .
+    .
+        <trees id="x4320340904" otus="x4320340464">
+            <tree id="x4320377872" label="0" xsi:type="nex:FloatTree">
+                <meta xsi:type="nex:LiteralMeta" property="dc:subject" content="Python phylogenetics" id="meta4320375440" />
+    .
+    .
+    .
+
+Note that the "``name_prefix``" and "``namespace``" must be specified simultaneously; that is, if one is specified, then the other must be specified as well.
+For convenience, you can specify the name of the annotation with the name prefix prepended by specifying "``name_is_prefixed=True``", though the namespace must still be provided separately::
+
+    >>> tree.annotations.add_new(
+    ... name="dc:subject",
+    ... value="Python phylogenetics",
+    ... name_is_prefixed=True,
+    ... namespace="http://purl.org/dc/elements/1.1/",
+    ... )
+
+For NeXML output, you can also specify a datatype::
+
+
+    >>> tree.annotations.add_new(
+    ... name="subject",
+    ... value="Python phylogenetics",
+    ... datatype_hint="xsd:string",
+    ... )
+    >>> tree.annotations.add_new(
+    ... name="answer",
+    ... value=42,
+    ... datatype_hint="xsd:integer",
+    ... )
+
+When writing to NeXML, this will result in the following fragment::
+
+    <trees id="x4320340992" otus="x4320340552">
+        <tree id="x4320381968" label="0" xsi:type="nex:FloatTree">
+            <meta xsi:type="nex:LiteralMeta" property="dendropy:answer" content="42" datatype="xsd:integer" id="meta4320379536" />
+            <meta xsi:type="nex:LiteralMeta" property="dendropy:subject" content="Python phylogenetics" datatype="xsd:string" id="meta4320379472" />
+
+You can also specify that the data should be interpreted as a source to be dereferenced in NeXML by passing in ``annotate_as_reference=True``.
+Note that this does not actually populate the contents of the annotation from the source (unlike the dynamic attribute value binding discussed below), but just indicates that the contents of the annotation should be *interpreted* differently by semantic readers.
+Thus, the following annotation::
+
+    >>> tree.annotations.add_new(
+    ... name="subject",
+    ... value="http://en.wikipedia.org/wiki/Pythonidae",
+    ... name_prefix="dc",
+    ... namespace="http://purl.org/dc/elements/1.1/",
+    ... annotate_as_reference=True,
+    ... )
+
+will be rendered in NeXML as::
+
+    <meta xsi:type="nex:ResourceMeta" rel="dc:subject" href="http://en.wikipedia.org/wiki/Pythonidae" />
+
+Sometimes, you may want to annotate an object with metadata, but do not want it to be printed or written out.
+Passing the ``is_hidden=True`` argument will result in the annotation being suppressed in all output::
+
+    >>> tree.annotations.add_new(
+    ... name="subject",
+    ... value="Python phylogenetics",
+    ... name_prefix="dc",
+    ... namespace="http://purl.org/dc/elements/1.1/",
+    ... is_hidden=True,
+    ... )
+
+The ``is_hidden`` attribute of an :class:`~dendropy.datamodel.basemodel.Annotation` object can also be set directly::
+
+    >>> subject_annotations = tree.annotations.findall(name="citation")
+    >>> for a in subject_annotations:
+    ...    a.is_hidden = True
+
+Dynamically Binding Annotation Values to Object Attribute Values
+----------------------------------------------------------------
+
+In some cases, instead of "hard-wiring" in metadata for an object, you may want to write out metadata that takes its value from the value of an attribute of the object.
+The :meth:`~dendropy.datamodel.basemodel.AnnotationSet.add_bound_attribute` method allows you to do this.
+This method takes, as a minimum, a *string* specifying the *name* of an existing attribute to which the value of the annotation will be dynamically bound.
+
+For example:
+
+.. literalinclude:: /examples/dynamic_annotations1.py
+
+results in::
+
+    <?xml version="1.0" encoding="ISO-8859-1"?>
+    <nex:nexml
+        version="0.9"
+        xsi:schemaLocation="http://www.nexml.org/2009"
+        xmlns:dendropy="http://packages.python.org/DendroPy/"
+        xmlns="http://www.nexml.org/2009"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xmlns:xml="http://www.w3.org/XML/1998/namespace"
+        xmlns:nex="http://www.nexml.org/2009"
+    >
+        <otus id="x4320344648">
+            <otu id="x4320380112" label="A">
+                <meta xsi:type="nex:LiteralMeta" property="dendropy:category" content="tiny" id="meta4320379472" />
+            </otu>
+            <otu id="x4320380432" label="B">
+                <meta xsi:type="nex:LiteralMeta" property="dendropy:category" content="medium" id="meta4320379536" />
+            </otu>
+            <otu id="x4320380752" label="C">
+                <meta xsi:type="nex:LiteralMeta" property="dendropy:category" content="N/A" id="meta4320379792" />
+            </otu>
+            <otu id="x4320381072" label="D">
+                <meta xsi:type="nex:LiteralMeta" property="dendropy:category" content="tiny" id="meta4320381328" />
+            </otu>
+            <otu id="x4320381264" label="E">
+                <meta xsi:type="nex:LiteralMeta" property="dendropy:category" content="tiny" id="meta4320381392" />
+            </otu>
+        </otus>
+        <trees id="x4320344560" otus="x4320344648">
+            <tree id="x4320379600" xsi:type="nex:FloatTree">
+                <node id="x4320379856">
+                    <meta xsi:type="nex:LiteralMeta" property="dendropy:pop_size" content="5491" id="meta4320379280" />
+                </node>
+                <node id="x4320379984" otu="x4320380112">
+                    <meta xsi:type="nex:LiteralMeta" property="dendropy:pop_size" content="2721" id="meta4320379408" />
+                </node>
+                <node id="x4320380176">
+                    <meta xsi:type="nex:LiteralMeta" property="dendropy:pop_size" content="4627" id="meta4320379344" />
+                </node>
+                <node id="x4320380304" otu="x4320380432">
+                    <meta xsi:type="nex:LiteralMeta" property="dendropy:pop_size" content="7202" id="meta4320381456" />
+                </node>
+                <node id="x4320380496">
+                    <meta xsi:type="nex:LiteralMeta" property="dendropy:pop_size" content="5337" id="meta4320379664" />
+                </node>
+                <node id="x4320380624" otu="x4320380752">
+                    <meta xsi:type="nex:LiteralMeta" property="dendropy:pop_size" content="1478" id="meta4320381520" />
+                </node>
+                <node id="x4320380816">
+                    <meta xsi:type="nex:LiteralMeta" property="dendropy:pop_size" content="1539" id="meta4320379728" />
+                </node>
+                <node id="x4320380944" otu="x4320381072">
+                    <meta xsi:type="nex:LiteralMeta" property="dendropy:pop_size" content="3457" id="meta4320381584" />
+                </node>
+                <node id="x4320381136" otu="x4320381264">
+                    <meta xsi:type="nex:LiteralMeta" property="dendropy:pop_size" content="3895" id="meta4320381648" />
+                </node>
+                <rootedge id="x4320379920" target="x4320379856" />
+                <edge id="x4320380048" source="x4320379856" target="x4320379984" />
+                <edge id="x4320380240" source="x4320379856" target="x4320380176" />
+                <edge id="x4320380368" source="x4320380176" target="x4320380304" />
+                <edge id="x4320380560" source="x4320380176" target="x4320380496" />
+                <edge id="x4320380688" source="x4320380496" target="x4320380624" />
+                <edge id="x4320380880" source="x4320380496" target="x4320380816" />
+                <edge id="x4320381008" source="x4320380816" target="x4320380944" />
+                <edge id="x4320381200" source="x4320380816" target="x4320381136" />
+            </tree>
+        </trees>
+    </nex:nexml>
+
+
+By default, the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.add_bound_attribute` method uses the name of the attribute as the name of the annotation.
+The "``annotation_name``" argument allows you to explicitly set the name of the annotation.
+In addition, the method call also supports the other customization arguments of the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.add_new` method: "``datatype_hint``", "``name_prefix``", "``namespace``", "``name_is_prefixed``", "``annotate_as_reference``", "``is_hidden``", etc.::
+
+    >>> tree.source_uri = None
+    >>> tree.annotations.add_bound_attribute(
+    ... "source_uri",
+    ... annotation_name="dc:subject",
+    ... namespace="http://purl.org/dc/elements/1.1/",
+    ... annotate_as_reference=True)
+
+Adding Citation Metadata
+------------------------
+
+You can add citation annotations using the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.add_citation` method.
+This method takes at least one argument, ``citation``.
+This can be a string representing the citation as a BibTex record or a dictionary with BibTex fields as keys and field content as values.
+
+For example:
+
+.. literalinclude:: /examples/bibtex_annotations1.py
+
+will result in::
+
+    <?xml version="1.0" encoding="ISO-8859-1"?>
+    <nex:nexml
+        version="0.9"
+        xsi:schemaLocation="http://www.nexml.org/2009"
+        xmlns:bibtex="http://www.edutella.org/bibtex#"
+        xmlns="http://www.nexml.org/2009"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xmlns:xml="http://www.w3.org/XML/1998/namespace"
+        xmlns:nex="http://www.nexml.org/2009"
+        xmlns:dendropy="http://packages.python.org/DendroPy/"
+    >
+        <meta xsi:type="nex:LiteralMeta" property="bibtex:journal" content="Molecular Biology and Evolution" datatype="xsd:string" id="meta4320453648" />
+        <meta xsi:type="nex:LiteralMeta" property="bibtex:bibtype" content="article" datatype="xsd:string" id="meta4320453200" />
+        <meta xsi:type="nex:LiteralMeta" property="bibtex:number" content="3" datatype="xsd:string" id="meta4320453776" />
+        <meta xsi:type="nex:LiteralMeta" property="bibtex:citekey" content="HeathHH2012" datatype="xsd:string" id="meta4320453328" />
+        <meta xsi:type="nex:LiteralMeta" property="bibtex:pages" content="939-955" datatype="xsd:string" id="meta4320453968" />
+        <meta xsi:type="nex:LiteralMeta" property="bibtex:volume" content="29" datatype="xsd:string" id="meta4320453840" />
+        <meta xsi:type="nex:LiteralMeta" property="bibtex:year" content="2012" datatype="xsd:string" id="meta4320453904" />
+        <meta xsi:type="nex:LiteralMeta" property="bibtex:doi" content="10.1093/molbev/msr255" datatype="xsd:string" id="meta4320453456" />
+        <meta xsi:type="nex:LiteralMeta" property="bibtex:title" content="A {Dirichlet} Process Prior for Estimating Lineage-Specific Substitution Rates." datatype="xsd:string" id="meta4320453520" />
+        <meta xsi:type="nex:LiteralMeta" property="bibtex:url" content="http://mbe.oxfordjournals.org/content/early/2011/11/04/molbev.msr255.abstract" datatype="xsd:string" id="meta4320453584" />
+        <meta xsi:type="nex:LiteralMeta" property="bibtex:author" content="Tracy A. Heath and Mark T. Holder and John P. Huelsenbeck" datatype="xsd:string" id="meta4320453712" />
+        .
+        .
+        .
+
+
+The following results in the same output as above, but the citation is given as a dictionary with BibTex fields as keys and content as values:
+
+.. literalinclude:: /examples/bibtex_annotations2.py
+
+By default, the citation gets annotated as a series of separate BibTex elements.
+You can specify alternate formats by using the "``store_as``" argument.
+This argument can take one of the following values:
+
+    - "``bibtex``"
+        Each BibTex field gets recorded as a separate annotation, with name
+        given by the field name, content by the field value.
+        This is the default, and the results in NeXML are shown above.
+
+    - "``dublin``"
+        A subset of the BibTex fields gets recorded as a set of Dublin Core annotations, one per field::
+
+        <meta xsi:type="nex:LiteralMeta" property="dc:date" content="2012" datatype="xsd:string" id="meta4320461584" />
+        <meta xsi:type="nex:LiteralMeta" property="dc:publisher" content="Molecular Biology and Evolution" datatype="xsd:string" id="meta4320461648" />
+        <meta xsi:type="nex:LiteralMeta" property="dc:title" content="A {Dirichlet} Process Prior for Estimating Lineage-Specific Substitution Rates." datatype="xsd:string" id="meta4320461776" />
+        <meta xsi:type="nex:LiteralMeta" property="dc:creator" content="Tracy A. Heath and Mark T. Holder and John P. Huelsenbeck" datatype="xsd:string" id="meta4320461712" />
+
+    - "``prism``"
+        A subset of the BibTex fields gets recorded as a set of PRISM (Publishing Requirements for Industry Standard Metadata) annotations, one per field::
+
+        <meta xsi:type="nex:LiteralMeta" property="prism:volume" content="29" datatype="xsd:string" id="meta4320461584" />
+        <meta xsi:type="nex:LiteralMeta" property="prism:pageRange" content="939-955" datatype="xsd:string" id="meta4320461648" />
+        <meta xsi:type="nex:LiteralMeta" property="prism:publicationDate" content="2012" datatype="xsd:string" id="meta4320461776" />
+        <meta xsi:type="nex:LiteralMeta" property="prism:publicationName" content="Molecular Biology and Evolution" datatype="xsd:string" id="meta4320461712" />
+
+
+In addition, the method call also supports some of the other customization arguments of the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.add_new` method:  "``name_prefix``", "``namespace``", "``name_is_prefixed``", "``is_hidden``".
+
+Copying Metadata Annotations from One Phylogenetic Data Object to Another
+-------------------------------------------------------------------------
+
+As the :class:`~dendropy.datamodel.basemodel.AnnotationSet` is derived from :class:`dendropy.utility.containers.OrderedSet`, it has the :meth:`dendropy.utility.containers.OrderedSet.add` and :meth:`dendropy.utility.containers.OrderedSet.update` methods available for direct addition of :class:`~dendropy.datamodel.basemodel.Annotation` objects.
+The following example shows how to add metadata annotations associated with a |DataSet| object to all its |Tree| objects::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    ds_annotes = ds.annotations.findall(name_prefix="dc").values_as_dict()
+    for tree_list in ds.tree_lists:
+        for tree in tree_list:
+            tree.annotations.update(ds_annotes)
+
+Or, alternatively::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    ds_annotes = ds.annotations.findall(name_prefix="dc").values_as_dict()
+    for tree_list in ds.tree_lists:
+        for tree in tree_list:
+            for a in ds_annotes:
+                tree.annotations.add(a)
+
+
+Metadata Annotation Access and Manipulation
+===========================================
+
+Iterating Over Collections of Annotations
+-----------------------------------------
+
+The collection of :class:`~dendropy.datamodel.basemodel.Annotation` objects representing metadata annotations associated with particular phylogenetic data objects can be accessed through the :attr:`annotations` attribute of each particular object.
+
+For example::
+
+    #! /usr/bin/env python
+    ds = dendropy.DataSet.get_from_path("pythonidae.annotated.nexml",
+    "nexml")
+    for a in ds.annotations:
+        print "The dataset has metadata annotation '%s' with content '%s'" % (a.name, a.value)
+    tree = ds.tree_lists[0][0]
+    for a in tree.annotations:
+        print "Tree '%s' has metadata annotation '%s' with content '%s'" % (tree.label, a.name, a.value)
+
+will result in::
+
+    The dataset has metadata annotation 'description' with content 'composite dataset of Pythonid sequences and trees'
+    The dataset has metadata annotation 'subject' with content 'Pythonidae'
+    Tree '0' has metadata annotation 'treeEstimator' with content 'RAxML'
+    Tree '0' has metadata annotation 'substitutionModel' with content 'GTR+G+I'
+
+Retrieving Annotations By Search Criteria
+-----------------------------------------
+
+Instead of iterating through every element in the :attr:`annotations` attribute of data objects, you can use the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.findall` method of the :attr:`annotations` object to return a *collection* of :class:`~dendropy.datamodel.basemodel.Annotation` objects that match the search or filter criteria specified in keyword arguments to the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.findall` call.
+These keyword arguments should specify attributes of :class:`~dendropy.datamodel.basemodel.Annotation` and the corresponding value to be matched.
+Multiple keyword-value pairs can be specified, and only :class:`~dendropy.datamodel.basemodel.Annotation` objects that match *all* the criteria will be returned.
+
+For example, the following returns a collection of annotations that have a name of "contributor"::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    results = ds.annotations.findall(name="contributor")
+    for a in results:
+        print "%s='%s'" % (a.name, a.value)
+
+and will result in::
+
+    contributor='Dahlgren T.G.'
+    contributor='Baco A.'
+    contributor='Smith C.'
+    contributor='Glover A.'
+    contributor='Altamira I.V.'
+    contributor='Wiklund H.'
+
+While the following returns a collection of annotations that are in the Dublin Core namespace::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    results = ds.annotations.findall(namespace="http://purl.org/dc/elements/1.1/")
+    for a in results:
+        print "%s='%s'" % (a.name, a.value)
+
+and results in::
+
+    subject='wood-fall'
+    contributor='Wiklund H.'
+    publisher='Systematics and Biodiversity'
+    subject='whale-fall'
+    contributor='Dahlgren T.G.'
+    contributor='Smith C.'
+    date='2012-06-04'
+    subject='polychaeta'
+    contributor='Glover A.'
+    subject='Ophryotrocha'
+    title='Systematics and biodiversity of Ophryotrocha (Annelida, Dorvilleidae) with descriptions of six new species from deep-sea whale-fall and wood-fall habitats in the north-east Pacific'
+    subject='New species'
+    subject='molecular phylogeny'
+    contributor='Altamira I.V.'
+    creator='Wiklund H., Altamira I.V., Glover A., Smith C., Baco A., & Dahlgren T.G.'
+    contributor='Baco A.'
+
+The following, in turn, searches for and suppresses printing of annotations that have a name prefix of "dc" *and* have empty values::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    results = ds.annotations.findall(name_prefix="dc", value="")
+    for a in results:
+        a.is_hidden = True
+
+Modifying the :class:`~dendropy.datamodel.basemodel.Annotation` objects in a returned collection modifies the metadata of the parent data object. For example, the following sets all the field values to upper case characters::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    results = ds.annotations.findall(name="contributor")
+    for a in results:
+        a.value = a.value.upper()
+    results = ds.annotations.findall(name="contributor")
+    for a in results:
+        print a.value
+
+and results in::
+
+    DAHLGREN T.G.
+    BACO A.
+    SMITH C.
+    GLOVER A.
+    ALTAMIRA I.V.
+    WIKLUND H.
+
+The collection returned by the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.findall` method is an object of type :class:`~dendropy.datamodel.basemodel.AnnotationSet`.
+However, while modifying :class:`~dendropy.datamodel.basemodel.Annotation` objects in this collection will result in the metadata of the parent object being modified (as in the previous example), adding new annotations to this returned collection will *not*  add them to the collection of metadata annotations of the parent object.
+Thus, the following example shows that the size of the annotations collection associated with the dataset is unchanged by adding new annotations to the results of a :meth:`~dendropy.datamodel.basemodel.AnnotationSet.findall` call::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    print len(ds.annotations)
+    results = ds.annotations.findall(namespace="http://purl.org/dc/elements/1.1/")
+    results.add_new(name="color", value="blue")
+    results.add_new(name="height", value="100")
+    results.add_new(name="length", value="200")
+    results.add_new(name="width", value="50")
+    print len(ds.annotations)
+
+The above produces::
+
+    30
+    30
+
+As can be seen, no new annotations are added to the data set metadata.
+
+If no matching :class:`~dendropy.datamodel.basemodel.Annotation` objects are found then the :class:`~dendropy.datamodel.basemodel.AnnotationSet` that is returned is empty.
+
+If *no* keyword arguments are passed to :meth:`~dendropy.datamodel.basemodel.AnnotationSet.findall`, then *all* annotations are returned::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    results = ds.annotations.findall()
+    print len(results) == len(ds.annotations)
+
+The above produces::
+
+    True
+
+Retrieving a Single Annotation By Search Criteria
+-------------------------------------------------
+
+The :meth:`~dendropy.datamodel.basemodel.AnnotationSet.find` method of the :attr:`annotations` object returns the *first* :class:`~dendropy.datamodel.basemodel.Annotation` object that matches the search or filter criteria specified in keyword arguments to the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.find` call.
+These keyword arguments should specify attributes of :class:`~dendropy.datamodel.basemodel.Annotation` and the corresponding value to be matched.
+Multiple keyword-value pairs can be specified, and only the first :class:`~dendropy.datamodel.basemodel.Annotation` object that matches *all* the criteria will be returned.
+
+For example::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    print ds.annotations.find(name="contributor")
+
+and will result in::
+
+    contributor='Dahlgren T.G.'
+
+While the following returns the first annotation in the Dublin Core namespace::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    print ds.annotations.find(namespace="http://purl.org/dc/elements/1.1/")
+
+and results in::
+
+    subject='wood-fall'
+
+If no matching :class:`~dendropy.datamodel.basemodel.Annotation` objects are found then a default of |None| is returned::
+
+    >>> print ds.annotations.find(name="author")
+    None
+
+Unlike :meth:`~dendropy.datamodel.basemodel.AnnotationSet.findall`, it is invalid to call :meth:`~dendropy.datamodel.basemodel.AnnotationSet.find` with no search criteria keyword arguments, and a ``TypeError`` exception will be raised.
+
+Retrieving the Value of a Single Annotation
+-------------------------------------------
+
+For convenience, the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.get_value` method is provided.
+This will search the :class:`~dendropy.datamodel.basemodel.AnnotationSet` for the *first* :class:`~dendropy.datamodel.basemodel.Annotation` that has its name field equal to the first argument passed to the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.get_value` method, and return its value.
+If no match is found, the second argument is returned (or |None|, if no second argument is specified).
+Examples::
+
+    >>> print tree.annotations.get_value("subject")
+    molecular phylogeny
+    >>> print tree.annotations.get_value("creator")
+    Yoder A.D., & Yang Z.
+    >>> print tree.annotations.get_value("generator")
+    None
+    >>> print tree.annotations.get_value("generator", "unspecified")
+    unspecified
+
+Transforming Annotations to a Dictionary
+----------------------------------------
+
+In some applications, it might be more convenient to work with dictionaries rather than :class:`~dendropy.datamodel.basemodel.AnnotationSet` objects.
+The :meth:`~dendropy.datamodel.basemodel.Annotation.values_as_dict` method creates a dictionary populated with key-value pairs from the collection.
+By default, the keys are the ``name`` attribute of the :class:`~dendropy.datamodel.basemodel.Annotation` object and the values are the ``value`` attribute.
+Thus, the following::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    a = ds.annotations.values_as_dict()
+    print a
+
+results in::
+
+    {'volume': '',
+    'doi': '',
+    'date': '2012-06-04',
+    'bibliographicCitation': 'Wiklund H., Altamira I.V., Glover A., Smith C., Baco A., & Dahlgren T.G. 2012. Systematics and biodiversity of Ophryotrocha (Annelida, Dorvilleidae) with descriptions of six new species from deep-sea whale-fall and wood-fall habitats in the north-east Pacific. Systematics and Biodiversity, .',
+    'changeNote': 'Generated on Wed Jun 06 11:02:45 EDT 2012',
+    'creator': 'Wiklund H., Altamira I.V., Glover A., Smith C., Baco A., & Dahlgren T.G.',
+    'section': 'Study',
+    'title': 'Systematics and biodiversity of Ophryotrocha (Annelida, Dorvilleidae) with descriptions of six new species from deep-sea whale-fall and wood-fall habitats in the north-east Pacific',
+    'publisher': 'Systematics and Biodiversity',
+    'identifier.study.tb1': None,
+    'number': '',
+    'identifier.study': '12713',
+    'modificationDate': '2012-06-04',
+    'historyNote': 'Mapped from TreeBASE schema using org.cipres.treebase.domain.nexus.nexml.NexmlDocumentWriter at 645f9132 $Rev: 1060 $',
+    'publicationDate': '2012',
+    'contributor': 'Wiklund H.',
+    'publicationName': 'Systematics and Biodiversity',
+    'creationDate': '2012-05-09',
+    'title.study': 'Systematics and biodiversity of Ophryotrocha (Annelida, Dorvilleidae) with descriptions of six new species from deep-sea whale-fall and wood-fall habitats in the north-east Pacific',
+    'subject': 'molecular phylogeny'}
+
+Note that no attempt is made to prevent or account for key collision: :class:`~dendropy.datamodel.basemodel.Annotation` with the same name value will overwrite each other in the dictionary.
+Custom control of the dictionary key/value generation can be specified via keyword arguments:
+
+    ``key_attr``
+        String specifying an Annotation object attribute name to be used
+        as keys for the dictionary.
+
+    ``key_func``
+        Function that takes an Annotation object as an argument and returns
+        the value to be used as a key for the dictionary.
+
+    ``value_attr``
+        String specifying an Annotation object attribute name to be used
+        as values for the dictionary.
+
+    ``value_func``
+        Function that takes an Annotation object as an argument and returns
+        the value to be used as a value for the dictionary.
+
+For example::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    a = ds.annotations.values_as_dict(key_attr="prefixed_name")
+    a = ds.annotations.values_as_dict(key_attr="prefixed_name", value_attr="namespace")
+    a = ds.annotations.values_as_dict(key_func=lambda a: a.namespace + a.name)
+    a = ds.annotations.values_as_dict(key_func=lambda a: a.namespace + a.name,
+            value_attr="value")
+
+
+As the collection returned by the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.findall` method is an object of type :class:`~dendropy.datamodel.basemodel.AnnotationSet`, this can also be transformed to a dictionary.
+For example::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    a = ds.annotations.findall(name_prefix="dc").values_as_dict()
+    print a
+
+will result in::
+
+    {'publisher': 'Systematics and Biodiversity',
+    'creator': 'Wiklund H., Altamira I.V., Glover A., Smith C., Baco A., & Dahlgren T.G.',
+    'title': 'Systematics and biodiversity of Ophryotrocha (Annelida, Dorvilleidae) with descriptions of six new species from deep-sea whale-fall and wood-fall habitats in the north-east Pacific',
+    'date': '2012-06-04',
+    'contributor': 'Baco A.',
+    'subject': 'molecular phylogeny'}
+
+Note how only one entry for "contributor" is present: the others were overwritten/replaced.
+
+Adding to, deleting, or modifying either the keys or the values of the dictionary returned by :meth:`~dendropy.datamodel.basemodel.Annotation.values_as_dict` in *no way* changes any of the original metadata: it serves as a snapshot copy of the literal values of the metadata.
+
+
+Deleting or Removing Metadata Annotations
+-----------------------------------------
+
+The :meth:`~dendropy.datamodel.basemodel.AnnotationSet.drop` method of :class:`~dendropy.datamodel.basemodel.AnnotationSet` objects takes search criteria similar to :meth:`~dendropy.datamodel.basemodel.AnnotationSet.findall`, but instead of returning the matched  :class:`~dendropy.datamodel.basemodel.Annotation` objects, it *removes* them from the parent collection.
+For example, the following removes all metadata annotations with the name prefix "dc" from the |DataSet| object ``ds``::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    print "Original: %d items" % len(ds.annotations)
+    removed = ds.annotations.drop(name_prefix="dc")
+    print "Removed: %d items" % len(removed)
+    print "Current: %d items" % len(ds.annotations)
+
+and results in::
+
+    Original: 30 items
+    Removed: 16 items
+    Current: 14 items
+
+As can be seen, the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.drop` method returns the individual :class:`~dendropy.datamodel.basemodel.Annotation` objects removed as a new :class:`~dendropy.datamodel.basemodel.AnnotationSet` collection.
+This is useful if you still want to use the removed :class:`~dendropy.datamodel.basemodel.Annotation` objects elsewhere.
+
+As with the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.findall` method, multiple keyword criteria can be specified::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+            "nexml")
+    ds.annotations.drop(name_prefix="dc", name="contributor")
+
+In addition, again similar in behavior to the :meth:`~dendropy.datamodel.basemodel.AnnotationSet.findall` method, *no* keyword arguments result in *all* the annotations being removed.
+Thus, the following results in all metadata annotations being deleted from the |DataSet| object ``ds``::
+
+    import dendropy
+    ds = dendropy.DataSet.get_from_path("sample1.xml",
+        "nexml")
+    print "Original: %d items" % len(ds.annotations)
+    removed = ds.annotations.drop()
+    print "Removed: %d items" % len(removed)
+    print "Current: %d items" % len(ds.annotations)
+
+and results in::
+
+    Original: 30 items
+    Removed: 30 items
+    Current: 0 items
+
+Writing or Saving Metadata
+==========================
+
+When writing to NeXML format, all metadata annotations are preserved and can be fully round-tripped.
+Currently, this is the only data format that allows for robust treatment of metadata.
+
+Due to the fundamental limitations of the NEXUS/Newick format, metadata handling in this format is limited and rather idiosyncratic.
+Currently, metadata will be written out as name-value pairs (separated by "=") in ampersand-prepended comments associated with the particular phylogenetic data object.
+This syntax corresponds to the BEAST or FigTree style of metadata annotation.
+However, this association might not be preserved.
+For example, metadata annotations associated with edges and nodes of trees will be written out fully in NEXUS and NEWICK formats, but when read in again will all be associated with nodes.
+The keyword argument ``annotations_as_nhx=True`` passed to the call to write the data in NEXUS/NEWICK format will result in a double ampersand prefix to the comment, thus (partially) conforming to NHX specifications.
+Metadata associated with |DataSet| objects will be written in out in the same BEAST/FigTree/NHX syntax at the top of the file, while metadata associated with |TaxonNamespace| and |Taxon| objects will be written out immediately after the start of the Taxa Block and taxon labels respectively.
+This is very fragile: for example, a metadata annotation *before* a taxon label will be associated with the *previous* taxon when being read in again.
+As noted above, if metadata annotations are important for yourself, your workflow, or your task, then the NeXML format should be used rather than NEXUS or NEWICK.
+
diff --git a/doc/source/programs/index.rst b/doc/source/programs/index.rst
new file mode 100644
index 0000000..2369954
--- /dev/null
+++ b/doc/source/programs/index.rst
@@ -0,0 +1,8 @@
+#########################
+Programs and Applications
+#########################
+
+.. toctree::
+    :maxdepth: 4
+
+    sumtrees
diff --git a/doc/source/programs/sumtrees.rst b/doc/source/programs/sumtrees.rst
new file mode 100644
index 0000000..d73e945
--- /dev/null
+++ b/doc/source/programs/sumtrees.rst
@@ -0,0 +1,567 @@
+########################################################
+SumTrees: Phylogenetic Tree Summarization and Annotation
+########################################################
+
+Introduction
+============
+
+`SumTrees <sumtrees.html>`_ is a program by |js|_ and |mth|_ to summarize non-parameteric bootstrap or Bayesian posterior probability support for splits or clades on phylogenetic trees.
+
+The basis of the support assessment is typically given by a set of non-parametric bootstrap replicate tree samples produced by programs such as GARLI or RAxML, or by a set of MCMC tree samples produced by programs such as Mr. Bayes or BEAST.
+The proportion of trees out of the samples in which a particular split is found is taken to be the degree of support for that split as indicated by the samples.
+The samples that are the basis of the support can be distributed across multiple files, and a burn-in option allows for an initial number of trees in each file to be excluded from the analysis if they are not considered to be drawn from the true support distribution.
+
+The support for the splits will be mapped onto one or more target trees either in terms of node labels or branch lengths.
+The target trees can be supplied by yourself, or, if no target trees are given, then
+a *summary* tree can be constructed.
+This summary tree can be a Maximum Clade Credibility Topology (i.e., MCCT, a topology that maximizes the product of the clade posterior probabilities), the majority-rule clade consensus tree, or some other type.
+If a majority-rule consensus tree is selected, you have the option of specifying the minimum posterior probability or proportional frequency threshold for a clade to be included on the consensus tree.
+
+By default SumTrees will provide summaries of edge lengths (i.e., mean, median, standard deviation, range, 95% HPD, 5% and 95% quantiles, etc.) as special node comments. These can be visualized in `FigTree <http://tree.bio.ed.ac.uk/software/figtree/>`_ by, for example, checking "Node Labels", then selecting one of "length_mean", "length_median", "length_sd", "length_hpd95", etc.
+If the trees are ultrametric and the "``--summarize-node-ages``" flag is used, or edge lengths are set so that node ages on the output trees correspond to the mean or median of the node ages of the input trees ("``--edges=mean-age``" or "``--edges=median-age``"), then node ages will be summarized as well. In all cases, the flag "``--suppress-annotations``" will suppress calculation and output of these summaries.
+
+If you are processing multiple source files and you have multiple cores available on your machine, you can specify the "``-M``" flag to use all the cores or, e.g., "``-m 4``" to use 4 cores.
+Using multiple cores will, of course, speed up processing of your files.
+
+Where to Find the Package
+=========================
+
+SumTrees is distributed and installed as part of the |DendroPy|_ phylogenetic computing library, which has its homepage here:
+
+        |dendropy_homepage_url|
+
+with the main download page here:
+
+        |dendropy_download_url|
+
+How to Install the Package
+==========================
+
+DendroPy is fully easy-installable and can be installed using |pip|_::
+
+    $ sudo pip install -U dendropy
+
+If you do not have |pip|_ installed, you should *definitely* `install it <https://pip.pypa.io/en/latest/installing.html>`_ !
+Note: the "``sudo``" command should only be used if installing system-wide on a machine on which you have administrative privileges. Otherwise, you would use the "``--user``" flag for a local user install::
+
+    $ pip install --user -U dendropy
+
+These, and other ways of obtaining and installing DendroPy (e.g., by downloading the |dendropy_source_archive|_, or by cloning the `DendroPy Git repository <http://github.com/jeetsukumaran/DendroPy>`_), are discussed in detail in the ":doc:`/downloading`" section.
+
+Checking the Installation
+=========================
+
+If the installation was successful, you should be able to type "``sumtrees.py``" in the terminal window and see something like the following (with possibly a different date or version number)::
+
+    /==========================================================================\
+    |                                 SumTrees                                 |
+    |                     Phylogenetic Tree Summarization                      |
+    |                       Version 4.0.0 (Jan 31 2015)                        |
+    |                   By Jeet Sukumaran and Mark T. Holder                   |
+    |    Using: DendroPy 4.0.0.dev (DendroPy4-feea2b0, 2015-06-02 20:13:49)    |
+    +--------------------------------------------------------------------------+
+    |                                 Citation                                 |
+    |                                 ~~~~~~~~                                 |
+    | If any stage of your work or analyses relies on code or programs from    |
+    | this library, either directly or indirectly (e.g., through usage of your |
+    | own or third-party programs, pipelines, or toolkits which use, rely on,  |
+    | incorporate, or are otherwise primarily derivative of code/programs in   |
+    | this library), please cite:                                              |
+    |                                                                          |
+    |   Sukumaran, J and MT Holder. 2010. DendroPy: a Python library for       |
+    |     phylogenetic computing. Bioinformatics 26: 1569-1571.                |
+    |                                                                          |
+    |   Sukumaran, J and MT Holder. SumTrees: Phylogenetic Tree Summarization. |
+    |     4.0.0 (Jan 31 2015). Available at                                    |
+    |     https://github.com/jeetsukumaran/DendroPy.                           |
+    |                                                                          |
+    | Note that, in the interests of scientific reproducibility, you should    |
+    | describe in the text of your publications not only the specific          |
+    | version of the SumTrees program, but also the DendroPy library used in   |
+    | your analysis. For your information, you are running DendroPy            |
+    | 4.0.0.dev (DendroPy4-feea2b0, 2015-06-02 20:13:49).                      |
+    \==========================================================================/
+
+    usage: sumtrees.py [-i FORMAT] [-b BURNIN] [--force-rooted] [--force-unrooted]
+                    [-v] [--weighted-trees] [--preserve-underscores]
+                    [--taxon-name-file FILEPATH] [-t FILE] [-s SUMMARY-TYPE]
+                    [-f #.##] [--allow-unknown-target-tree-taxa]
+                    [--root-target-at-outgroup TAXON-LABEL]
+                    [--root-target-at-midpoint] [--set-outgroup TAXON-LABEL]
+                    [-e STRATEGY]
+                    [--force-minimum-edge-length FORCE_MINIMUM_EDGE_LENGTH]
+                    [--collapse-negative-edges] [--summarize-node-ages]
+                    [-l {support,keep,clear}] [--suppress-annotations] [-p]
+                    [-d #] [-o FILEPATH] [-F {nexus,newick,phylip,nexml}]
+                    [-x PREFIX] [--no-taxa-block]
+                    [--no-analysis-metainformation] [-c ADDITIONAL_COMMENTS]
+                    [-r] [-M] [-m NUM-PROCESSES] [-g LOG-FREQUENCY] [-q]
+                    [--ignore-missing-support] [-h] [--citation]
+                    [--usage-examples] [--describe]
+                    [TREE-FILEPATH [TREE-FILEPATH ...]]
+
+    Type 'sumtrees.py --help' for details on usage.
+    Type 'sumtrees.py --usage-examples' for examples of usage.
+
+You can now delete the original downloaded archive and unpacked directory if you want.
+
+How to Use the Program
+======================
+
+SumTrees is typically invoked by providing it a list of one or more tree files to be summarized::
+
+    $ sumtrees.py [OPTIONS] <TREEFILE> [<TREEFILE> [<TREEFILE> ...]]]
+
+Common options include specification of a target topology onto which to map the support ("``-t``" or "``--target``"), a summary tree to use (e.g., "``-s mcct``" or "``--summary-target=mcct``"), an output file ("``-o``" or "``--output``"), and a burn-in value ("``-b``" or "``--burnin``").
+
+Full help on program usage and options is given by using the "``--help``" option::
+
+    $ sumtrees.py --help
+
+
+Quick Recipes
+=============
+
+Summarization of Posterior Probabilities of Clades with a Consensus Tree
+------------------------------------------------------------------------
+Summarize a set of tree files using a 95% rule consensus tree, with support for clades indicated as proportions (posterior probabilities) using branch labels, and branch lengths being the mean across all trees, dropping the first 200 trees in each file as a burn-in, and saving the result to "``result.tre``"::
+
+    $ sumtrees.py --min-clade-freq=0.95 --burnin=200 --output-tree-filepath=result.tre treefile1.tre treefile2.tre treefile3.tre
+    $ sumtrees.py -f0.95 -b200 -o result.tre treefile1.tre treefile2.tre treefile3.tre
+
+Summarization of Posterior Probabilities of Clades with a Maximum Clade Credibility Tree (MCCT)
+-----------------------------------------------------------------------------------------------
+
+Summarize a set of tree files using a tree in the input set that maximizes the product of clade support, with support for clades indicated as proportions (posterior probabilities) using branch labels, and branch lengths the mean across all trees, dropping the first 200 trees in each file as a burn-in, and saving the result to "``result.tre``"::
+
+    $ sumtrees.py --summary-target=mcct --burnin=200 --support-as-labels --output-tree-filepath=result.tre treefile1.tre treefile2.tre treefile3.tre
+    $ sumtrees.py -s mcct -b200 -l -o result.tre treefile1.tre treefile2.tre treefile3.tre
+
+Non-parametric Bootstrap Support of a Model Tree
+------------------------------------------------
+Calculate support for nodes on a specific tree, "``best.tre``" as given by a set of tree files, with support reported as percentages rounded to integers, and saving the result to "``result.tre``"::
+
+    $ sumtrees.py --decimals=0 --percentages --output-tree-filepath=result.tre --target=best.tre treefile1.tre treefile2.tre treefile3.tre
+    $ sumtrees.py -d0 -p -o result.tre -t best.tre treefile1.tre treefile2.tre treefile3.tre
+
+Set Node Ages of Target or Summary Tree(s) to Mean/Median Node Age of Input Trees
+---------------------------------------------------------------------------------
+
+Summarize a set of ultrametric tree files using a 95% majority-rule consensus tree, with support for clades indicated as proportions (posterior probabilities) using branch labels, and branch lengths adjusted so the ages of internal nodes are the mean across all trees, dropping the first 200 trees in each file as a burn-in::
+
+    $ sumtrees.py --min-clade-freq=0.95 --burnin=200 --edges=mean-age --output-tree-filepath=result.tre treefile1.tre treefile2.tre treefile3.tre
+    $ sumtrees.py -f0.95 -b200 -o result.tre -l -e mean-age treefile1.tre treefile2.tre treefile3.tre
+
+To use the median age instead::
+
+    $ sumtrees.py --min-clade-freq=0.95 --burnin=200 --edges=median-age --output-tree-filepath=result.tre treefile1.tre treefile2.tre treefile3.tre
+    $ sumtrees.py -f0.95 -b200 -o result.tre -e median-age treefile1.tre treefile2.tre treefile3.tre
+
+Running in Parallel Mode
+------------------------
+
+Running in parallel mode will analyze each input source in its own independent process, with multiple processes running in parallel.
+Multiprocessing analysis is invoked by adding the "``-m``" or "``--multiprocessing``"  flag to the SumTrees command, and passing in the maximum number of processes to run in parallel.
+For example, if your machine has two cores, and you want to run the previous analyses using both of them, you would specify that SumTrees run in parallel mode with two processes by adding "``-m2``" or "``--multiprocessing=2``" to the SumTrees command invocation::
+
+    $ sumtrees.py --multiprocessing=2 --decimals=0 --percentages --output-tree-filepath=result.tre --target=best.tre treefile1.tre treefile2.tre treefile3.tre
+    $ sumtrees.py -m2 -d0 -p -o result.tre -t best.tre treefile1.tre treefile2.tre treefile3.tre
+    $ sumtrees.py --multiprocessing=2 --min-clade-freq=0.95 --burnin=200 --support-as-labels --output-tree-filepath=result.tre treefile1.tre treefile2.tre treefile3.tre
+    $ sumtrees.py -m2 -f0.95 -b200 -l -o result.tre treefile1.tre treefile2.tre treefile3.tre
+
+You can specify as many processes as you want, up to the total number of tree support files passed as input sources.
+If you want to use *all* the available cores on your machine, you can use the "``-M``" or "``--maximum-multiprocessing``" flag::
+
+    $ sumtrees.py --maximum-multiprocessing --decimals=0 --percentages --output-tree-filepath=result.tre --target=best.tre treefile1.tre treefile2.tre treefile3.tre
+    $ sumtrees.py -M -d0 -p -o result.tre -t best.tre treefile1.tre treefile2.tre treefile3.tre
+
+If you specify fewer processes than input sources, then the files will be cycled through the processes.
+
+Primers and Examples
+====================
+
+At its most basic, you will need to supply SumTrees with the path to one or more tree files in Newick or NEXUS format that you want to summarize::
+
+    $ sumtrees.py phylo.tre
+
+As no target tree was provided and no summary tree type was specified, SumTrees will, by default, construct a 50% majority-rule clade consensus tree of all the trees found in the file "``phylo.tre``" as the summarization target.
+The internal node labels of the resulting consensus tree will, by default, indicate the proportion of trees in "``phylo.tre``" in which that clade was found, while the branch lengths of the resulting consensus tree being set to the mean of the branch lengths of that clade across all the trees in "``phylo.tre``".
+
+If you have split searches across multiple runs (across, for example, multiple computers, so as to speed up the search time), such that you have multiple tree files ("``phylo.run1.tre``", "``phylo.run2.tre``", "``phylo.run3.tre``", ...), you can instruct SumTrees to consider all these files together when summarizing the support by simply listing them one after another separated by spaces::
+
+    $ sumtrees.py phylo.run1.tre phylo.run2.tre phylo.run3.tre
+
+As before, the above command will construct a 50% majority-rule consensus tree with clade support indicated by internal node labels and branch lengths being the mean across all trees, but this time it will use all the trees found across all the files listed: "``phylo.run1.tre``", "``phylo.run2.tre``", and "``phylo.run3.tre``".
+
+You will notice that the final resulting tree is displayed to the terminal and not saved anywhere.
+It will probably be more useful if we can save it to a file for visualization for further analysis.
+This can be done in one of two ways, either by redirecting the screen output to a file, using the standard (at least on UNIX and UNIX-like systems) redirection operator, ``>``::
+
+    $ sumtrees.py phylo.tre > phylo.consensus.sumtrees
+
+or by using the "``--output-tree-filepath``" option::
+
+    $ sumtrees.py --output-tree-filepath=phylo.consensus.sumtrees phylo.tre
+
+If the files are in different directories, or you are not in the same directory as the files, you should use the full directory path specification::
+
+    $ sumtrees.py --output-tree-filepath=/Users/myself/MyProjects/phylo1/final/phylo.consensus.sumtrees /Users/myself/MyProjects/phylo1/phylo.tre
+
+More extended options specify things like: where to save the output (by default it goes to the screen), the topology or tree to which to map the support (user-supplied or consensus tree), the output format (NEXUS by default, but it can also be Newick), whether support is indicated in terms of proportions or percentages etc.
+All of these options are specified on the command line when invoking the program, with multiple options separated by spaces.
+Many of the options have two alternate forms, a long form (a word or phrase preceded by two dashes, e.g., "``--option``") and a short form (a single letter preceded by a single dash, "``-o``").
+The long form of the options needs an equals sign before setting the parameter (e.g., "``--option=1``"), while the short one does not (e.g., "``-o1``" or "``-o 1``").
+Most of the options have default values that will be used if not explicitly set when the program is invoked.
+The order that the options are given does *not* matter, i.e., "``sumtrees.py --option1=something --option2=something``" is the same as "``sumtrees.py --option2=something --option1=something``".
+As mentioned above, full details on these options, their long and short forms, as well as their default values will be given by invoking the program with the "``--help``" or "``-h``" option: "``sumtrees.py --help``".
+
+Specifying and Customization of the Summarization Target
+========================================================
+
+SumTrees maps support values calculated from the input set of trees onto a *target* topology.
+This target topology can be a *summary* topology constructed from the input set of trees based on a strategy specified by the user (using the "``--summary-target``" or "``-s``" flag to specify, for example, a majority-rule consensus tree or a maximum credibility tree) or a topology provided by the user (using the "``--target-tree-filepath``" or "``-t``" option to provide, e.g., a maximum-likelihood estimate or some other topology sourced by other means).
+
+Specifying a Summarization Topology Type
+----------------------------------------
+
+You can specify the type of summary topology onto which the support values are mapped using the "``--summary-target``" or "``-s``" option.
+This option takes one of three values as an argument:
+
++-------------+---------------------------------------------------------------------------------------------------------------------------+
++-------------+---------------------------------------------------------------------------------------------------------------------------+
+| "consensus" | The majority-rule consensus tree (default)                                                                                |
++-------------+---------------------------------------------------------------------------------------------------------------------------+
+| "mcct"      | The Maximum Credibility Tree: the topology from the input set that maximizes the *product* of the support of the clades   |
++-------------+---------------------------------------------------------------------------------------------------------------------------+
+| "msct"      | The Maximum Sum of Credibilities Tree: the topology from the input set that maximizes the *sum* of support of the clades. |
++-------------+---------------------------------------------------------------------------------------------------------------------------+
+
+Majority-Rule Consensus Topology
+................................
+
+For example, say you have completed a 1000-replicate non-parametric analysis of your dataset using a program such as GARLI or RAxML.
+You want to construct a 70% majority-rule consensus tree of the replicates, with support indicated as percentages on the node labels.
+If the bootstrap replicates are in the file "``phylo-boots.tre``", you would then enter the following command::
+
+    $ sumtrees.py --summary-target=consensus --min-clade-freq=0.7 --percentages --decimals=0 phylo-boots.tre
+
+Or, using the short option syntax::
+
+    $ sumtrees.py -s consensus -f0.7 -p -d0 phylo-boots.tre
+
+Here, the "``--min-clade-freq=0.7``" or "``-f0.7``" option lowers the minimum threshold for clade inclusion to 70%.
+If you want a 95% majority-rule consensus tree instead, you would use "``--min-clade-freq=0.95``" or "``-f0.95``".
+The default threshold if none is specified is 0.5 (50%).
+The "``--percentages``" or "``-p``" option instructs SumTrees to report the support in terms of percentages, while the "``--decimals=0``" or "``-d 0``" option instructs SumTrees not to bother reporting any decimals.
+Note that even if you instruct SumTrees to report the support in terms of percentages, the minimum clade inclusion threshold is still given in terms of proportions.
+
+.. note::
+
+    As noted, if no target topology is specified (either using the "``--summary-target``"/"``-s``" or "``--target-tree-filepath``"/"``-t``" options), then SumTrees by default will construct and use a majority-rule consensus topology as a target, and hence the explicit specification of this as a target is not needed. Thus, the following will produce exactly the same results as above::
+
+        $ sumtrees.py --min-clade-freq=0.7 --percentages --decimals=0 phylo-boots.tre
+        $ sumtrees.py -f0.7 -p -d0 phylo-boots.tre
+
+Again, if we want to actually save the results to the file, we should use the "``--output-tree-filepath``" option::
+
+    $ sumtrees.py --output-tree-filepath=phylo-mle-support.sumtrees --min-clade-freq=0.7 --percentages --decimals=0 phylo-boots.tre
+    $ sumtrees.py -o phylo-mle-support.sumtrees -f0.7 -p -d0 phylo-boots.tre
+
+Maximum Clade Credibility Topology
+..................................
+
+The Maximum Clade Credibility Topology, or MCCT, is the topology that maximizes the *product* of the split support. You can use this as the target topology by specifying "``--summary-target=mcct``" or "``-s mcct``"::
+
+    $ sumtrees.py --summary-target=mcct phylo-boots.tre
+    $ sumtrees.py -s mcct phylo-boots.tre
+
+As might be expected, it can be combined with other options. For example, to discard the first 200 trees from each of the input sources and write the results to a file, "``results.tre``"::
+
+    $ sumtrees.py --summary-target=mcct --burnin=200 --output-tree-filepath=results.tre treefile1.tre treefile2.tre treefile3.tre
+    $ sumtrees.py -s mcct -b 200 -o results.tre treefile1.tre treefile2.tre treefile3.tre
+
+
+.. .. note::
+
+        Unfortunately, there is a *lot* of confusion regarding the terminology of this topology. The earlier versions of *BEAST* manual described a summary topology they called the "Maximum Clade Credibility Tree" or MCCT, which is the topology amongst the input set that maximized the *sum* of the clade credibilities. Later versions of *BEAST* introduced a summarization approach that used the topology amongst the input set that maximized the *product* of the clade credibilities. In *some [...]
+
+
+Specifying a Custom Topology or Set of Topologies
+-------------------------------------------------
+
+Say you also have a maximum likelihood estimate of the phylogeny, and want to annotate the nodes of the maximum likelihood tree with the proportion of trees out of the bootstrap replicates in which the node is found.
+Then, assuming your maximum likelihood tree is in the file, "``phylo-mle.tre``", and the bootstrap tree file is "``phylo-boots.tre``", you would use the "``--target-tree-filepath``" options, as in the following command::
+
+    $ sumtrees.py --target-tree-filepath=phylo-mle.tre phylo-boots.tre
+
+Here, "``--target-tree-filepath``" specifies the target topology onto which the support will be mapped, while the remaining (unprefixed) argument specifies the tree file that is the source of the support.
+An equivalent form of the same command, using the short option syntax is::
+
+    $ sumtrees.py -t phylo-mle.tre phylo-boots.tre
+
+If you want the support expressed in percentages instead of proportions, and the final tree saved to a file, you would enter::
+
+    $ sumtrees.py --output phylo-mle-support.sumtrees --target-tree-filepath phylo-mle.tre --percentages --decimals=0 phylo-boots.tre
+    $ sumtrees.py -o phylo-mle-support.sumtrees -t phylo-mle.tre -p -d0 phylo-boots.tre
+
+
+Summarizing Rooted and Ultrametric Trees
+----------------------------------------
+
+SumTrees treats all trees as unrooted unless specified otherwise. You can force SumTrees to treat all trees as rooted by passing it the "``--force-rooted``" flag::
+
+    $ sumtrees.py --force-rooted phylo.trees
+
+If the trees are rooted **and** ultrametric, the "``--summarize-node-ages``" flag will result in SumTrees summarizing node age information as well::
+
+    $ sumtrees.py --summarize-node-ages phylo.trees
+
+Summarizing Edge Lengths and Node Ages
+--------------------------------------
+
+If a target topology has been specified using the "``--target-tree-filepath``" or the "``-t``" option, then by default SumTrees retains the edge lengths of the target topologies.
+Otherwise, if the input trees are ultrametric and the "``--summarize-node-ages``" option is given, then by default SumTrees will adjust the edge lengths of the target topology so that the ages of the internal nodes are the mean of the ages of the corresponding nodes in the input set of trees.
+Otherwise, if no target trees are specified and the "``--summarize-node-ages``" is not given, the edge lengths of the target topology will be set to the mean lengths of the corresponding edges of the input set.
+In all cases, these defaults can be overridden by using the "``--set-edges``" or "``-e``" option, which takes one of the following values:
+
+        - ``mean-length``: sets the edge lengths of the target/consensus tree(s) to the mean of the lengths of the corresponding edges of the input trees.
+        - ``median-length``: sets the edge lengths of the target/consensus tree(s) to the median of the lengths of the corresponding edges of the input trees.
+        - ``median-age``: adjusts the edge lengths of the target/consensus tree(s) such that the node ages correspond to the median age of corresponding nodes of the input trees [requires rooted ultrametric trees].
+        - ``mean-age``: adjusts the edge lengths of the target/consensus tree(s) such that the node ages correspond to the mean age of corresponding nodes of the input trees [requires rooted ultrametric trees].
+        - ``support``: edge lengths will be set to the support value for the split represented by the edge.
+        - ``keep``: do not change the existing edge lengths of the target topology.
+        - ``clear``: all edge lengths will be removed
+
+So, for example, to construct a consensus tree of a post-burnin set of ultrametric trees, with the node ages set to the *mean* instead of the median node age::
+
+    $ sumtrees.py --set-edges=mean-age --summarize-node-ages --burnin=200 beast1.trees beast2.trees beast3.trees
+    $ sumtrees.py -e mean-age --summarize-node-ages -b 200 beast1.trees beast2.trees beast3.trees
+
+Or to set the edges of a user-specifed tree to the median edge length of the input trees::
+
+    $ sumtrees.py --set-edges=median-length --target=mle.tre boots1.tre boots2.tre
+    $ sumtrees.py -e median-length -t mle.tre boots1.tre boots2.tre
+
+Rooting the Target Topology
+---------------------------
+
+The following options allow for re-rooting of the target topology or topologies::
+
+  --root-target-at-outgroup TAXON-LABEL
+                        Root target tree(s) using specified taxon as outgroup.
+  --root-target-at-midpoint
+                        Root target tree(s) at midpoint.
+  --set-outgroup TAXON-LABEL
+                        Rotate the target trees such the specified taxon is in
+                        the outgroup position, but do not explicitly change
+                        the target tree rooting.
+
+For example::
+
+    $ sumtrees.py --root-target-at-outgroup Python_regius --target=mle.tre boots1.tre boots2.tre
+    $ sumtrees.py --root-target-at-midpoint -s mcct trees1.tre trees2.tre
+    $ sumtrees.py --set-outgroup Python_regius -s mcct trees1.tre trees2.tre
+
+.. note::
+
+    Note that, under the Nexus/Newick standards, underscores are automatically converted to spaces. So the taxon label in the above specification is read by DendroPy *not* as "Python_regius", but "Python regius". The same holds for the input tree files. If you want to suppress this conversion of underscores to spaces, specify the "``--preserve-underscores``" flag::
+
+        $ sumtrees.py --preserve-underscores --root-target-at-outgroup Python_regius --target=mle.tre boots1.tre boots2.tre
+        $ sumtrees.py --preserve-underscores --set-outgroup Python_regius -s mcct trees1.tre trees2.tre
+
+Parallelizing SumTrees
+======================
+
+Basics
+------
+
+If you have multiple input (support) files, you can greatly increase the performance of SumTrees by running it in parallel mode.
+In parallel mode, each input source will be handled in a separate process, resulting in a speed-up linearly proportional to the total number of processes running in parallel.
+At its most basic, running in parallel mode involves nothing more than adding the "``-m``" or "``--multiprocessing``" option to the SumTrees invocation, and passing in the number of parallel processes to run.
+So, for example, if you have four tree files that you want to summarize, and you want to run these using two processes in parallel::
+
+    $ sumtrees.py --multiprocessing=2 -o result.tre phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+    $ sumtrees.py -m 2 -o result.tre phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+
+Or, to run in four processes simultaneously::
+
+    $ sumtrees.py --multiprocessing=4 phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+    $ sumtrees.py -m 4 phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+
+If you want to use all the available cores on the current machine, use the "``-M``" or "``--maximum-multiprocessing``" flag::
+
+    $ sumtrees.py --maximum-multiprocessing phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+    $ sumtrees.py -M phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+
+
+Other options as described above, can, of course be added as needed::
+
+    $ sumtrees.py --multiprocessing=4 --burnin=200 phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+    $ sumtrees.py -m 4 -b 200 phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+
+    $ sumtrees.py --maximum-multiprocessing --burnin=200 --min-clade-freq=0.75 phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+    $ sumtrees.py -M -b 200 -f0.75 phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+
+    $ sumtrees.py --maximum-multiprocessing --burnin=200 --min-clade-freq=0.75 --output-tree-filepath=con.tre phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+    $ sumtrees.py -M -b 200 -f0.75 -o con.tre phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+
+Parallelization Strategy: Deciding on the Number of Processes
+-------------------------------------------------------------
+
+In parallel mode, SumTrees parallelizes by input files, which means that the maximum number of processes that can be run is limited to the number of input files.
+If you have four support tree files as input sources, as in the above example ("``phylo.run1.tre``", "``phylo.run2.tre``", "``phylo.run3.tre``", and "``phylo.run4.tre``"), then you can run from 2 up to 4 processes in parallel.
+If you have 40 tree source or support files, then you can run from 2 up to 40 processes in parallel.
+
+Note that there is a difference between the number of *processes* that SumTrees runs, and the number of *processors* or cores available on your machine or in your given hardware context.
+When running SumTrees in parallel mode, you can specify *any* number of parallel *processes*, up to the number of support files, even if the number of processes greatly exceeds the number of *processors* available.
+For example, you might have an octo-core machine available, which means that you have 8 processors available.
+If your analysis has 40 independent support files, you can invoke SumTrees with 40 parallel processes::
+
+    $ sumtrees.py --multiprocessing=40 -o result.tre t1.tre ... t40.tre
+
+SumTrees will indeed launch 40 parallel processes to process the files, and it will seem like 40 processes are being executed in parallel.
+In reality, however, the operating system is actually cycling the processes through the available processors in rapid succession, such that, on a nanosecond time-scale, only 8 processes are actually executing simultaneously: one on each of the available cores.
+While your run should complete without problems when oversubscribing the hardware in this way, there is going to be some degree of performance hit due to the overhead involved in managing the cycling of the processes through the processors.
+On some operating systems and hardware contexts, depending on the magnitude of oversubscription, this can be considerable.
+Thus it probably is a good idea to match the number of processes to the number of processors available.
+
+If you do not mind using all the available cores on your machine, you can use the "``--maximum-multiprocessing``" or "``-M``" flag to request this, instead of using a specific number.
+
+Another issue to consider is an even distribution of workload.
+Assuming that each of your input support files have the same number of trees, then it makes sense to specify a number of processes that is a factor of the number of input files.
+So, for example, if you have 8 input files to be summarized, you will get the best performance out of SumTrees by specifying 2, 4, or 8 processes, with the actual number given by the maximum number of processors available or that you want to dedicate to this task.
+
+Running Parallel-Mode SumTrees in a Parallel Environment on a High-Performance Computing (HPC) Cluster
+------------------------------------------------------------------------------------------------------
+
+Unfortunately, the diversity and idiosyncracies in various HPC configurations and set-ups are so great that it is very difficult to provide general recipes on how to run parallel-mode SumTrees in a parallel environment on an HPC cluster.
+However, in all cases, all you really need to do is to set up an appropriate parallel execution environment on the cluster, requesting a specific number of processors from the cluster scheduler software, and then tell SumTrees to run the same number of processes.
+
+For example, if your cluster uses the `Sun Grid Engine <http://gridengine.sunsource.net/>`_ as its scheduler, then you might use a job script such as the following:
+
+    .. parsed-literal::
+
+        #!/bin/sh
+        #$ -cwd
+        #$ -V
+        #$ -pe mpi 4
+        sumtrees.py -m4 -o result.con.tre --burnin 100 mcmc1.tre mcmc2.tre mcmc3.tre mcmc4.tre
+
+The "``#$ -pe mpi 4``" line tells the SGE scheduler to allocate four processors to the job in the "``mpi``" parallel environment, while the "``-m4``" part of the SumTrees command tells SumTrees to run 4 processes in parallel.
+
+If you are using PBS/Torque as a scheduler, the equivalent job script might be:
+
+    .. parsed-literal::
+
+        #!/bin/sh
+        #PBS -l nodes=2:ncpus=2
+        cd $PBS_O_WORKDIR
+        sumtrees.py -m4 -o result.con.tre --burnin 100 mcmc1.tre mcmc2.tre mcmc3.tre mcmc4.tre
+
+Here, the "``#PBS -l nodes=2:ncpus=2``" requests two processors on two nodes, while the "``-m4``" part of the SumTrees command, as before, tells SumTrees to run 4 processes in parallel.
+
+The particular job scripts you use will almost certainly be different, varying with the cluster job/load management software, scheduler and computing resources.
+Apart from the parallel environment name, number of processors and/or machine configuration, you might also need to provide a queue name, a maximum run time limit, and a soft or hard memory limit.
+None of it should make any difference to how SumTrees is actually invoked: you would still just use the "``-m``" or "``--multiprocessing``" flags to specify that SumTrees runs in parallel mode with a particular number of parallel processes.
+You just need to check with your cluster administrators or documentation to make sure your job script or execution context provides sufficient processors to match the number of processes that you want run (as well as other resources, e.g. wall time limit and memory limit, that SumTrees needs to finish its job).
+
+Improving Performance
+=====================
+
+    * Run SumTrees in parallel mode (see "`Running in Parallel Mode`_" or "`Parallelizing SumTrees`_")::
+
+        $ sumtrees.py -m 4 phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+
+    * Reduce the tree-processing logging frequency.
+      By default, SumTrees will report back every 500th tree in a tree file, just to let you know where it is and to give you a sense of how long it will take. You can use the "``-g``"  or "``--log-frequency``" flag to control this behavior. If you have very large files, you may be content to have SumTrees report back every 5000th or even every 10000th tree instead::
+
+        $ sumtrees.py --log-frequency=10000 phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+        $ sumtrees.py -g10000 phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+
+      If you are content to let SumTrees run without reporting its progress *within* each file (SumTrees will still report back whenever it begins or ends working on a file), then you can switch off tree processing logging altogether by specifying a logging frequency of 0::
+
+        $ sumtrees.py --log-frequency=0 phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+        $ sumtrees.py -g0 phylo.run1.tre phylo.run2.tre phylo.run3.tre phylo.run4.tre
+
+Troubleshooting
+===============
+
+Prerequisites
+-------------
+
+|DendroPy|_ is a |Python|_ library.
+It requires and presupposes not only the existence of a |Python|_ installation on your system, but also that this Python installation is available on the system path.
+
+The biggest problem faced by most users is not so much not having |Python|_ installed, but not having the correct version of Python installed. You can check which version of Python you have running by typing::
+
+    $ python -V
+
+SumTrees, and the |DendroPy|_ library that it is part of, works out-of-the-box with any 2.x Python version 2.4 or greater.
+
+SumTrees will not work with versions of |Python|_ prior to 2.4, such as |Python|_ 2.3. It can probably be made to work pretty easily, and if you have strong enough motivation to use Python 2.3, it might be worth the effort for you.
+It is not for me.
+
+SumTrees (and |DendroPy|) is currently not compatible with Python 3.
+
+My Computer Does Not Know What a Python Is
+-------------------------------------------
+
+If you get a message like::
+
+    python: command not found
+
+it is either because |Python|_  is not installed on your system, or is not found on the system path.
+
+SumTrees is a |Python|_ script, and, as such, you will need to have a |Python|_  interpreter installed on your system.
+
+Otherwise, you must download and install Python 2.6 from: http://www.python.org/download/releases/2.6/.
+For your convenience, the clicking on the following links should lead you directly to the appropriate pre-compiled download:
+
+* `Mac OS X <http://www.python.org/ftp/python/2.6/python-2.6-macosx.dmg>`_
+* `Microsoft Windows <http://www.python.org/ftp/python/2.6/python-2.6.msi>`_
+
+For other platforms, the usual "``./configure``", "``make``", and "``sudo make install``" dance should get you up and running with the following:
+
+* `Cross-platform Source <http://www.python.org/ftp/python/2.6/Python-2.6.tgz>`_
+
+Microsoft Windows users should also refer to the `"Python Windows FAQ" <http://www.python.org/doc/faq/windows.html>`_
+(http://www.python.org/doc/faq/windows.html)
+after installing Python, and pay particular attention to the
+`"How do I run a Python program under Windows?" <http://www.python.org/doc/faq/windows.html#id2>`_ section, as it will
+help them greatly in getting Python up and running on the system path.
+
+Manual Installation
+===================
+
+The |DendroPy|_ library is actually quite straightforward to install manually, especially if you have any familiarity with Python and how Python files are organized.
+There are a couple of different things you could do:
+
+* Add the current location of the "``dendropy``" subdirectory to your Python path environmental variable, "``$PYTHONPATH``", and place the file "``programs/sumtrees.py``" on your system path.
+
+* Copy (or symlink) the "``dendropy``" directory to the "``site-packages``" directory of your Python installation, and place the file "``programs/sumtrees.py``" on your system path.
+
+Repository Access
+=================
+The |DendroPy|_ public-access |Git|_ repository can be cloned from:
+
+        |dendropy_public_repo_url|
+
+Bugs, Suggestions, Comments, etc.
+=================================
+
+If you encounter any problems, errors, crashes etc. while using this program, please let me (|js|_) know at jeet at ku.edu. If you include the term "sumtrees" anywhere on the subject line (e.g. "Problem such-and-such with sumtrees"), it would help greatly with getting through the spam filter. Please include all the datafiles involved, as
+well as the complete command used (with all the options and parameters) and the complete error message returned (simply cutting-and-pasting the terminal text should work fine).
+Please feel free to contact me if you have any other questions, suggestions or comments as well.
+
+How to Cite this Program
+=========================
+
+If you use this program in your analysis, please cite it as:
+
+    |dendropy_citation|
+
+In the text of your paper, if you want to look like you know what you are doing, you should probably also mention explicitly that you specifically used the SumTrees program of the |DendroPy|_ package, as well as the particular version numbers of SumTrees and |DendroPy|_ that you used.
+
+.. include:: ../license.inc
+.. include:: ../acknowledgements.inc
+
diff --git a/doc/source/schemas/fasta.rst b/doc/source/schemas/fasta.rst
new file mode 100644
index 0000000..ef96035
--- /dev/null
+++ b/doc/source/schemas/fasta.rst
@@ -0,0 +1,182 @@
+*****
+FASTA
+*****
+
+.. contents::
+    :local:
+    :backlinks: none
+
+Description
+===========
+
+    * http://en.wikipedia.org/wiki/FASTA_format
+    * Lipman, DJ; Pearson, WR (1985). "Rapid and sensitive protein similarity searches". Science 227 (4693): 1435–41. `doi:10.1126/science.2983426 <http://www.sciencemag.org/content/227/4693/1435>`_.
+
+Reading
+=======
+
+Schema-Specific Keyword Arguments
+---------------------------------
+
+.. autokeywordargumentsonly:: dendropy.dataio.fastareader.FastaReader.__init__
+
+Supported Methods
+-----------------
+
+``DnaCharacterMatrix.get``
+..........................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_dnacharactermatrix_get.py
+
+``RnaCharacterMatrix.get``
+..........................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_rnacharactermatrix_get.py
+
+``ProteinCharacterMatrix.get``
+..............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_proteincharactermatrix_get.py
+
+``RestrictionSitesCharacterMatrix.get``
+.......................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_restrictionsitescharactermatrix_get.py
+
+``InfiniteSitesCharacterMatrix.get``
+....................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_infinitesitescharactermatrix_get.py
+
+``StandardCharacterMatrix.get``
+...............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_standardcharactermatrix_get.py
+
+``DataSet.get``
+...............
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.get>`)
+
+Note that the type of data needs to be specified using the ``data_type``
+keyword argument.
+
+.. literalinclude:: /schemas/interfaces/fasta_dataset_get.py
+
+``DataSet.read``
+................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.read>`)
+
+Note that the type of data needs to be specified using the ``data_type``
+keyword argument.
+
+.. literalinclude:: /schemas/interfaces/fasta_dataset_read.py
+
+Writing
+=======
+
+.. _schema_specific_keyword_arguments_writing_fasta:
+
+Schema-Specific Keyword Arguments
+---------------------------------
+
+.. autokeywordargumentsonly:: dendropy.dataio.fastawriter.FastaWriter.__init__
+
+Supported Methods
+-----------------
+
+``DnaCharacterMatrix.write``
+.............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_write.py
+
+``DnaCharacterMatrix.as_string``
+................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_as_string.py
+
+
+``RnaCharacterMatrix.write``
+.............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_write.py
+
+``RnaCharacterMatrix.as_string``
+................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_as_string.py
+
+
+``ProteinCharacterMatrix.write``
+.................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_write.py
+
+``ProteinCharacterMatrix.as_string``
+.....................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_as_string.py
+
+
+``RestrictionSitesCharacterMatrix.write``
+.........................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_write.py
+
+``RestrictionSitesCharacterMatrix.as_string``
+.............................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_as_string.py
+
+
+``InfiniteSitesCharacterMatrix.write``
+......................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_write.py
+
+``InfiniteSitesCharacterMatrix.as_string``
+..........................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_as_string.py
+
+
+``StandardCharacterMatrix.write``
+.................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_write.py
+
+``StandardCharacterMatrix.as_string``
+.....................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_as_string.py
+
+
+``DataSet.write``
+.................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.write>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_write.py
+
+``DataSet.as_string``
+.....................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/fasta_as_string.py
+
diff --git a/doc/source/schemas/index.rst b/doc/source/schemas/index.rst
new file mode 100644
index 0000000..815e83f
--- /dev/null
+++ b/doc/source/schemas/index.rst
@@ -0,0 +1,36 @@
+####################################################################
+DendroPy Schemas: Phylogenetic and Evolutionary Biology Data Formats
+####################################################################
+
+.. _Specifying_the_Data_Source_Format:
+.. _Specifying_the_Data_Writing_Format:
+
+In DendroPy, the format of the data is called its "schema".
+All the data import and export methods require specification of the data format through a "``schema``" keyword argument, which takes a *schema specification string* as a value.
+This is a string identifer that uniquely maps to a particular format, and should be one of the following values:
+
+    - ":doc:`fasta </schemas/fasta>`"
+    - ":doc:`newick </schemas/newick>`"
+    - ":doc:`nexus </schemas/nexus>`"
+    - ":doc:`nexml </schemas/nexml>`"
+    - ":doc:`phylip </schemas/phylip>`"
+
+
+.. _Schema_Specific_Keyword_Arguments:
+.. _Customizing_the_Data_Writing_Format:
+
+Furthermore, the various data reading and writing methods take other keywords arguments that vary depending on the schema to customize the way the data is parsed when read or formatted when written.
+These *schema-specific keyword arguments* are detailed in this section under each schema description, along with information and code templates/examples.
+
+
+Contents
+========
+
+.. toctree::
+    :maxdepth: 3
+
+    fasta
+    newick
+    nexml
+    nexus
+    phylip
diff --git a/doc/source/schemas/interfaces/fasta_as_string.py b/doc/source/schemas/interfaces/fasta_as_string.py
new file mode 100644
index 0000000..6173a93
--- /dev/null
+++ b/doc/source/schemas/interfaces/fasta_as_string.py
@@ -0,0 +1,6 @@
+s = d.as_string(
+        schema="fasta",
+        wrap=True,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/fasta_dataset_get.py b/doc/source/schemas/interfaces/fasta_dataset_get.py
new file mode 100644
index 0000000..d0df964
--- /dev/null
+++ b/doc/source/schemas/interfaces/fasta_dataset_get.py
@@ -0,0 +1,36 @@
+d = dendropy.DataSet.get(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="dna",
+        )
+
+d = dendropy.DataSet.get(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="dna",
+        )
+
+d = dendropy.DataSet.get(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="standard",
+        )
+
+d = dendropy.DataSet.get(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="standard",
+        default_state_alphabet=dendropy.new_standard_state_alphabet("01")
+        )
diff --git a/doc/source/schemas/interfaces/fasta_dataset_read.py b/doc/source/schemas/interfaces/fasta_dataset_read.py
new file mode 100644
index 0000000..f45fae4
--- /dev/null
+++ b/doc/source/schemas/interfaces/fasta_dataset_read.py
@@ -0,0 +1,40 @@
+d = dendropy.DataSet()
+d.read(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="dna",
+        )
+
+d = dendropy.DataSet()
+d.read(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="protein",
+        )
+
+d = dendropy.DataSet()
+d.read(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="standard",
+        )
+
+d = dendropy.DataSet()
+d.read(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="standard",
+        default_state_alphabet=dendropy.new_standard_state_alphabet("abc"),
+        )
diff --git a/doc/source/schemas/interfaces/fasta_dnacharactermatrix_get.py b/doc/source/schemas/interfaces/fasta_dnacharactermatrix_get.py
new file mode 100644
index 0000000..d6f20dc
--- /dev/null
+++ b/doc/source/schemas/interfaces/fasta_dnacharactermatrix_get.py
@@ -0,0 +1,9 @@
+d = dendropy.DnaCharacterMatrix.get(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/fasta_infinitesitescharactermatrix_get.py b/doc/source/schemas/interfaces/fasta_infinitesitescharactermatrix_get.py
new file mode 100644
index 0000000..2678e97
--- /dev/null
+++ b/doc/source/schemas/interfaces/fasta_infinitesitescharactermatrix_get.py
@@ -0,0 +1,9 @@
+d = dendropy.InfiniteSitesCharacterMatrix.get(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/fasta_proteincharactermatrix_get.py b/doc/source/schemas/interfaces/fasta_proteincharactermatrix_get.py
new file mode 100644
index 0000000..8cab4c4
--- /dev/null
+++ b/doc/source/schemas/interfaces/fasta_proteincharactermatrix_get.py
@@ -0,0 +1,9 @@
+d = dendropy.ProteinCharacterMatrix.get(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/fasta_restrictionsitescharactermatrix_get.py b/doc/source/schemas/interfaces/fasta_restrictionsitescharactermatrix_get.py
new file mode 100644
index 0000000..09a4172
--- /dev/null
+++ b/doc/source/schemas/interfaces/fasta_restrictionsitescharactermatrix_get.py
@@ -0,0 +1,9 @@
+d = dendropy.RestrictionSitesCharacterMatrix.get(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/fasta_rnacharactermatrix_get.py b/doc/source/schemas/interfaces/fasta_rnacharactermatrix_get.py
new file mode 100644
index 0000000..159a702
--- /dev/null
+++ b/doc/source/schemas/interfaces/fasta_rnacharactermatrix_get.py
@@ -0,0 +1,8 @@
+d = dendropy.RnaCharacterMatrix.get(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        )
diff --git a/doc/source/schemas/interfaces/fasta_standardcharactermatrix_get.py b/doc/source/schemas/interfaces/fasta_standardcharactermatrix_get.py
new file mode 100644
index 0000000..c669c99
--- /dev/null
+++ b/doc/source/schemas/interfaces/fasta_standardcharactermatrix_get.py
@@ -0,0 +1,19 @@
+d = dendropy.StandardCharacterMatrix.get(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        default_state_alphabet=None,
+        )
+
+d = dendropy.StandardCharacterMatrix.get(
+        path="data.fas",
+        schema="fasta",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        default_state_alphabet=dendropy.new_standard_state_alphabet("0123456789"),
+        )
diff --git a/doc/source/schemas/interfaces/fasta_write.py b/doc/source/schemas/interfaces/fasta_write.py
new file mode 100644
index 0000000..1b6101a
--- /dev/null
+++ b/doc/source/schemas/interfaces/fasta_write.py
@@ -0,0 +1,7 @@
+d.write(
+        path="data.fas",
+        schema="fasta",
+        wrap=True,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/newick_as_string.py b/doc/source/schemas/interfaces/newick_as_string.py
new file mode 100644
index 0000000..a7a815d
--- /dev/null
+++ b/doc/source/schemas/interfaces/newick_as_string.py
@@ -0,0 +1,22 @@
+s = d.as_string(
+        schema='newick',
+        suppress_leaf_taxon_labels=False,
+        suppress_leaf_node_labels=True,
+        suppress_internal_taxon_labels=False,
+        suppress_internal_node_labels=False,
+        suppress_rooting=False,
+        suppress_edge_lengths=False,
+        unquoted_underscores=False,
+        preserve_spaces=False,
+        store_tree_weights=False,
+        taxon_token_map=None,
+        suppress_annotations=True,
+        annotations_as_nhx=False,
+        suppress_item_comments=True,
+        node_label_element_separator=' ',
+        node_label_compose_fn=None,
+        edge_label_compose_fn=None,
+        real_value_format_specifier='',
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/newick_dataset_get.py b/doc/source/schemas/interfaces/newick_dataset_get.py
new file mode 100644
index 0000000..bfd39a7
--- /dev/null
+++ b/doc/source/schemas/interfaces/newick_dataset_get.py
@@ -0,0 +1,24 @@
+data_set = dendropy.DataSet.get(
+    path="path/to/file",
+    schema="newick",
+    label=None,
+    taxon_namespace=None,
+    exclude_chars=False,
+    exclude_trees=False,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
+
+
diff --git a/doc/source/schemas/interfaces/newick_dataset_read.py b/doc/source/schemas/interfaces/newick_dataset_read.py
new file mode 100644
index 0000000..bb18222
--- /dev/null
+++ b/doc/source/schemas/interfaces/newick_dataset_read.py
@@ -0,0 +1,23 @@
+data_set = dendropy.DataSet()
+data_set.read(
+    path="path/to/file",
+    schema="newick",
+    exclude_chars=False,
+    exclude_trees=False,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
+
+
diff --git a/doc/source/schemas/interfaces/newick_tree_get.py b/doc/source/schemas/interfaces/newick_tree_get.py
new file mode 100644
index 0000000..5457742
--- /dev/null
+++ b/doc/source/schemas/interfaces/newick_tree_get.py
@@ -0,0 +1,23 @@
+tree = dendropy.Tree.get(
+    path="tree.tre",
+    schema="newick",
+    label=None,
+    taxon_namespace=None,
+    collection_offset=None,
+    tree_offset=None,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
+
diff --git a/doc/source/schemas/interfaces/newick_tree_get_from_path.py b/doc/source/schemas/interfaces/newick_tree_get_from_path.py
new file mode 100644
index 0000000..00ead25
--- /dev/null
+++ b/doc/source/schemas/interfaces/newick_tree_get_from_path.py
@@ -0,0 +1,22 @@
+tree = dendropy.Tree.get_from_path(
+    src="tree.tre",
+    schema="newick",
+    label=None,
+    taxon_namespace=None,
+    collection_offset=None,
+    tree_offset=None,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
diff --git a/doc/source/schemas/interfaces/newick_tree_get_from_stream.py b/doc/source/schemas/interfaces/newick_tree_get_from_stream.py
new file mode 100644
index 0000000..f2a1974
--- /dev/null
+++ b/doc/source/schemas/interfaces/newick_tree_get_from_stream.py
@@ -0,0 +1,22 @@
+tree = dendropy.Tree.get_from_stream(
+    src=open("tree.tre", "r"),
+    schema="newick",
+    label=None,
+    taxon_namespace=None,
+    collection_offset=None,
+    tree_offset=None,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
diff --git a/doc/source/schemas/interfaces/newick_treearray_read.py b/doc/source/schemas/interfaces/newick_treearray_read.py
new file mode 100644
index 0000000..bb44aea
--- /dev/null
+++ b/doc/source/schemas/interfaces/newick_treearray_read.py
@@ -0,0 +1,22 @@
+tree_array = dendropy.TreeArray()
+tree_array.read(
+    path="path/to/file",
+    schema="newick",
+    label=None,
+    collection_offset=None,
+    tree_offset=None,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
diff --git a/doc/source/schemas/interfaces/newick_treelist_get.py b/doc/source/schemas/interfaces/newick_treelist_get.py
new file mode 100644
index 0000000..4024c6d
--- /dev/null
+++ b/doc/source/schemas/interfaces/newick_treelist_get.py
@@ -0,0 +1,21 @@
+tree_list = dendropy.TreeList.get(
+    path="path/to/file",
+    schema="newick",
+    label=None,
+    taxon_namespace=None,
+    collection_offset=None,
+    tree_offset=None,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/newick_treelist_read.py b/doc/source/schemas/interfaces/newick_treelist_read.py
new file mode 100644
index 0000000..bec8db1
--- /dev/null
+++ b/doc/source/schemas/interfaces/newick_treelist_read.py
@@ -0,0 +1,20 @@
+tree_list = dendropy.TreeList()
+tree_list.read(
+    path="path/to/file",
+    schema="newick",
+    collection_offset=None,
+    tree_offset=None,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/newick_write.py b/doc/source/schemas/interfaces/newick_write.py
new file mode 100644
index 0000000..d4ccfb5
--- /dev/null
+++ b/doc/source/schemas/interfaces/newick_write.py
@@ -0,0 +1,23 @@
+d.write(
+        path='outputfile.tre',
+        schema='newick',
+        suppress_leaf_taxon_labels=False,
+        suppress_leaf_node_labels=True,
+        suppress_internal_taxon_labels=False,
+        suppress_internal_node_labels=False,
+        suppress_rooting=False,
+        suppress_edge_lengths=False,
+        unquoted_underscores=False,
+        preserve_spaces=False,
+        store_tree_weights=False,
+        taxon_token_map=None,
+        suppress_annotations=True,
+        annotations_as_nhx=False,
+        suppress_item_comments=True,
+        node_label_element_separator=' ',
+        node_label_compose_fn=None,
+        edge_label_compose_fn=None,
+        real_value_format_specifier='',
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/nexml_chars_as_string.py b/doc/source/schemas/interfaces/nexml_chars_as_string.py
new file mode 100644
index 0000000..79ac34f
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_chars_as_string.py
@@ -0,0 +1,8 @@
+s = d.as_string(
+        schema='nexml',
+        markup_as_sequences=False,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
+
+
diff --git a/doc/source/schemas/interfaces/nexml_chars_write.py b/doc/source/schemas/interfaces/nexml_chars_write.py
new file mode 100644
index 0000000..a47ce51
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_chars_write.py
@@ -0,0 +1,9 @@
+d.write(
+        path='data.xml',
+        schema='nexml',
+        markup_as_sequences=False,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
+
+
diff --git a/doc/source/schemas/interfaces/nexml_dataset_as_string.py b/doc/source/schemas/interfaces/nexml_dataset_as_string.py
new file mode 100644
index 0000000..ca85044
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_dataset_as_string.py
@@ -0,0 +1,8 @@
+s = d.as_string(
+        schema='nexml',
+        markup_as_sequences=False,
+        suppress_unreferenced_taxon_namespaces=False,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
+
diff --git a/doc/source/schemas/interfaces/nexml_dataset_get.py b/doc/source/schemas/interfaces/nexml_dataset_get.py
new file mode 100644
index 0000000..e7cf0db
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_dataset_get.py
@@ -0,0 +1,14 @@
+data_set = dendropy.DataSet.get(
+    path="path/to/file",
+    schema="nexus",
+    label=None,
+    taxon_namespace=None,
+    exclude_chars=False,
+    exclude_trees=False,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
+
+
diff --git a/doc/source/schemas/interfaces/nexml_dataset_read.py b/doc/source/schemas/interfaces/nexml_dataset_read.py
new file mode 100644
index 0000000..214a074
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_dataset_read.py
@@ -0,0 +1,13 @@
+data_set = dendropy.DataSet()
+data_set.read(
+    path="path/to/file",
+    schema="nexml",
+    exclude_chars=False,
+    exclude_trees=False,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
+
+
diff --git a/doc/source/schemas/interfaces/nexml_dataset_write.py b/doc/source/schemas/interfaces/nexml_dataset_write.py
new file mode 100644
index 0000000..af65106
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_dataset_write.py
@@ -0,0 +1,9 @@
+d.write(
+        path='data.xml',
+        schema='nexml',
+        markup_as_sequences=False,
+        suppress_unreferenced_taxon_namespaces=False,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
+
diff --git a/doc/source/schemas/interfaces/nexml_dnacharactermatrix_get.py b/doc/source/schemas/interfaces/nexml_dnacharactermatrix_get.py
new file mode 100644
index 0000000..25783ad
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_dnacharactermatrix_get.py
@@ -0,0 +1,10 @@
+chars = dendropy.DnaCharacterMatrix.get(
+    path="path/to/file",
+    schema="nexml",
+    label=None,
+    taxon_namespace=None,
+    matrix_offset=None,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexml_infinitesitescharactermatrix_get.py b/doc/source/schemas/interfaces/nexml_infinitesitescharactermatrix_get.py
new file mode 100644
index 0000000..0fd0efd
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_infinitesitescharactermatrix_get.py
@@ -0,0 +1,10 @@
+chars = dendropy.InfiniteSitesCharacterMatrix.get(
+    path="path/to/file",
+    schema="nexml",
+    label=None,
+    taxon_namespace=None,
+    matrix_offset=None,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexml_proteincharactermatrix_get.py b/doc/source/schemas/interfaces/nexml_proteincharactermatrix_get.py
new file mode 100644
index 0000000..3758f13
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_proteincharactermatrix_get.py
@@ -0,0 +1,10 @@
+chars = dendropy.ProteinCharacterMatrix.get(
+    path="path/to/file",
+    schema="nexml",
+    label=None,
+    taxon_namespace=None,
+    matrix_offset=None,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexml_restrictionsitescharactermatrix_get.py b/doc/source/schemas/interfaces/nexml_restrictionsitescharactermatrix_get.py
new file mode 100644
index 0000000..5ef048e
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_restrictionsitescharactermatrix_get.py
@@ -0,0 +1,10 @@
+chars = dendropy.RestrictionSitesCharacterMatrix.get(
+    path="path/to/file",
+    schema="nexml",
+    label=None,
+    taxon_namespace=None,
+    matrix_offset=None,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexml_rnacharactermatrix_get.py b/doc/source/schemas/interfaces/nexml_rnacharactermatrix_get.py
new file mode 100644
index 0000000..8be1c12
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_rnacharactermatrix_get.py
@@ -0,0 +1,10 @@
+chars = dendropy.RnaCharacterMatrix.get(
+    path="path/to/file",
+    schema="nexml",
+    label=None,
+    taxon_namespace=None,
+    matrix_offset=None,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexml_standardcharactermatrix_get.py b/doc/source/schemas/interfaces/nexml_standardcharactermatrix_get.py
new file mode 100644
index 0000000..bf4bf7f
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_standardcharactermatrix_get.py
@@ -0,0 +1,10 @@
+chars = dendropy.StandardCharacterMatrix.get(
+    path="path/to/file",
+    schema="nexml",
+    label=None,
+    taxon_namespace=None,
+    matrix_offset=None,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexml_tree_get.py b/doc/source/schemas/interfaces/nexml_tree_get.py
new file mode 100644
index 0000000..7059b43
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_tree_get.py
@@ -0,0 +1,11 @@
+tree = dendropy.Tree.get(
+    path="path/to/file",
+    schema="nexml",
+    label=None,
+    taxon_namespace=None,
+    collection_offset=None,
+    tree_offset=None,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexml_treearray_get.py b/doc/source/schemas/interfaces/nexml_treearray_get.py
new file mode 100644
index 0000000..ba6cd84
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_treearray_get.py
@@ -0,0 +1,11 @@
+tree_array = dendropy.TreeArray.get(
+    path="path/to/file",
+    schema="nexml",
+    label=None,
+    taxon_namespace=None,
+    collection_offset=None,
+    tree_offset=None,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexml_treearray_read.py b/doc/source/schemas/interfaces/nexml_treearray_read.py
new file mode 100644
index 0000000..d9884a5
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_treearray_read.py
@@ -0,0 +1,10 @@
+tree_array = dendropy.TreeArray()
+tree_array.read(
+    path="path/to/file",
+    schema="nexml",
+    collection_offset=None,
+    tree_offset=None,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexml_treelist_get.py b/doc/source/schemas/interfaces/nexml_treelist_get.py
new file mode 100644
index 0000000..1d21ff0
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_treelist_get.py
@@ -0,0 +1,11 @@
+tree_list = dendropy.TreeList.get(
+    path="path/to/file",
+    schema="nexml",
+    label=None,
+    taxon_namespace=None,
+    collection_offset=None,
+    tree_offset=None,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexml_treelist_read.py b/doc/source/schemas/interfaces/nexml_treelist_read.py
new file mode 100644
index 0000000..a3ea052
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_treelist_read.py
@@ -0,0 +1,10 @@
+tree_list = dendropy.TreeList()
+tree_list.read(
+    path="path/to/file",
+    schema="nexml",
+    collection_offset=None,
+    tree_offset=None,
+    default_namespace="http://www.nexml.org/2009",
+    case_sensitive_taxon_labels=False,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexml_trees_as_string.py b/doc/source/schemas/interfaces/nexml_trees_as_string.py
new file mode 100644
index 0000000..5b2ce19
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_trees_as_string.py
@@ -0,0 +1,6 @@
+s = d.as_string(
+        schema='nexml',
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
+
diff --git a/doc/source/schemas/interfaces/nexml_trees_write.py b/doc/source/schemas/interfaces/nexml_trees_write.py
new file mode 100644
index 0000000..17e0fe2
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexml_trees_write.py
@@ -0,0 +1,7 @@
+d.write(
+        path='trees.xml',
+        schema='nexml',
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
+
diff --git a/doc/source/schemas/interfaces/nexus_chars_as_string.py b/doc/source/schemas/interfaces/nexus_chars_as_string.py
new file mode 100644
index 0000000..8b414a2
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_chars_as_string.py
@@ -0,0 +1,20 @@
+s = d.as_string(
+        schema='nexus',
+        simple=False,
+        suppress_taxa_blocks=None,
+        suppress_block_titles=None,
+        file_comments=[],
+        preamble_blocks=[],
+        supplemental_blocks=[],
+        allow_multiline_comments=True,
+        continuous_character_state_value_format_fn=None,
+        discrete_character_state_value_format_fn=None,
+        unquoted_underscores=False,
+        preserve_spaces=False,
+        suppress_annotations=False,
+        annotations_as_nhx=False,
+        suppress_item_comments=False,
+        real_value_format_specifier='',
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/nexus_chars_write.py b/doc/source/schemas/interfaces/nexus_chars_write.py
new file mode 100644
index 0000000..7561cd5
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_chars_write.py
@@ -0,0 +1,21 @@
+d.write(
+        path='chars.nex',
+        schema='nexus',
+        simple=False,
+        suppress_taxa_blocks=None,
+        suppress_block_titles=None,
+        file_comments=[],
+        preamble_blocks=[],
+        supplemental_blocks=[],
+        allow_multiline_comments=True,
+        continuous_character_state_value_format_fn=None,
+        discrete_character_state_value_format_fn=None,
+        unquoted_underscores=False,
+        preserve_spaces=False,
+        suppress_annotations=False,
+        annotations_as_nhx=False,
+        suppress_item_comments=False,
+        real_value_format_specifier='',
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/nexus_dataset_as_string.py b/doc/source/schemas/interfaces/nexus_dataset_as_string.py
new file mode 100644
index 0000000..e9773e2
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_dataset_as_string.py
@@ -0,0 +1,32 @@
+s = d.as_string(
+        schema='nexus',
+        simple=False,
+        suppress_taxa_blocks=None,
+        suppress_unreferenced_taxon_namespaces=False,
+        suppress_block_titles=None,
+        file_comments=[],
+        preamble_blocks=[],
+        supplemental_blocks=[],
+        allow_multiline_comments=True,
+        continuous_character_state_value_format_fn=None,
+        discrete_character_state_value_format_fn=None,
+        suppress_leaf_taxon_labels=False,
+        suppress_leaf_node_labels=True,
+        suppress_internal_taxon_labels=False,
+        suppress_internal_node_labels=False,
+        suppress_rooting=False,
+        suppress_edge_lengths=False,
+        unquoted_underscores=False,
+        preserve_spaces=False,
+        store_tree_weights=False,
+        translate_tree_taxa=None,
+        suppress_annotations=False,
+        annotations_as_nhx=False,
+        suppress_item_comments=False,
+        node_label_element_separator=' ',
+        node_label_compose_fn=None,
+        edge_label_compose_fn=None,
+        real_value_format_specifier='',
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/nexus_dataset_get.py b/doc/source/schemas/interfaces/nexus_dataset_get.py
new file mode 100644
index 0000000..82d55c9
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_dataset_get.py
@@ -0,0 +1,24 @@
+data_set = dendropy.DataSet.get(
+    path="path/to/file",
+    schema="nexus",
+    label=None,
+    taxon_namespace=None,
+    exclude_chars=False,
+    exclude_trees=False,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
+
+
diff --git a/doc/source/schemas/interfaces/nexus_dataset_read.py b/doc/source/schemas/interfaces/nexus_dataset_read.py
new file mode 100644
index 0000000..e1da9ce
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_dataset_read.py
@@ -0,0 +1,23 @@
+data_set = dendropy.DataSet()
+data_set.read(
+    path="path/to/file",
+    schema="nexus",
+    exclude_chars=False,
+    exclude_trees=False,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
+
+
diff --git a/doc/source/schemas/interfaces/nexus_dataset_write.py b/doc/source/schemas/interfaces/nexus_dataset_write.py
new file mode 100644
index 0000000..9253069
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_dataset_write.py
@@ -0,0 +1,33 @@
+d.write(
+        path='dataset.nex',
+        schema='nexus',
+        simple=False,
+        suppress_taxa_blocks=None,
+        suppress_unreferenced_taxon_namespaces=False,
+        suppress_block_titles=None,
+        file_comments=[],
+        preamble_blocks=[],
+        supplemental_blocks=[],
+        allow_multiline_comments=True,
+        continuous_character_state_value_format_fn=None,
+        discrete_character_state_value_format_fn=None,
+        suppress_leaf_taxon_labels=False,
+        suppress_leaf_node_labels=True,
+        suppress_internal_taxon_labels=False,
+        suppress_internal_node_labels=False,
+        suppress_rooting=False,
+        suppress_edge_lengths=False,
+        unquoted_underscores=False,
+        preserve_spaces=False,
+        store_tree_weights=False,
+        translate_tree_taxa=None,
+        suppress_annotations=False,
+        annotations_as_nhx=False,
+        suppress_item_comments=False,
+        node_label_element_separator=' ',
+        node_label_compose_fn=None,
+        edge_label_compose_fn=None,
+        real_value_format_specifier='',
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/nexus_dnacharactermatrix_get.py b/doc/source/schemas/interfaces/nexus_dnacharactermatrix_get.py
new file mode 100644
index 0000000..3189cdd
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_dnacharactermatrix_get.py
@@ -0,0 +1,9 @@
+d = dendropy.DnaCharacterMatrix.get(
+        path="data.fas",
+        schema="nexus",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/nexus_infinitesitescharactermatrix_get.py b/doc/source/schemas/interfaces/nexus_infinitesitescharactermatrix_get.py
new file mode 100644
index 0000000..6e4b0be
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_infinitesitescharactermatrix_get.py
@@ -0,0 +1,9 @@
+d = dendropy.InfiniteSitesCharacterMatrix.get(
+        path="data.fas",
+        schema="nexus",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/nexus_proteincharactermatrix_get.py b/doc/source/schemas/interfaces/nexus_proteincharactermatrix_get.py
new file mode 100644
index 0000000..e25bba9
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_proteincharactermatrix_get.py
@@ -0,0 +1,9 @@
+d = dendropy.ProteinCharacterMatrix.get(
+        path="data.fas",
+        schema="nexus",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/nexus_restrictionsitescharactermatrix_get.py b/doc/source/schemas/interfaces/nexus_restrictionsitescharactermatrix_get.py
new file mode 100644
index 0000000..cf87ba8
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_restrictionsitescharactermatrix_get.py
@@ -0,0 +1,9 @@
+d = dendropy.RestrictionSitesCharacterMatrix.get(
+        path="data.fas",
+        schema="nexus",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/nexus_rnacharactermatrix_get.py b/doc/source/schemas/interfaces/nexus_rnacharactermatrix_get.py
new file mode 100644
index 0000000..fcc7576
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_rnacharactermatrix_get.py
@@ -0,0 +1,8 @@
+d = dendropy.DnaCharacterMatrix.get(
+        path="data.fas",
+        schema="nexus",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        )
diff --git a/doc/source/schemas/interfaces/nexus_standardcharactermatrix_get.py b/doc/source/schemas/interfaces/nexus_standardcharactermatrix_get.py
new file mode 100644
index 0000000..c656fd6
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_standardcharactermatrix_get.py
@@ -0,0 +1,19 @@
+d = dendropy.StandardCharacterMatrix.get(
+        path="data.fas",
+        schema="nexus",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        default_state_alphabet=None,
+        )
+
+d = dendropy.StandardCharacterMatrix.get(
+        path="data.fas",
+        schema="nexus",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        ignore_unrecognized_keyword_arguments=False,
+        default_state_alphabet=dendropy.new_standard_state_alphabet("0123456789"),
+        )
diff --git a/doc/source/schemas/interfaces/nexus_tree_get.py b/doc/source/schemas/interfaces/nexus_tree_get.py
new file mode 100644
index 0000000..109adab
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_tree_get.py
@@ -0,0 +1,23 @@
+tree = dendropy.Tree.get(
+    path="tree.tre",
+    schema="nexus",
+    label=None,
+    taxon_namespace=None,
+    collection_offset=None,
+    tree_offset=None,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
+
diff --git a/doc/source/schemas/interfaces/nexus_tree_get_from_path.py b/doc/source/schemas/interfaces/nexus_tree_get_from_path.py
new file mode 100644
index 0000000..7ef91c1
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_tree_get_from_path.py
@@ -0,0 +1,22 @@
+tree = dendropy.Tree.get_from_path(
+    src="tree.tre",
+    schema="nexus",
+    label=None,
+    taxon_namespace=None,
+    collection_offset=None,
+    tree_offset=None,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
diff --git a/doc/source/schemas/interfaces/nexus_tree_get_from_stream.py b/doc/source/schemas/interfaces/nexus_tree_get_from_stream.py
new file mode 100644
index 0000000..04f197c
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_tree_get_from_stream.py
@@ -0,0 +1,22 @@
+tree = dendropy.Tree.get_from_stream(
+    src=open("tree.tre", "r"),
+    schema="nexus",
+    label=None,
+    taxon_namespace=None,
+    collection_offset=None,
+    tree_offset=None,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
diff --git a/doc/source/schemas/interfaces/nexus_treearray_read.py b/doc/source/schemas/interfaces/nexus_treearray_read.py
new file mode 100644
index 0000000..c9ce9b7
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_treearray_read.py
@@ -0,0 +1,22 @@
+tree_array = dendropy.TreeArray()
+tree_array.read(
+    path="path/to/file",
+    schema="nexus",
+    label=None,
+    collection_offset=None,
+    tree_offset=None,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
+
diff --git a/doc/source/schemas/interfaces/nexus_treelist_get.py b/doc/source/schemas/interfaces/nexus_treelist_get.py
new file mode 100644
index 0000000..df2ff6f
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_treelist_get.py
@@ -0,0 +1,21 @@
+tree_list = dendropy.TreeList.get(
+    path="path/to/file",
+    schema="nexus",
+    label=None,
+    taxon_namespace=None,
+    collection_offset=None,
+    tree_offset=None,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexus_treelist_read.py b/doc/source/schemas/interfaces/nexus_treelist_read.py
new file mode 100644
index 0000000..a884581
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_treelist_read.py
@@ -0,0 +1,20 @@
+tree_list = dendropy.TreeList()
+tree_list.read(
+    path="path/to/file",
+    schema="nexus",
+    collection_offset=None,
+    tree_offset=None,
+    rooting="default-unrooted",
+    edge_length_type=float,
+    suppress_edge_lengths=False,
+    extract_comment_metadata=True,
+    store_tree_weights=False,
+    encode_splits=False,
+    finish_node_fn=None,
+    case_sensitive_taxon_labels=False,
+    preserve_underscores=False,
+    suppress_internal_node_taxa=True,
+    suppress_leaf_node_taxa=False,
+    terminating_semicolon_required=True,
+    ignore_unrecognized_keyword_arguments=False,
+    )
diff --git a/doc/source/schemas/interfaces/nexus_trees_as_string.py b/doc/source/schemas/interfaces/nexus_trees_as_string.py
new file mode 100644
index 0000000..8313693
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_trees_as_string.py
@@ -0,0 +1,28 @@
+s = d.as_string(
+        schema='nexus',
+        suppress_taxa_blocks=None,
+        suppress_block_titles=None,
+        file_comments=[],
+        preamble_blocks=[],
+        supplemental_blocks=[],
+        allow_multiline_comments=True,
+        suppress_leaf_taxon_labels=False,
+        suppress_leaf_node_labels=True,
+        suppress_internal_taxon_labels=False,
+        suppress_internal_node_labels=False,
+        suppress_rooting=False,
+        suppress_edge_lengths=False,
+        unquoted_underscores=False,
+        preserve_spaces=False,
+        store_tree_weights=False,
+        translate_tree_taxa=None,
+        suppress_annotations=False,
+        annotations_as_nhx=False,
+        suppress_item_comments=False,
+        node_label_element_separator=' ',
+        node_label_compose_fn=None,
+        edge_label_compose_fn=None,
+        real_value_format_specifier='',
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/nexus_trees_write.py b/doc/source/schemas/interfaces/nexus_trees_write.py
new file mode 100644
index 0000000..3aee2b3
--- /dev/null
+++ b/doc/source/schemas/interfaces/nexus_trees_write.py
@@ -0,0 +1,29 @@
+d.write(
+        path='trees.nex',
+        schema='nexus',
+        suppress_taxa_blocks=None,
+        suppress_block_titles=None,
+        file_comments=[],
+        preamble_blocks=[],
+        supplemental_blocks=[],
+        allow_multiline_comments=True,
+        suppress_leaf_taxon_labels=False,
+        suppress_leaf_node_labels=True,
+        suppress_internal_taxon_labels=False,
+        suppress_internal_node_labels=False,
+        suppress_rooting=False,
+        suppress_edge_lengths=False,
+        unquoted_underscores=False,
+        preserve_spaces=False,
+        store_tree_weights=False,
+        translate_tree_taxa=None,
+        suppress_annotations=False,
+        annotations_as_nhx=False,
+        suppress_item_comments=False,
+        node_label_element_separator=' ',
+        node_label_compose_fn=None,
+        edge_label_compose_fn=None,
+        real_value_format_specifier='',
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/phylip_as_string.py b/doc/source/schemas/interfaces/phylip_as_string.py
new file mode 100644
index 0000000..3154f63
--- /dev/null
+++ b/doc/source/schemas/interfaces/phylip_as_string.py
@@ -0,0 +1,8 @@
+s = d.as_string(
+        schema="phylip",
+        strict=False,
+        spaces_to_underscores=False,
+        force_unique_taxon_labels=False,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/phylip_dataset_get.py b/doc/source/schemas/interfaces/phylip_dataset_get.py
new file mode 100644
index 0000000..3380b6a
--- /dev/null
+++ b/doc/source/schemas/interfaces/phylip_dataset_get.py
@@ -0,0 +1,56 @@
+d = dendropy.DataSet.get(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="dna",
+        )
+
+d = dendropy.DataSet.get(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="protein",
+        )
+
+d = dendropy.DataSet.get(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="standard",
+        )
+
+d = dendropy.DataSet.get(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="standard",
+        default_state_alphabet=dendropy.new_standard_state_alphabet("01")
+        )
diff --git a/doc/source/schemas/interfaces/phylip_dataset_read.py b/doc/source/schemas/interfaces/phylip_dataset_read.py
new file mode 100644
index 0000000..7c028fe
--- /dev/null
+++ b/doc/source/schemas/interfaces/phylip_dataset_read.py
@@ -0,0 +1,60 @@
+d = dendropy.DataSet()
+d.read(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="dna",
+        )
+
+d = dendropy.DataSet()
+d.read(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="protein",
+        )
+
+d = dendropy.DataSet()
+d.read(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="standard",
+        )
+
+d = dendropy.DataSet()
+d.read(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        data_type="standard",
+        default_state_alphabet=dendropy.new_standard_state_alphabet("abc"),
+        )
diff --git a/doc/source/schemas/interfaces/phylip_dnacharactermatrix_get.py b/doc/source/schemas/interfaces/phylip_dnacharactermatrix_get.py
new file mode 100644
index 0000000..f9c4b0a
--- /dev/null
+++ b/doc/source/schemas/interfaces/phylip_dnacharactermatrix_get.py
@@ -0,0 +1,13 @@
+d = dendropy.DnaCharacterMatrix.get(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        )
diff --git a/doc/source/schemas/interfaces/phylip_infinitesitescharactermatrix_get.py b/doc/source/schemas/interfaces/phylip_infinitesitescharactermatrix_get.py
new file mode 100644
index 0000000..1235d43
--- /dev/null
+++ b/doc/source/schemas/interfaces/phylip_infinitesitescharactermatrix_get.py
@@ -0,0 +1,13 @@
+d = dendropy.InfiniteSitesCharacterMatrix.get(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        )
diff --git a/doc/source/schemas/interfaces/phylip_proteincharactermatrix_get.py b/doc/source/schemas/interfaces/phylip_proteincharactermatrix_get.py
new file mode 100644
index 0000000..5aa9f88
--- /dev/null
+++ b/doc/source/schemas/interfaces/phylip_proteincharactermatrix_get.py
@@ -0,0 +1,13 @@
+d = dendropy.ProteinCharacterMatrix.get(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        )
diff --git a/doc/source/schemas/interfaces/phylip_restrictionsitescharactermatrix_get.py b/doc/source/schemas/interfaces/phylip_restrictionsitescharactermatrix_get.py
new file mode 100644
index 0000000..6d7fe69
--- /dev/null
+++ b/doc/source/schemas/interfaces/phylip_restrictionsitescharactermatrix_get.py
@@ -0,0 +1,13 @@
+d = dendropy.RestrictionSitesCharacterMatrix.get(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        )
diff --git a/doc/source/schemas/interfaces/phylip_rnacharactermatrix_get.py b/doc/source/schemas/interfaces/phylip_rnacharactermatrix_get.py
new file mode 100644
index 0000000..57c0863
--- /dev/null
+++ b/doc/source/schemas/interfaces/phylip_rnacharactermatrix_get.py
@@ -0,0 +1,14 @@
+d = dendropy.RnaCharacterMatrix.get(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/interfaces/phylip_standardcharactermatrix_get.py b/doc/source/schemas/interfaces/phylip_standardcharactermatrix_get.py
new file mode 100644
index 0000000..bed8a8a
--- /dev/null
+++ b/doc/source/schemas/interfaces/phylip_standardcharactermatrix_get.py
@@ -0,0 +1,29 @@
+d = dendropy.StandardCharacterMatrix.get(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        default_state_alphabet=None,
+        )
+
+d = dendropy.StandardCharacterMatrix.get(
+        path="data.phylip",
+        schema="phylip",
+        label=None,
+        taxon_namespace=None,
+        matrix_offset=None,
+        strict=False,
+        interleaved=False,
+        multispace_delimiter=False,
+        underscore_to_spaces=False,
+        ignore_invalid_chars=False,
+        ignore_unrecognized_keyword_arguments=False,
+        default_state_alphabet=dendropy.new_standard_state_alphabet("0123456789"),
+        )
diff --git a/doc/source/schemas/interfaces/phylip_write.py b/doc/source/schemas/interfaces/phylip_write.py
new file mode 100644
index 0000000..a0e9d3b
--- /dev/null
+++ b/doc/source/schemas/interfaces/phylip_write.py
@@ -0,0 +1,9 @@
+d.write(
+        path="data.phylip",
+        schema="phylip",
+        strict=False,
+        spaces_to_underscores=False,
+        force_unique_taxon_labels=False,
+        ignore_unrecognized_keyword_arguments=False,
+        )
+
diff --git a/doc/source/schemas/newick.rst b/doc/source/schemas/newick.rst
new file mode 100644
index 0000000..499915a
--- /dev/null
+++ b/doc/source/schemas/newick.rst
@@ -0,0 +1,114 @@
+******
+Newick
+******
+
+.. contents::
+    :local:
+    :backlinks: none
+
+Description
+===========
+
+    * http://evolution.genetics.washington.edu/phylip/newicktree.html
+    * http://en.wikipedia.org/wiki/Newick_format
+    * http://evolution.genetics.washington.edu/phylip/newick_doc.html
+
+Reading
+=======
+
+.. _schema_specific_keyword_arguments_reading_newick:
+
+Schema-Specific Keyword Arguments
+---------------------------------
+
+.. autokeywordargumentsonly:: dendropy.dataio.newickreader.NewickReader.__init__
+
+Supported Methods
+-----------------
+
+``Tree.get``
+............
+(:meth:`method reference <dendropy.datamodel.treemodel.Tree.get>`)
+
+.. literalinclude:: /schemas/interfaces/newick_tree_get.py
+
+``TreeList.get``
+................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeList.get>`)
+
+.. literalinclude:: /schemas/interfaces/newick_treelist_get.py
+
+``TreeList.read``
+.................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeList.read>`)
+
+.. literalinclude:: /schemas/interfaces/newick_treelist_read.py
+
+``TreeArray.read``
+..................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeArray.read>`)
+
+.. literalinclude:: /schemas/interfaces/newick_treearray_read.py
+
+``DataSet.get``
+...............
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.get>`)
+
+.. literalinclude:: /schemas/interfaces/newick_dataset_get.py
+
+``DataSet.read``
+................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.read>`)
+
+.. literalinclude:: /schemas/interfaces/newick_dataset_read.py
+
+Writing
+=======
+
+.. _schema_specific_keyword_arguments_writing_newick:
+
+Schema-Specific Keyword Arguments
+---------------------------------
+
+.. autokeywordargumentsonly:: dendropy.dataio.newickwriter.NewickWriter.__init__
+
+Supported Methods
+-----------------
+
+``Tree.write``
+..............
+(:meth:`method reference <dendropy.datamodel.treemodel.Tree.write>`)
+
+.. literalinclude:: /schemas/interfaces/newick_write.py
+
+``Tree.as_string``
+..................
+(:meth:`method reference <dendropy.datamodel.treemodel.Tree.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/newick_as_string.py
+
+``TreeList.write``
+..................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeList.write>`)
+
+.. literalinclude:: /schemas/interfaces/newick_write.py
+
+``TreeList.as_string``
+......................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeList.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/newick_as_string.py
+
+``DataSet.write``
+.................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.write>`)
+
+.. literalinclude:: /schemas/interfaces/newick_write.py
+
+``DataSet.as_string``
+.....................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/newick_as_string.py
+
+
diff --git a/doc/source/schemas/nexml.rst b/doc/source/schemas/nexml.rst
new file mode 100644
index 0000000..b3c1d95
--- /dev/null
+++ b/doc/source/schemas/nexml.rst
@@ -0,0 +1,224 @@
+*****
+NeXML
+*****
+
+.. contents::
+    :local:
+    :backlinks: none
+
+Description
+===========
+
+Reading
+=======
+
+.. _schema_specific_keyword_arguments_reading_nexml:
+
+Schema-Specific Keyword Arguments
+---------------------------------
+
+.. autokeywordargumentsonly:: dendropy.dataio.nexmlreader.NexmlReader.__init__
+
+Supported Methods
+-----------------
+
+``Tree.get``
+............
+(:meth:`method reference <dendropy.datamodel.treemodel.Tree.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_tree_get.py
+
+``TreeList.get``
+................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeList.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_treelist_get.py
+
+``TreeList.read``
+.................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeList.read>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_treelist_read.py
+
+``TreeArray.read``
+..................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeArray.read>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_treearray_read.py
+
+``DnaCharacterMatrix.get``
+..........................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_dnacharactermatrix_get.py
+
+``RnaCharacterMatrix.get``
+..........................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_rnacharactermatrix_get.py
+
+``ProteinCharacterMatrix.get``
+..............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_proteincharactermatrix_get.py
+
+``RestrictionSitesCharacterMatrix.get``
+.......................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_restrictionsitescharactermatrix_get.py
+
+``InfiniteSitesCharacterMatrix.get``
+....................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_infinitesitescharactermatrix_get.py
+
+``StandardCharacterMatrix.get``
+...............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_standardcharactermatrix_get.py
+
+``DataSet.get``
+...............
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_dataset_get.py
+
+``DataSet.read``
+................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.read>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_dataset_read.py
+
+
+Writing
+=======
+
+.. _schema_specific_keyword_arguments_writing_nexml:
+
+Schema-Specific Keyword Arguments
+---------------------------------
+
+.. autokeywordargumentsonly:: dendropy.dataio.nexmlwriter.NexmlWriter.__init__
+
+Supported Methods
+-----------------
+
+``Tree.write``
+..............
+(:meth:`method reference <dendropy.datamodel.treemodel.Tree.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_trees_write.py
+
+``Tree.as_string``
+..................
+(:meth:`method reference <dendropy.datamodel.treemodel.Tree.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_trees_as_string.py
+
+``TreeList.write``
+..................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeList.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_trees_write.py
+
+``TreeList.as_string``
+......................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeList.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_trees_as_string.py
+
+``DnaCharacterMatrix.write``
+.............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_chars_write.py
+
+``DnaCharacterMatrix.as_string``
+................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_chars_as_string.py
+
+
+``RnaCharacterMatrix.write``
+.............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_chars_write.py
+
+``RnaCharacterMatrix.as_string``
+................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_chars_as_string.py
+
+
+``ProteinCharacterMatrix.write``
+.................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_chars_write.py
+
+``ProteinCharacterMatrix.as_string``
+.....................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_chars_as_string.py
+
+
+``RestrictionSitesCharacterMatrix.write``
+.........................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_chars_write.py
+
+``RestrictionSitesCharacterMatrix.as_string``
+.............................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_chars_as_string.py
+
+
+``InfiniteSitesCharacterMatrix.write``
+......................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_chars_write.py
+
+``InfiniteSitesCharacterMatrix.as_string``
+..........................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_chars_as_string.py
+
+
+``StandardCharacterMatrix.write``
+.................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_chars_write.py
+
+``StandardCharacterMatrix.as_string``
+.....................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_chars_as_string.py
+
+
+``DataSet.write``
+.................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_dataset_write.py
+
+``DataSet.as_string``
+.....................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexml_dataset_as_string.py
+
diff --git a/doc/source/schemas/nexus.rst b/doc/source/schemas/nexus.rst
new file mode 100644
index 0000000..4578a43
--- /dev/null
+++ b/doc/source/schemas/nexus.rst
@@ -0,0 +1,225 @@
+*****
+NEXUS
+*****
+
+.. contents::
+    :local:
+    :backlinks: none
+
+Description
+===========
+
+Reading
+=======
+
+.. _schema_specific_keyword_arguments_reading_nexus:
+
+Schema-Specific Keyword Arguments
+---------------------------------
+
+.. autokeywordargumentsonly:: dendropy.dataio.nexusreader.NexusReader.__init__
+
+Supported Methods
+-----------------
+
+``Tree.get``
+............
+(:meth:`method reference <dendropy.datamodel.treemodel.Tree.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_tree_get.py
+
+``TreeList.get``
+................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeList.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_treelist_get.py
+
+``TreeList.read``
+.................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeList.read>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_treelist_read.py
+
+``TreeArray.read``
+..................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeArray.read>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_treearray_read.py
+
+``DnaCharacterMatrix.get``
+..........................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_dnacharactermatrix_get.py
+
+``RnaCharacterMatrix.get``
+..........................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_rnacharactermatrix_get.py
+
+``ProteinCharacterMatrix.get``
+..............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_proteincharactermatrix_get.py
+
+``RestrictionSitesCharacterMatrix.get``
+.......................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_restrictionsitescharactermatrix_get.py
+
+``InfiniteSitesCharacterMatrix.get``
+....................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_infinitesitescharactermatrix_get.py
+
+``StandardCharacterMatrix.get``
+...............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_standardcharactermatrix_get.py
+
+``DataSet.get``
+...............
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.get>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_dataset_get.py
+
+``DataSet.read``
+................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.read>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_dataset_read.py
+
+Writing
+=======
+
+.. _schema_specific_keyword_arguments_writing_nexus:
+
+Schema-Specific Keyword Arguments
+---------------------------------
+
+.. autokeywordargumentsonly:: dendropy.dataio.nexuswriter.NexusWriter.__init__
+
+Supported Methods
+-----------------
+
+``Tree.write``
+..............
+(:meth:`method reference <dendropy.datamodel.treemodel.Tree.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_trees_write.py
+
+``Tree.as_string``
+..................
+(:meth:`method reference <dendropy.datamodel.treemodel.Tree.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_trees_as_string.py
+
+``TreeList.write``
+..................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeList.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_trees_write.py
+
+``TreeList.as_string``
+......................
+(:meth:`method reference <dendropy.datamodel.treecollectionmodel.TreeList.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_trees_as_string.py
+
+``DnaCharacterMatrix.write``
+.............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_chars_write.py
+
+``DnaCharacterMatrix.as_string``
+................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_chars_as_string.py
+
+
+``RnaCharacterMatrix.write``
+.............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_chars_write.py
+
+``RnaCharacterMatrix.as_string``
+................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_chars_as_string.py
+
+
+``ProteinCharacterMatrix.write``
+.................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_chars_write.py
+
+``ProteinCharacterMatrix.as_string``
+.....................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_chars_as_string.py
+
+
+``RestrictionSitesCharacterMatrix.write``
+.........................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_chars_write.py
+
+``RestrictionSitesCharacterMatrix.as_string``
+.............................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_chars_as_string.py
+
+
+``InfiniteSitesCharacterMatrix.write``
+......................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_chars_write.py
+
+``InfiniteSitesCharacterMatrix.as_string``
+..........................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_chars_as_string.py
+
+
+``StandardCharacterMatrix.write``
+.................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_chars_write.py
+
+``StandardCharacterMatrix.as_string``
+.....................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_chars_as_string.py
+
+
+``DataSet.write``
+.................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.write>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_dataset_write.py
+
+``DataSet.as_string``
+.....................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/nexus_dataset_as_string.py
+
+
+
diff --git a/doc/source/schemas/phylip.rst b/doc/source/schemas/phylip.rst
new file mode 100644
index 0000000..e512c53
--- /dev/null
+++ b/doc/source/schemas/phylip.rst
@@ -0,0 +1,180 @@
+******
+PHYLIP
+******
+
+.. contents::
+    :local:
+    :backlinks: none
+
+Description
+===========
+
+Reading
+=======
+
+Schema-Specific Keyword Arguments
+---------------------------------
+
+.. autokeywordargumentsonly:: dendropy.dataio.phylipreader.PhylipReader.__init__
+
+Supported Methods
+-----------------
+
+``DnaCharacterMatrix.get``
+..........................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_dnacharactermatrix_get.py
+
+``RnaCharacterMatrix.get``
+..........................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_rnacharactermatrix_get.py
+
+``ProteinCharacterMatrix.get``
+..............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_proteincharactermatrix_get.py
+
+``RestrictionSitesCharacterMatrix.get``
+.......................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_restrictionsitescharactermatrix_get.py
+
+``InfiniteSitesCharacterMatrix.get``
+....................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_infinitesitescharactermatrix_get.py
+
+``StandardCharacterMatrix.get``
+...............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.get>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_standardcharactermatrix_get.py
+
+``DataSet.get``
+...............
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.get>`)
+
+Note that the type of data needs to be specified using the ``data_type``
+keyword argument.
+
+.. literalinclude:: /schemas/interfaces/phylip_dataset_get.py
+
+``DataSet.read``
+................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.read>`)
+
+Note that the type of data needs to be specified using the ``data_type``
+keyword argument.
+
+.. literalinclude:: /schemas/interfaces/phylip_dataset_read.py
+
+
+Writing
+=======
+
+.. _schema_specific_keyword_arguments_writing_phylip:
+
+Schema-Specific Keyword Arguments
+---------------------------------
+
+.. autokeywordargumentsonly:: dendropy.dataio.phylipwriter.PhylipWriter.__init__
+
+Supported Methods
+-----------------
+
+``DnaCharacterMatrix.write``
+.............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_write.py
+
+``DnaCharacterMatrix.as_string``
+................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.DnaCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_as_string.py
+
+
+``RnaCharacterMatrix.write``
+.............................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_write.py
+
+``RnaCharacterMatrix.as_string``
+................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RnaCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_as_string.py
+
+
+``ProteinCharacterMatrix.write``
+.................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_write.py
+
+``ProteinCharacterMatrix.as_string``
+.....................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.ProteinCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_as_string.py
+
+
+``RestrictionSitesCharacterMatrix.write``
+.........................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_write.py
+
+``RestrictionSitesCharacterMatrix.as_string``
+.............................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.RestrictionSitesCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_as_string.py
+
+
+``InfiniteSitesCharacterMatrix.write``
+......................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_write.py
+
+``InfiniteSitesCharacterMatrix.as_string``
+..........................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.InfiniteSitesCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_as_string.py
+
+
+``StandardCharacterMatrix.write``
+.................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.write>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_write.py
+
+``StandardCharacterMatrix.as_string``
+.....................................
+(:meth:`method reference <dendropy.datamodel.charmatrixmodel.StandardCharacterMatrix.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_as_string.py
+
+
+``DataSet.write``
+.................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.write>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_write.py
+
+``DataSet.as_string``
+.....................
+(:meth:`method reference <dendropy.datamodel.datasetmodel.DataSet.as_string>`)
+
+.. literalinclude:: /schemas/interfaces/phylip_as_string.py
+
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..811f979
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,13 @@
+[build_sphinx]
+source-dir = doc/source
+build-dir = doc/build
+all_files = 1
+
+[upload_sphinx]
+upload-dir = doc/build/html
+
+[egg_info]
+tag_build = 
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..214753d
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,170 @@
+#! /usr/bin/env python
+
+##############################################################################
+##  DendroPy Phylogenetic Computing Library.
+##
+##  Copyright 2010-2014 Jeet Sukumaran and Mark T. Holder.
+##  All rights reserved.
+##
+##  See "LICENSE.txt" for terms and conditions of usage.
+##
+##  If you use this work or any portion thereof in published work,
+##  please cite it as:
+##
+##     Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
+##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
+##
+##############################################################################
+
+"""
+Package setup and installation.
+"""
+
+import sys
+import os
+
+###############################################################################
+# Identification: version and description are sourced from the package itself
+
+from dendropy import __version__, revision_description, description
+sys.stderr.write("-setup.py: {}\n".format(description()))
+
+###############################################################################
+# setuptools/distutils/etc. import and configuration
+
+try:
+    import ez_setup
+    try:
+        ez_setup_path = " ('" + os.path.abspath(ez_setup.__file__) + "')"
+    except OSError:
+        ez_setup_path = ""
+    sys.stderr.write("-setup.py: using ez_setup{}\n".format(ez_setup_path))
+    ez_setup.use_setuptools()
+    import setuptools
+    try:
+        setuptools_path = " ('" +  os.path.abspath(setuptools.__file__) + "')"
+    except OSError:
+        setuptools_path = ""
+    sys.stderr.write("-setup.py: using setuptools{}\n".format(setuptools_path))
+    from setuptools import setup, find_packages
+except ImportError as e:
+    sys.stderr.write("-setup.py: using distutils\n")
+    from distutils.core import setup
+    sys.stderr.write("-setup.py: using canned package list\n")
+    PACKAGES = [
+            "dendropy",
+            "dendropy.calculate",
+            "dendropy.dataio",
+            "dendropy.datamodel",
+            "dendropy.interop",
+            "dendropy.legacy",
+            "dendropy.mathlib",
+            "dendropy.model",
+            "dendropy.simulate",
+            "dendropy.test",
+            "dendropy.utility",
+            "dendropy.utility.libexec",
+            ]
+else:
+    sys.stderr.write("-setup.py: searching for packages\n")
+    PACKAGES = find_packages()
+EXTRA_KWARGS = dict(
+    install_requires = ['setuptools'],
+    include_package_data = True,
+    test_suite = "dendropy.test",
+    zip_safe = True,
+    )
+
+PACKAGE_DIRS = [p.replace(".", os.path.sep) for p in PACKAGES]
+PACKAGE_INFO = [("{p[0]:>40} : {p[1]}".format(p=p)) for p in zip(PACKAGES, PACKAGE_DIRS)]
+sys.stderr.write("-setup.py: packages identified:\n{}\n".format("\n".join(PACKAGE_INFO)))
+ENTRY_POINTS = {}
+
+###############################################################################
+# Script paths: command-line scripts to install (currently only sumtrees.py)
+
+SCRIPT_SUBPATHS = [
+    ['applications', 'sumtrees', 'sumtrees.py'],
+    # ['scripts', 'sumtrees', 'cattrees.py'],
+    # ['scripts', 'sumtrees', 'sumlabels.py'],
+    # ['scripts', 'calculators', 'strict_consensus_merge.py'],
+    # ['scripts', 'calculators', 'long_branch_symmdiff.py'],
+]
+SCRIPTS = [os.path.join(*i) for i in SCRIPT_SUBPATHS]
+sys.stderr.write("\n-setup.py: scripts identified: {}\n".format(", ".join(SCRIPTS)))
+
+###############################################################################
+# setuptools/distutils command extensions
+
+try:
+    from setuptools import Command
+except ImportError:
+    sys.stderr.write("-setup.py: setuptools.Command could not be imported: setuptools extensions not available\n")
+else:
+    sys.stderr.write("-setup.py: setuptools command extensions are available\n")
+    command_hook = "distutils.commands"
+    ENTRY_POINTS[command_hook] = []
+
+    ###########################################################################
+    # coverage: register a 'coverage' command only when the analysis hooks exist
+    from dendropy.test.support import coverage_analysis
+    if coverage_analysis.DENDROPY_COVERAGE_ANALYSIS_AVAILABLE:
+        sys.stderr.write("-setup.py: coverage analysis available ('python setup.py coverage')\n")
+        ENTRY_POINTS[command_hook].append("coverage = dendropy.test.support.coverage_analysis:CoverageAnalysis")
+    else:
+        sys.stderr.write("-setup.py: coverage analysis not available\n")
+
+
+###############################################################################
+# Main setup
+
+### compose long description ###
+long_description = open('README.rst').read()
+long_description = long_description.replace("DendroPy-4.x.x", "DendroPy-{}".format(__version__))
+long_description = long_description.replace("""download the source code archive""",
+    """`download the source code archive <http://pypi.python.org/packages/source/D/DendroPy/DendroPy-{}.tar.gz>`_""".format(__version__))
+
+revision_text = revision_description()
+long_description = long_description + ("""\
+
+Current Release
+===============
+
+The current release of DendroPy is version {}{}.
+
+""".format(__version__, revision_text))
+
+setup(name='DendroPy',
+      version=__version__,
+      author='Jeet Sukumaran and Mark T. Holder',
+      author_email='jeetsukumaran at gmail.com and mtholder at ku.edu',
+      url='http://packages.python.org/DendroPy/',
+      description="A Python library for phylogenetics and phylogenetic computing: reading, writing, simulation, processing and manipulation of phylogenetic trees (phylogenies) and characters.",
+      license='BSD',
+      packages=PACKAGES,
+      package_dir=dict(zip(PACKAGES, PACKAGE_DIRS)),
+      # package_data not needed: include_package_data=True (in EXTRA_KWARGS above)
+      # package_data={
+      #     # "dendropy.utility" : ["libexec/*"],
+      #     },
+      scripts = SCRIPTS,
+      long_description=long_description,
+      entry_points = ENTRY_POINTS,
+      classifiers = [
+            "Intended Audience :: Developers",
+            "Intended Audience :: Science/Research",
+            "License :: OSI Approved :: BSD License",
+            "Natural Language :: English",
+            "Operating System :: OS Independent",
+            "Programming Language :: Python :: 2.7",
+            "Programming Language :: Python :: 3",
+            "Programming Language :: Python :: 3.1",
+            "Programming Language :: Python :: 3.2",
+            "Programming Language :: Python :: 3.3",
+            "Programming Language :: Python :: 3.4",
+            "Programming Language :: Python",
+            "Topic :: Scientific/Engineering :: Bio-Informatics",
+            ],
+      keywords='phylogenetics phylogeny phylogenies phylogeography evolution evolutionary biology systematics coalescent population genetics phyloinformatics bioinformatics',
+      **EXTRA_KWARGS
+      )

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/python-dendropy.git



More information about the debian-med-commit mailing list